
Commit 876652a

Merge branch 'v5' into v5-gr2m/5682-zod-4

2 parents 45b4e91 + 4f9fc9c

119 files changed: +1318 -993 lines


.changeset/eleven-pets-clean.md
Lines changed: 5 additions & 0 deletions

@@ -0,0 +1,5 @@
+---
+'@ai-sdk/provider': patch
+---
+
+chore (provider): tweak provider definition

.changeset/empty-fireants-learn.md
Lines changed: 5 additions & 0 deletions

@@ -0,0 +1,5 @@
+---
+'ai': major
+---
+
+chore (ai): remove exports of internal ui functions

.changeset/hip-rocks-mix.md
Lines changed: 5 additions & 0 deletions

@@ -0,0 +1,5 @@
+---
+'ai': patch
+---
+
+feat (ai): support string model ids through gateway
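
For context, this changeset means a plain string can be passed where a model instance was previously required, with the gateway resolving the id. A minimal usage sketch, assuming generateText from the ai package; the model id 'openai/gpt-4o' is only an illustrative placeholder:

import { generateText } from 'ai';

// The string id is resolved through the gateway instead of requiring
// a provider model instance; the id below is only a placeholder.
const { text } = await generateText({
  model: 'openai/gpt-4o',
  prompt: 'Write a one-line greeting.',
});

console.log(text);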

.changeset/khaki-tomatoes-think.md
Lines changed: 5 additions & 0 deletions

@@ -0,0 +1,5 @@
+---
+'ai': patch
+---
+
+fix (ai): do not send id with start unless specified
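
The snapshot updates later in this commit show the effect: the start part of the UI message stream no longer carries a messageId by default. A small sketch of the two shapes, with the id value being a placeholder:

// Default behavior after this fix: no id is sent with the start part.
const startPart = { type: 'start' };

// Only when an id is explicitly specified does it appear on the part.
const startPartWithId = { type: 'start', messageId: 'msg-123' };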

.changeset/pre.json
Lines changed: 9 additions & 0 deletions

@@ -103,6 +103,8 @@
     "eight-emus-push",
     "eighty-seals-search",
     "eleven-lobsters-rescue",
+    "eleven-pets-clean",
+    "empty-fireants-learn",
     "empty-pets-jump",
     "fair-bikes-hear",
     "fair-cobras-tan",
@@ -146,6 +148,7 @@
     "heavy-ligers-lay",
     "heavy-pens-destroy",
     "hip-eagles-attend",
+    "hip-rocks-mix",
     "hot-colts-hear",
     "huge-cloths-burn",
     "hungry-bears-glow",
@@ -159,6 +162,7 @@
     "itchy-cars-relax",
     "itchy-deers-jog",
     "khaki-bears-drop",
+    "khaki-tomatoes-think",
     "large-peas-eat",
     "large-ties-own",
     "late-brooms-suffer",
@@ -172,6 +176,7 @@
     "little-carrots-speak",
     "little-tips-occur",
     "little-zebras-suffer",
+    "lovely-garlics-promise",
     "many-beans-exercise",
     "many-toes-glow",
     "mean-files-talk",
@@ -207,6 +212,7 @@
     "pretty-doors-promise",
     "pretty-jars-reflect",
     "proud-cows-bathe",
+    "proud-dancers-doubt",
     "purple-rocks-cover",
     "quick-toys-help",
     "quiet-glasses-double",
@@ -220,6 +226,7 @@
     "rotten-walls-provide",
     "rude-badgers-roll",
     "rude-bugs-run",
+    "rude-rivers-hide",
     "selfish-rice-own",
     "selfish-wasps-applaud",
     "serious-clouds-cheer",
@@ -251,6 +258,7 @@
     "spicy-mangos-brush",
     "spicy-shoes-matter",
     "spotty-swans-know",
+    "stale-tools-exercise",
     "strange-apricots-enjoy",
     "strange-camels-decide",
     "sweet-lobsters-type",
@@ -260,6 +268,7 @@
     "swift-ghosts-itch",
     "swift-turtles-rhyme",
     "tall-rice-flash",
+    "tasty-starfishes-swim",
     "ten-ligers-turn",
     "ten-students-yell",
     "tender-buses-glow",

.changeset/rude-rivers-hide.md
Lines changed: 5 additions & 0 deletions

@@ -0,0 +1,5 @@
+---
+'ai': major
+---
+
+chore (ai): remove steps from tool invocation ui parts

.changeset/stale-tools-exercise.md
Lines changed: 5 additions & 0 deletions

@@ -0,0 +1,5 @@
+---
+'@ai-sdk/provider': patch
+---
+
+release alpha.7

packages/ai/CHANGELOG.md
Lines changed: 27 additions & 0 deletions

@@ -1,5 +1,32 @@
 # ai
 
+## 5.0.0-alpha.7
+
+### Major Changes
+
+- db345da: chore (ai): remove exports of internal ui functions
+- 247ee0c: chore (ai): remove steps from tool invocation ui parts
+
+### Patch Changes
+
+- 9b0da33: fix (ai): do not send id with start unless specified
+- Updated dependencies [5c56081]
+  - @ai-sdk/provider@2.0.0-alpha.7
+  - @ai-sdk/gateway@1.0.0-alpha.7
+  - @ai-sdk/provider-utils@3.0.0-alpha.7
+
+## 5.0.0-alpha.6
+
+### Patch Changes
+
+- 0d2c085: feat (ai): support string model ids through gateway
+- 48a7606: feat (ai): support changing the system prompt in prepareSteps
+- Updated dependencies [0d2c085]
+- Updated dependencies [6c2c708]
+  - @ai-sdk/provider@2.0.0-alpha.6
+  - @ai-sdk/gateway@1.0.0-alpha.6
+  - @ai-sdk/provider-utils@3.0.0-alpha.6
+
 ## 5.0.0-alpha.5
 
 ### Major Changes

packages/ai/core/generate-object/generate-object.ts
Lines changed: 4 additions & 1 deletion

@@ -39,6 +39,7 @@ import { LanguageModelUsage } from '../types/usage';
 import { GenerateObjectResult } from './generate-object-result';
 import { getOutputStrategy } from './output-strategy';
 import { validateObjectGenerationInput } from './validate-object-generation-input';
+import { resolveLanguageModel } from '../prompt/resolve-language-model';
 
 const originalGenerateId = createIdGenerator({ prefix: 'aiobj', size: 24 });
 
@@ -211,7 +212,7 @@ Default and recommended: 'auto' (best mode for the model).
   },
 ): Promise<GenerateObjectResult<RESULT>> {
   const {
-    model,
+    model: modelArg,
     output = 'object',
     system,
     prompt,
@@ -229,6 +230,8 @@ Default and recommended: 'auto' (best mode for the model).
     ...settings
   } = options;
 
+  const model = resolveLanguageModel(modelArg);
+
   const enumValues = 'enum' in options ? options.enum : undefined;
   const {
     schema: inputSchema,
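
The resolveLanguageModel helper itself is not part of this diff. A hedged sketch of what such a resolution step could look like, assuming string ids map to gateway models and model instances pass through unchanged; the gateway.languageModel call is an assumption for illustration, not confirmed by this commit:

import { LanguageModelV2 } from '@ai-sdk/provider';
import { gateway } from '@ai-sdk/gateway';

type LanguageModel = LanguageModelV2 | string;

// Hypothetical resolution step: plain strings are treated as gateway
// model ids, already-constructed model instances are returned as-is.
function resolveLanguageModel(model: LanguageModel): LanguageModelV2 {
  return typeof model === 'string' ? gateway.languageModel(model) : model;
}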

packages/ai/core/generate-object/stream-object.ts
Lines changed: 4 additions & 1 deletion

@@ -46,6 +46,7 @@ import { LanguageModelUsage } from '../types/usage';
 import { getOutputStrategy, OutputStrategy } from './output-strategy';
 import { ObjectStreamPart, StreamObjectResult } from './stream-object-result';
 import { validateObjectGenerationInput } from './validate-object-generation-input';
+import { resolveLanguageModel } from '../prompt/resolve-language-model';
 
 const originalGenerateId = createIdGenerator({ prefix: 'aiobj', size: 24 });
 
@@ -360,7 +361,7 @@ class DefaultStreamObjectResult<PARTIAL, RESULT, ELEMENT_STREAM>
   >;
 
   constructor({
-    model,
+    model: modelArg,
     headers,
     telemetry,
     settings,
@@ -398,6 +399,8 @@ class DefaultStreamObjectResult<PARTIAL, RESULT, ELEMENT_STREAM>
     currentDate: () => Date;
     now: () => number;
   }) {
+    const model = resolveLanguageModel(modelArg);
+
     const { maxRetries, retry } = prepareRetries({
       maxRetries: maxRetriesArg,
     });

packages/ai/core/generate-text/__snapshots__/stream-text.test.ts.snap
Lines changed: 15 additions & 15 deletions

@@ -60,7 +60,7 @@ exports[`streamText > multiple stream consumption > should support text stream,
   ],
   "uiMessageStream": [
     {
-      "messageId": "id-1",
+      "messageId": undefined,
       "metadata": undefined,
       "type": "start",
     },
@@ -2816,7 +2816,7 @@ exports[`streamText > result.fullStream > should use fallback response metadata
 
 exports[`streamText > result.pipeUIMessageStreamToResponse > should mask error messages by default 1`] = `
 [
-  "data: {"type":"start","messageId":"id-0"}
+  "data: {"type":"start"}
 
 ",
   "data: {"type":"start-step"}
@@ -2839,7 +2839,7 @@ exports[`streamText > result.pipeUIMessageStreamToResponse > should mask error m
 
 exports[`streamText > result.pipeUIMessageStreamToResponse > should omit message finish event (d:) when sendFinish is false 1`] = `
 [
-  "data: {"type":"start","messageId":"id-0"}
+  "data: {"type":"start"}
 
 ",
   "data: {"type":"start-step"}
@@ -2859,7 +2859,7 @@ exports[`streamText > result.pipeUIMessageStreamToResponse > should omit message
 
 exports[`streamText > result.pipeUIMessageStreamToResponse > should support custom error messages 1`] = `
 [
-  "data: {"type":"start","messageId":"id-0"}
+  "data: {"type":"start"}
 
 ",
   "data: {"type":"start-step"}
@@ -3263,7 +3263,7 @@ exports[`streamText > result.textStream > should swallow error to prevent server
 exports[`streamText > result.toUIMessageStream > should create a data stream 1`] = `
 [
   {
-    "messageId": "id-0",
+    "messageId": undefined,
     "metadata": undefined,
     "type": "start",
   },
@@ -3297,7 +3297,7 @@ exports[`streamText > result.toUIMessageStream > should create a data stream 1`]
 exports[`streamText > result.toUIMessageStream > should mask error messages by default 1`] = `
 [
   {
-    "messageId": "id-0",
+    "messageId": undefined,
     "metadata": undefined,
     "type": "start",
   },
@@ -3323,7 +3323,7 @@ exports[`streamText > result.toUIMessageStream > should mask error messages by d
 exports[`streamText > result.toUIMessageStream > should omit message finish event when sendFinish is false 1`] = `
 [
   {
-    "messageId": "id-0",
+    "messageId": undefined,
     "metadata": undefined,
     "type": "start",
   },
@@ -3366,7 +3366,7 @@ exports[`streamText > result.toUIMessageStream > should omit message start event
 exports[`streamText > result.toUIMessageStream > should send file content 1`] = `
 [
   {
-    "messageId": "id-0",
+    "messageId": undefined,
     "metadata": undefined,
     "type": "start",
   },
@@ -3402,7 +3402,7 @@ exports[`streamText > result.toUIMessageStream > should send file content 1`] =
 exports[`streamText > result.toUIMessageStream > should send reasoning content when sendReasoning is true 1`] = `
 [
   {
-    "messageId": "id-0",
+    "messageId": undefined,
     "metadata": undefined,
     "type": "start",
   },
@@ -3488,7 +3488,7 @@ exports[`streamText > result.toUIMessageStream > should send reasoning content w
 exports[`streamText > result.toUIMessageStream > should send source content when sendSources is true 1`] = `
 [
   {
-    "messageId": "id-0",
+    "messageId": undefined,
     "metadata": undefined,
     "type": "start",
   },
@@ -3536,7 +3536,7 @@ exports[`streamText > result.toUIMessageStream > should send source content when
 exports[`streamText > result.toUIMessageStream > should send tool call and tool result stream parts 1`] = `
 [
   {
-    "messageId": "id-0",
+    "messageId": undefined,
     "metadata": undefined,
     "type": "start",
   },
@@ -3571,7 +3571,7 @@ exports[`streamText > result.toUIMessageStream > should send tool call and tool
 exports[`streamText > result.toUIMessageStream > should send tool call, tool call stream start, tool call deltas, and tool result stream parts when tool call delta flag is enabled 1`] = `
 [
   {
-    "messageId": "id-0",
+    "messageId": undefined,
     "metadata": undefined,
     "type": "start",
   },
@@ -3621,7 +3621,7 @@ exports[`streamText > result.toUIMessageStream > should send tool call, tool cal
 exports[`streamText > result.toUIMessageStream > should support custom error messages 1`] = `
 [
   {
-    "messageId": "id-0",
+    "messageId": undefined,
     "metadata": undefined,
     "type": "start",
   },
@@ -3646,7 +3646,7 @@ exports[`streamText > result.toUIMessageStream > should support custom error mes
 
 exports[`streamText > result.toUIMessageStreamResponse > should mask error messages by default 1`] = `
 [
-  "data: {"type":"start","messageId":"id-0"}
+  "data: {"type":"start"}
 
 ",
   "data: {"type":"start-step"}
@@ -3669,7 +3669,7 @@ exports[`streamText > result.toUIMessageStreamResponse > should support custom e
 
 exports[`streamText > result.toUIMessageStreamResponse > should support custom error messages 1`] = `
 [
-  "data: {"type":"start","messageId":"id-0"}
+  "data: {"type":"start"}
 
 ",
   "data: {"type":"start-step"}

packages/ai/core/generate-text/generate-text.ts
Lines changed: 9 additions & 3 deletions

@@ -1,4 +1,5 @@
 import {
+  LanguageModelV2,
   LanguageModelV2Content,
   LanguageModelV2ToolCall,
 } from '@ai-sdk/provider';
@@ -42,6 +43,7 @@ import { ToolCallArray } from './tool-call';
 import { ToolCallRepairFunction } from './tool-call-repair';
 import { ToolResultArray } from './tool-result';
 import { ToolSet } from './tool-set';
+import { resolveLanguageModel } from '../prompt/resolve-language-model';
 
 const originalGenerateId = createIdGenerator({
   prefix: 'aitxt',
@@ -108,7 +110,7 @@ export async function generateText<
   OUTPUT = never,
   OUTPUT_PARTIAL = never,
 >({
-  model,
+  model: modelArg,
   tools,
   toolChoice,
   system,
@@ -215,6 +217,7 @@ A function that attempts to repair a tool call that failed to parse.
     currentDate?: () => Date;
   };
 }): Promise<GenerateTextResult<TOOLS, OUTPUT>> {
+  const model = resolveLanguageModel(modelArg);
   const stopConditions = asArray(stopWhen);
   const { maxRetries, retry } = prepareRetries({ maxRetries: maxRetriesArg });
 
@@ -259,7 +262,7 @@ A function that attempts to repair a tool call that failed to parse.
   const callSettings = prepareCallSettings(settings);
 
   let currentModelResponse: Awaited<
-    ReturnType<LanguageModel['doGenerate']>
+    ReturnType<LanguageModelV2['doGenerate']>
   > & { response: { id: string; timestamp: Date; modelId: string } };
   let currentToolCalls: ToolCallArray<TOOLS> = [];
   let currentToolResults: ToolResultArray<TOOLS> = [];
@@ -286,7 +289,10 @@ A function that attempts to repair a tool call that failed to parse.
     supportedUrls: await model.supportedUrls,
   });
 
-  const stepModel = prepareStepResult?.model ?? model;
+  const stepModel = resolveLanguageModel(
+    prepareStepResult?.model ?? model,
+  );
+
   const { toolChoice: stepToolChoice, tools: stepTools } =
     prepareToolsAndToolChoice({
       tools,
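
Because the per-step model now also goes through resolveLanguageModel, a step override can in principle be a string id as well. A hedged sketch; the prepareStep option name and its callback shape are assumptions inferred from prepareStepResult in this diff, and the model ids are placeholders:

import { generateText } from 'ai';

const result = await generateText({
  model: 'openai/gpt-4o-mini', // placeholder string id, resolved via the gateway
  prompt: 'Plan the work, then summarize it.',
  prepareStep: async ({ stepNumber }) => {
    // Hypothetical: later steps switch to a different model id, which is
    // resolved the same way as the top-level model.
    return stepNumber > 0 ? { model: 'openai/gpt-4o' } : {};
  },
});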
