@@ -5,7 +5,7 @@ import { toPrompt } from '@/lib/prompt'
 import ratelimit from '@/lib/ratelimit'
 import { fragmentSchema as schema } from '@/lib/schema'
 import { Templates } from '@/lib/templates'
-import { streamObject, LanguageModel, CoreMessage } from 'ai'
+import { streamText, Output, LanguageModel, type ModelMessage } from 'ai'
 
 export const maxDuration = 300
 
@@ -25,7 +25,7 @@ export async function POST(req: Request) {
     model,
     config,
   }: {
-    messages: CoreMessage[]
+    messages: ModelMessage[]
     userID: string | undefined
     teamID: string | undefined
     template: Templates
@@ -54,18 +54,29 @@ export async function POST(req: Request) {
   const { model: modelNameString, apiKey: modelApiKey, ...modelParams } = config
   const modelClient = getModelClient(model, config)
 
-  try {
-    const stream = await streamObject({
-      model: modelClient as LanguageModel,
-      schema,
-      system: toPrompt(template),
-      messages,
-      maxRetries: 0, // do not retry on errors
-      ...modelParams,
-    })
+  let apiError: any = null
+
+  const result = streamText({
+    model: modelClient as LanguageModel,
+    output: Output.object({ schema }),
+    system: toPrompt(template),
+    messages,
+    maxRetries: 0,
+    onError: ({ error }) => {
+      apiError = error
+    },
+    ...modelParams,
+  })
 
-    return stream.toTextStreamResponse()
-  } catch (error: any) {
-    return handleAPIError(error, { hasOwnApiKey: !!config.apiKey })
+  // Check if API call succeeds by awaiting first chunk
+  try {
+    await result.response
+  } catch {
+    // apiError is set by onError callback with the actual API error
+    if (apiError) {
+      return handleAPIError(apiError, { hasOwnApiKey: !!config.apiKey })
+    }
   }
+
+  return result.toTextStreamResponse()
 }
0 commit comments