Skip to content

Commit fc2147a

Browse files
authored
fix: prevent crash when streaming chunks have null choices array (#9105)
1 parent ca64953 commit fc2147a

File tree

3 files changed: +51 additions, −7 deletions

src/api/providers/__tests__/minimax.spec.ts

Lines changed: 28 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -279,6 +279,34 @@ describe("MiniMaxHandler", () => {
279279
undefined,
280280
)
281281
})
282+
283+
it("should handle streaming chunks with null choices array", async () => {
284+
const testContent = "Content after null choices"
285+
286+
mockCreate.mockImplementationOnce(() => {
287+
return {
288+
[Symbol.asyncIterator]: () => ({
289+
next: vitest
290+
.fn()
291+
.mockResolvedValueOnce({
292+
done: false,
293+
value: { choices: null },
294+
})
295+
.mockResolvedValueOnce({
296+
done: false,
297+
value: { choices: [{ delta: { content: testContent } }] },
298+
})
299+
.mockResolvedValueOnce({ done: true }),
300+
}),
301+
}
302+
})
303+
304+
const stream = handler.createMessage("system prompt", [])
305+
const firstChunk = await stream.next()
306+
307+
expect(firstChunk.done).toBe(false)
308+
expect(firstChunk.value).toEqual({ type: "text", text: testContent })
309+
})
282310
})
283311

284312
describe("Model Configuration", () => {

src/api/providers/base-openai-compatible-provider.ts

Lines changed: 18 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -116,7 +116,15 @@ export abstract class BaseOpenAiCompatibleProvider<ModelName extends string>
116116
)
117117

118118
for await (const chunk of stream) {
119-
const delta = chunk.choices[0]?.delta
119+
// Check for provider-specific error responses (e.g., MiniMax base_resp)
120+
const chunkAny = chunk as any
121+
if (chunkAny.base_resp?.status_code && chunkAny.base_resp.status_code !== 0) {
122+
throw new Error(
123+
`${this.providerName} API Error (${chunkAny.base_resp.status_code}): ${chunkAny.base_resp.status_msg || "Unknown error"}`,
124+
)
125+
}
126+
127+
const delta = chunk.choices?.[0]?.delta
120128

121129
if (delta?.content) {
122130
for (const processedChunk of matcher.update(delta.content)) {
@@ -155,7 +163,15 @@ export abstract class BaseOpenAiCompatibleProvider<ModelName extends string>
155163
messages: [{ role: "user", content: prompt }],
156164
})
157165

158-
return response.choices[0]?.message.content || ""
166+
// Check for provider-specific error responses (e.g., MiniMax base_resp)
167+
const responseAny = response as any
168+
if (responseAny.base_resp?.status_code && responseAny.base_resp.status_code !== 0) {
169+
throw new Error(
170+
`${this.providerName} API Error (${responseAny.base_resp.status_code}): ${responseAny.base_resp.status_msg || "Unknown error"}`,
171+
)
172+
}
173+
174+
return response.choices?.[0]?.message.content || ""
159175
} catch (error) {
160176
throw handleOpenAIError(error, this.providerName)
161177
}

src/api/providers/openai.ts

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -191,7 +191,7 @@ export class OpenAiHandler extends BaseProvider implements SingleCompletionHandl
191191
let lastUsage
192192

193193
for await (const chunk of stream) {
194-
const delta = chunk.choices[0]?.delta ?? {}
194+
const delta = chunk.choices?.[0]?.delta ?? {}
195195

196196
if (delta.content) {
197197
for (const chunk of matcher.update(delta.content)) {
@@ -242,7 +242,7 @@ export class OpenAiHandler extends BaseProvider implements SingleCompletionHandl
242242

243243
yield {
244244
type: "text",
245-
text: response.choices[0]?.message.content || "",
245+
text: response.choices?.[0]?.message.content || "",
246246
}
247247

248248
yield this.processUsageMetrics(response.usage, modelInfo)
@@ -290,7 +290,7 @@ export class OpenAiHandler extends BaseProvider implements SingleCompletionHandl
290290
throw handleOpenAIError(error, this.providerName)
291291
}
292292

293-
return response.choices[0]?.message.content || ""
293+
return response.choices?.[0]?.message.content || ""
294294
} catch (error) {
295295
if (error instanceof Error) {
296296
throw new Error(`${this.providerName} completion error: ${error.message}`)
@@ -373,15 +373,15 @@ export class OpenAiHandler extends BaseProvider implements SingleCompletionHandl
373373

374374
yield {
375375
type: "text",
376-
text: response.choices[0]?.message.content || "",
376+
text: response.choices?.[0]?.message.content || "",
377377
}
378378
yield this.processUsageMetrics(response.usage)
379379
}
380380
}
381381

382382
private async *handleStreamResponse(stream: AsyncIterable<OpenAI.Chat.Completions.ChatCompletionChunk>): ApiStream {
383383
for await (const chunk of stream) {
384-
const delta = chunk.choices[0]?.delta
384+
const delta = chunk.choices?.[0]?.delta
385385
if (delta?.content) {
386386
yield {
387387
type: "text",

0 commit comments

Comments (0)