Skip to content

Commit b57874d

Browse files
authored
[vibe-coding-platform] Ship latest updates (#1211)
This PR brings a number of improvements to the `vibe-coding-platform` example: - Fixed a bug persisting the prompt in local storage. - Moved from two tools (run command + wait command) to a single tool that takes a wait parameter to distinguish whether the LLM needs to wait for completion or the command is meant to run in the background. This makes the LLM more reliable. - Moved from a sub-call to the LLM that generates paths & files to one where the main LLM call decides on the paths and requests just the content. - This allows the main LLM call to have better context on the files that were already generated. - It greatly reduces hallucinations where the model tries to upload the same files multiple times. - Integrated some ai-elements: - The conversation (including auto-scroll) is now from our new component library. - The spinners we show are also from the library. - Included error handling for all the tool call steps that can possibly fail. - This caused some issues where tool calls were shown in an inconsistent state; now they reflect the error status. - This also allows returning information for the LLM to fix in the next iteration instead of swallowing the errors. - It makes the whole workflow more reliable, preventing the appearance of hanging when there was actually a failure. - Introduced a self-correction feature that triggers once, to prevent infinite iterations if the system is not capable of reaching a solution. - Introduced settings to disable auto-correct and to set the reasoning level for GPT-5.
1 parent 69a2b12 commit b57874d

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

67 files changed

+6585
-847
lines changed

apps/vibe-coding-platform/ai/constants.ts

Lines changed: 16 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -1,16 +1,23 @@
11
import { type GatewayModelId } from '@ai-sdk/gateway'
22

3-
export const DEFAULT_MODEL: GatewayModelId[number] = 'openai/gpt-5'
3+
export enum Models {
4+
AmazonNovaPro = 'amazon/nova-pro',
5+
AnthropicClaude4Sonnet = 'anthropic/claude-4-sonnet',
6+
GoogleGeminiFlash = 'google/gemini-2.5-flash',
7+
MoonshotKimiK2 = 'moonshotai/kimi-k2',
8+
OpenAIGPT5 = 'gpt-5',
9+
XaiGrok3Fast = 'xai/grok-3-fast',
10+
}
11+
12+
export const DEFAULT_MODEL = Models.OpenAIGPT5
413

514
export const SUPPORTED_MODELS: GatewayModelId[] = [
6-
'amazon/nova-pro',
7-
'anthropic/claude-4-sonnet',
8-
'google/gemini-2.5-flash',
9-
'moonshotai/kimi-k2',
10-
'openai/gpt-4o',
11-
'openai/gpt-5',
12-
'openai/o4-mini',
13-
'xai/grok-3-fast',
15+
Models.AmazonNovaPro,
16+
Models.AnthropicClaude4Sonnet,
17+
Models.GoogleGeminiFlash,
18+
Models.MoonshotKimiK2,
19+
Models.OpenAIGPT5,
20+
Models.XaiGrok3Fast,
1421
]
1522

1623
export const TEST_PROMPTS = [

apps/vibe-coding-platform/ai/gateway.ts

Lines changed: 16 additions & 27 deletions
Original file line numberDiff line numberDiff line change
@@ -1,59 +1,48 @@
11
import type { JSONValue } from 'ai'
22
import type { OpenAIResponsesProviderOptions } from '@ai-sdk/openai'
3-
import { createGatewayProvider, type GatewayModelId } from '@ai-sdk/gateway'
3+
import { createGatewayProvider } from '@ai-sdk/gateway'
4+
import { Models } from './constants'
45

56
const gateway = createGatewayProvider({
67
baseURL: process.env.AI_GATEWAY_BASE_URL,
78
})
89

9-
interface AvailableModel {
10-
id: GatewayModelId | 'openai/gpt-5'
11-
name: string
12-
}
13-
14-
export async function getAvailableModels(): Promise<AvailableModel[]> {
10+
export async function getAvailableModels() {
1511
const response = await gateway.getAvailableModels()
16-
return [...response.models.map(({ id, name }) => ({ id, name }))]
12+
return response.models
13+
.map((model) => ({ id: model.id, name: model.name }))
14+
.concat([{ id: Models.OpenAIGPT5, name: 'GPT-5' }])
1715
}
1816

19-
interface ModelOptions {
17+
export interface ModelOptions {
2018
model: string
2119
providerOptions?: Record<string, Record<string, JSONValue>>
2220
headers?: Record<string, string>
2321
}
2422

25-
export function getModelOptions(modelId: string): ModelOptions {
26-
if (modelId === 'openai/o4-mini') {
27-
return {
28-
model: modelId,
29-
providerOptions: {
30-
openai: {
31-
reasoningEffort: 'low',
32-
reasoningSummary: 'detailed',
33-
} satisfies OpenAIResponsesProviderOptions,
34-
},
35-
}
36-
}
37-
38-
if (modelId === 'openai/gpt-5') {
23+
export function getModelOptions(
24+
modelId: string,
25+
options?: { reasoningEffort?: 'low' | 'medium' }
26+
): ModelOptions {
27+
if (modelId === Models.OpenAIGPT5) {
3928
return {
4029
model: modelId,
4130
providerOptions: {
4231
openai: {
4332
include: ['reasoning.encrypted_content'],
44-
reasoningEffort: 'low',
45-
reasoningSummary: 'detailed',
33+
reasoningEffort: options?.reasoningEffort ?? 'low',
34+
reasoningSummary: 'auto',
35+
serviceTier: 'priority',
4636
} satisfies OpenAIResponsesProviderOptions,
4737
},
4838
}
4939
}
5040

51-
if (modelId === 'anthropic/claude-4-sonnet') {
41+
if (modelId === Models.AnthropicClaude4Sonnet) {
5242
return {
5343
model: modelId,
5444
headers: { 'anthropic-beta': 'fine-grained-tool-streaming-2025-05-14' },
5545
providerOptions: {
56-
// gateway: { order: ["bedrock", "vertex"] },
5746
anthropic: {
5847
cacheControl: { type: 'ephemeral' },
5948
},
Lines changed: 15 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -1,33 +1,37 @@
11
import z from 'zod/v3'
22

3+
export const errorSchema = z.object({
4+
message: z.string(),
5+
})
6+
37
export const dataPartSchema = z.object({
48
'create-sandbox': z.object({
59
sandboxId: z.string().optional(),
6-
status: z.enum(['loading', 'done']),
10+
status: z.enum(['loading', 'done', 'error']),
11+
error: errorSchema.optional(),
712
}),
813
'generating-files': z.object({
914
paths: z.array(z.string()),
10-
status: z.enum(['generating', 'uploading', 'uploaded', 'done']),
15+
status: z.enum(['generating', 'uploading', 'uploaded', 'done', 'error']),
16+
error: errorSchema.optional(),
1117
}),
1218
'run-command': z.object({
13-
command: z.string(),
14-
args: z.array(z.string()),
15-
status: z.enum(['loading', 'done']),
16-
commandId: z.string().optional(),
17-
sandboxId: z.string(),
18-
}),
19-
'wait-command': z.object({
2019
sandboxId: z.string(),
21-
commandId: z.string(),
20+
commandId: z.string().optional(),
2221
command: z.string(),
2322
args: z.array(z.string()),
23+
status: z.enum(['executing', 'running', 'waiting', 'done', 'error']),
2424
exitCode: z.number().optional(),
25-
status: z.enum(['loading', 'done']),
25+
error: errorSchema.optional(),
2626
}),
2727
'get-sandbox-url': z.object({
2828
url: z.string().optional(),
2929
status: z.enum(['loading', 'done']),
3030
}),
31+
'report-errors': z.object({
32+
summary: z.string(),
33+
paths: z.array(z.string()).optional(),
34+
}),
3135
})
3236

3337
export type DataPart = z.infer<typeof dataPartSchema>

apps/vibe-coding-platform/ai/tools/create-sandbox.ts

Lines changed: 36 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,7 @@
11
import type { UIMessageStreamWriter, UIMessage } from 'ai'
22
import type { DataPart } from '../messages/data-parts'
33
import { Sandbox } from '@vercel/sandbox'
4+
import { getRichError } from './get-rich-error'
45
import { tool } from 'ai'
56
import description from './create-sandbox.md'
67
import z from 'zod/v3'
@@ -15,9 +16,11 @@ export const createSandbox = ({ writer }: Params) =>
1516
inputSchema: z.object({
1617
timeout: z
1718
.number()
19+
.min(600000)
20+
.max(2700000)
1821
.optional()
1922
.describe(
20-
'Maximum time in milliseconds the Vercel Sandbox will remain active before automatically shutting down. Defaults to 300000ms (5 minutes). The sandbox will terminate all running processes when this timeout is reached.'
23+
'Maximum time in milliseconds the Vercel Sandbox will remain active before automatically shutting down. Minimum 600000ms (10 minutes), maximum 2700000ms (45 minutes). Defaults to 600000ms (10 minutes). The sandbox will terminate all running processes when this timeout is reached.'
2124
),
2225
ports: z
2326
.array(z.number())
@@ -34,17 +37,39 @@ export const createSandbox = ({ writer }: Params) =>
3437
data: { status: 'loading' },
3538
})
3639

37-
const sandbox = await Sandbox.create({
38-
timeout,
39-
ports,
40-
})
40+
try {
41+
const sandbox = await Sandbox.create({
42+
timeout: timeout ?? 600000,
43+
ports,
44+
})
4145

42-
writer.write({
43-
id: toolCallId,
44-
type: 'data-create-sandbox',
45-
data: { sandboxId: sandbox.sandboxId, status: 'done' },
46-
})
46+
writer.write({
47+
id: toolCallId,
48+
type: 'data-create-sandbox',
49+
data: { sandboxId: sandbox.sandboxId, status: 'done' },
50+
})
51+
52+
return (
53+
`Sandbox created with ID: ${sandbox.sandboxId}.` +
54+
`\nYou can now upload files, run commands, and access services on the exposed ports.`
55+
)
56+
} catch (error) {
57+
const richError = getRichError({
58+
action: 'Creating Sandbox',
59+
error,
60+
})
61+
62+
writer.write({
63+
id: toolCallId,
64+
type: 'data-create-sandbox',
65+
data: {
66+
error: { message: richError.error.message },
67+
status: 'error',
68+
},
69+
})
4770

48-
return `Sandbox created with ID: ${sandbox.sandboxId}. You can now upload files, run commands, and access services on the exposed ports.`
71+
console.log('Error creating Sandbox:', richError.error)
72+
return richError.message
73+
}
4974
},
5075
})

apps/vibe-coding-platform/ai/tools/generate-files-prompt.md

Lines changed: 0 additions & 11 deletions
This file was deleted.

0 commit comments

Comments
 (0)