Skip to content

Commit 63e8675

Browse files
Fix codemode not working in tty-interactive mode
- chat-ui.ts now uses contextService.addEvents with codemode: true instead of calling streamLLMResponse directly
- Add CodemodeValidationErrorEvent to detect when LLM doesn't output codemode tags, triggering retry with chastising error message
- Add feed item renderers for CodemodeResult and CodemodeValidationError in opentui-chat.tsx
- Update llm.ts to include CodemodeValidationErrorEvent in prompt conversion
- Add agent continuation loop in chat-ui for codemode follow-ups

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
1 parent f949003 commit 63e8675

7 files changed

Lines changed: 261 additions & 33 deletions

File tree

src/cli/chat-ui.ts

Lines changed: 120 additions & 23 deletions
Original file line number | Diff line number | Diff line change
@@ -6,19 +6,18 @@
66
*/
77
import type { AiError, LanguageModel } from "@effect/ai"
88
import type { Error as PlatformError, FileSystem } from "@effect/platform"
9-
import { Cause, Context, Effect, Fiber, Layer, Mailbox, Stream } from "effect"
10-
import { is } from "effect/Schema"
9+
import { Cause, Context, Effect, Fiber, Layer, Mailbox, Schema, Stream } from "effect"
1110
import {
1211
AssistantMessageEvent,
13-
type ContextEvent,
12+
CodemodeResultEvent,
13+
CodemodeValidationErrorEvent,
1414
LLMRequestInterruptedEvent,
1515
TextDeltaEvent,
1616
UserMessageEvent
1717
} from "../context.model.ts"
18-
import { ContextService } from "../context.service.ts"
19-
import type { ContextLoadError, ContextSaveError } from "../errors.ts"
18+
import { type ContextOrCodemodeEvent, ContextService } from "../context.service.ts"
19+
import type { CodeStorageError, ContextLoadError, ContextSaveError } from "../errors.ts"
2020
import type { CurrentLlmConfig } from "../llm-config.ts"
21-
import { streamLLMResponse } from "../llm.ts"
2221
import { type ChatController, runOpenTUIChat } from "./components/opentui-chat.tsx"
2322

2423
type ChatSignal =
@@ -32,7 +31,7 @@ export class ChatUI extends Context.Tag("@app/ChatUI")<
3231
contextName: string
3332
) => Effect.Effect<
3433
void,
35-
AiError.AiError | PlatformError.PlatformError | ContextLoadError | ContextSaveError,
34+
AiError.AiError | PlatformError.PlatformError | ContextLoadError | ContextSaveError | CodeStorageError,
3635
LanguageModel.LanguageModel | FileSystem.FileSystem | CurrentLlmConfig
3736
>
3837
}
@@ -81,7 +80,7 @@ const runChatLoop = (
8180
mailbox: Mailbox.Mailbox<ChatSignal>
8281
): Effect.Effect<
8382
void,
84-
AiError.AiError | PlatformError.PlatformError | ContextLoadError | ContextSaveError,
83+
AiError.AiError | PlatformError.PlatformError | ContextLoadError | ContextSaveError | CodeStorageError,
8584
LanguageModel.LanguageModel | FileSystem.FileSystem | CurrentLlmConfig
8685
> =>
8786
Effect.fn("ChatUI.runChatLoop")(function*() {
@@ -97,6 +96,18 @@ type TurnResult =
9796
| { readonly _tag: "continue" }
9897
| { readonly _tag: "exit" }
9998

99+
/** Check if event is displayable in the chat feed */
100+
const isDisplayableEvent = (event: ContextOrCodemodeEvent): boolean =>
101+
Schema.is(TextDeltaEvent)(event) ||
102+
Schema.is(AssistantMessageEvent)(event) ||
103+
Schema.is(CodemodeResultEvent)(event) ||
104+
Schema.is(CodemodeValidationErrorEvent)(event)
105+
106+
/** Check if event triggers continuation (agent loop) */
107+
const triggersContinuation = (event: ContextOrCodemodeEvent): boolean =>
108+
(Schema.is(CodemodeResultEvent)(event) && event.triggerAgentTurn === "after-current-turn") ||
109+
(Schema.is(CodemodeValidationErrorEvent)(event) && event.triggerAgentTurn === "after-current-turn")
110+
100111
const runChatTurn = (
101112
contextName: string,
102113
contextService: Context.Tag.Service<typeof ContextService>,
@@ -105,7 +116,7 @@ const runChatTurn = (
105116
pendingMessage: string | null
106117
): Effect.Effect<
107118
TurnResult,
108-
AiError.AiError | PlatformError.PlatformError | ContextLoadError | ContextSaveError,
119+
AiError.AiError | PlatformError.PlatformError | ContextLoadError | ContextSaveError | CodeStorageError,
109120
LanguageModel.LanguageModel | FileSystem.FileSystem | CurrentLlmConfig
110121
> =>
111122
Effect.fn("ChatUI.runChatTurn")(function*() {
@@ -129,28 +140,105 @@ const runChatTurn = (
129140
}
130141

131142
const userEvent = new UserMessageEvent({ content: userMessage })
132-
133-
yield* contextService.persistEvent(contextName, userEvent)
134143
chat.addEvent(userEvent)
135144

136-
const events = yield* contextService.load(contextName)
137145
let accumulatedText = ""
146+
let needsContinuation = false
147+
148+
// Use contextService.addEvents with codemode enabled
149+
const eventStream = contextService.addEvents(contextName, [userEvent], { codemode: true })
138150

139151
const streamFiber = yield* Effect.fork(
140-
streamLLMResponse(events).pipe(
141-
Stream.tap((event: ContextEvent) =>
152+
eventStream.pipe(
153+
Stream.tap((event: ContextOrCodemodeEvent) =>
142154
Effect.sync(() => {
143-
if (is(TextDeltaEvent)(event)) {
155+
if (Schema.is(TextDeltaEvent)(event)) {
144156
accumulatedText += event.delta
157+
}
158+
if (triggersContinuation(event)) {
159+
needsContinuation = true
160+
}
161+
if (isDisplayableEvent(event)) {
145162
chat.addEvent(event)
146163
}
147164
})
148165
),
149-
Stream.filter(is(AssistantMessageEvent)),
150-
Stream.tap((event) =>
151-
Effect.gen(function*() {
152-
yield* contextService.persistEvent(contextName, event)
153-
chat.addEvent(event)
166+
Stream.runDrain
167+
)
168+
)
169+
170+
const result = yield* awaitStreamCompletion(streamFiber, mailbox)
171+
172+
if (result._tag === "completed") {
173+
// If we need continuation (codemode result with output), run another turn
174+
if (needsContinuation) {
175+
return yield* runAgentContinuation(contextName, contextService, chat, mailbox)
176+
}
177+
return { _tag: "continue" } as const
178+
}
179+
180+
if (result._tag === "exit") {
181+
if (accumulatedText.length > 0) {
182+
const interruptedEvent = new LLMRequestInterruptedEvent({
183+
requestId: crypto.randomUUID(),
184+
reason: "user_cancel",
185+
partialResponse: accumulatedText
186+
})
187+
yield* contextService.persistEvent(contextName, interruptedEvent)
188+
chat.addEvent(interruptedEvent)
189+
}
190+
return { _tag: "exit" } as const
191+
}
192+
193+
// result._tag === "interrupted" - user hit return during streaming
194+
if (accumulatedText.length > 0) {
195+
const interruptedEvent = new LLMRequestInterruptedEvent({
196+
requestId: crypto.randomUUID(),
197+
reason: result.newMessage ? "user_new_message" : "user_cancel",
198+
partialResponse: accumulatedText
199+
})
200+
yield* contextService.persistEvent(contextName, interruptedEvent)
201+
chat.addEvent(interruptedEvent)
202+
}
203+
204+
if (result.newMessage) {
205+
return yield* runChatTurn(contextName, contextService, chat, mailbox, result.newMessage)
206+
}
207+
208+
return { _tag: "continue" } as const
209+
})()
210+
211+
/** Run agent continuation loop (for codemode results that need follow-up) */
212+
const runAgentContinuation = (
213+
contextName: string,
214+
contextService: Context.Tag.Service<typeof ContextService>,
215+
chat: ChatController,
216+
mailbox: Mailbox.Mailbox<ChatSignal>
217+
): Effect.Effect<
218+
TurnResult,
219+
AiError.AiError | PlatformError.PlatformError | ContextLoadError | ContextSaveError | CodeStorageError,
220+
LanguageModel.LanguageModel | FileSystem.FileSystem | CurrentLlmConfig
221+
> =>
222+
Effect.fn("ChatUI.runAgentContinuation")(function*() {
223+
let accumulatedText = ""
224+
let needsContinuation = false
225+
226+
// Empty input events - the persisted CodemodeResult triggers the turn
227+
const eventStream = contextService.addEvents(contextName, [], { codemode: true })
228+
229+
const streamFiber = yield* Effect.fork(
230+
eventStream.pipe(
231+
Stream.tap((event: ContextOrCodemodeEvent) =>
232+
Effect.sync(() => {
233+
if (Schema.is(TextDeltaEvent)(event)) {
234+
accumulatedText += event.delta
235+
}
236+
if (triggersContinuation(event)) {
237+
needsContinuation = true
238+
}
239+
if (isDisplayableEvent(event)) {
240+
chat.addEvent(event)
241+
}
154242
})
155243
),
156244
Stream.runDrain
@@ -160,6 +248,9 @@ const runChatTurn = (
160248
const result = yield* awaitStreamCompletion(streamFiber, mailbox)
161249

162250
if (result._tag === "completed") {
251+
if (needsContinuation) {
252+
return yield* runAgentContinuation(contextName, contextService, chat, mailbox)
253+
}
163254
return { _tag: "continue" } as const
164255
}
165256

@@ -176,7 +267,7 @@ const runChatTurn = (
176267
return { _tag: "exit" } as const
177268
}
178269

179-
// result._tag === "interrupted" - user hit return during streaming
270+
// Interrupted - save partial and return to wait for input
180271
if (accumulatedText.length > 0) {
181272
const interruptedEvent = new LLMRequestInterruptedEvent({
182273
requestId: crypto.randomUUID(),
@@ -200,9 +291,15 @@ type StreamResult =
200291
| { readonly _tag: "interrupted"; readonly newMessage: string | null }
201292

202293
const awaitStreamCompletion = (
203-
fiber: Fiber.RuntimeFiber<void, AiError.AiError | PlatformError.PlatformError | ContextLoadError | ContextSaveError>,
294+
fiber: Fiber.RuntimeFiber<
295+
void,
296+
AiError.AiError | PlatformError.PlatformError | ContextLoadError | ContextSaveError | CodeStorageError
297+
>,
204298
mailbox: Mailbox.Mailbox<ChatSignal>
205-
): Effect.Effect<StreamResult, AiError.AiError | PlatformError.PlatformError | ContextLoadError | ContextSaveError> =>
299+
): Effect.Effect<
300+
StreamResult,
301+
AiError.AiError | PlatformError.PlatformError | ContextLoadError | ContextSaveError | CodeStorageError
302+
> =>
206303
Effect.fn("ChatUI.awaitStreamCompletion")(function*() {
207304
const waitForFiber = Fiber.join(fiber).pipe(Effect.as({ _tag: "completed" } as StreamResult))
208305
const waitForInterrupt = Effect.gen(function*() {

src/cli/commands.ts

Lines changed: 12 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -14,6 +14,7 @@ import { AppConfig } from "../config.ts"
1414
import {
1515
AssistantMessageEvent,
1616
CodemodeResultEvent,
17+
CodemodeValidationErrorEvent,
1718
FileAttachmentEvent,
1819
type InputEvent,
1920
SystemPromptEvent,
@@ -209,6 +210,16 @@ const handleEvent = (
209210
return
210211
}
211212

213+
// Handle CodemodeValidationError (LLM didn't output codemode)
214+
if (Schema.is(CodemodeValidationErrorEvent)(event)) {
215+
if (options.raw) {
216+
yield* Console.log(JSON.stringify(event))
217+
} else {
218+
yield* Console.log(red(`\n⚠ LLM response missing <codemode> tags. Retrying...`))
219+
}
220+
return
221+
}
222+
212223
// Handle standard context events
213224
if (options.raw) {
214225
if (Schema.is(TextDeltaEvent)(event) && !options.showEphemeral) {
@@ -299,7 +310,7 @@ const runEventStream = (
299310

300311
if (!options.raw) {
301312
yield* Console.log(dim(`\n[Agent continuing... (iteration ${iteration})]`))
302-
yield* Console.log(`\n${assistantLabel}`)
313+
yield* Console.log(`\n${green("Assistant:")}`)
303314
}
304315

305316
// Reset for next turn - the persisted CodemodeResult will trigger LLM

0 commit comments

Comments (0)