From f9183c9cea2bfacab07f57676e645ee4c2a5798e Mon Sep 17 00:00:00 2001
From: Michael Bolin
Date: Sun, 26 Oct 2025 23:03:23 -0700
Subject: [PATCH] feat: annotate conversations with model_provider for filtering

---
 codex-rs/app-server-protocol/src/protocol.rs   |   8 +
 .../app-server/src/codex_message_processor.rs  |  40 ++-
 .../app-server/tests/suite/list_resume.rs      | 118 +++++++-
 codex-rs/core/src/rollout/list.rs              |  65 ++++-
 codex-rs/core/src/rollout/recorder.rs          |  13 +-
 codex-rs/core/src/rollout/tests.rs             | 268 ++++++++++++++++--
 codex-rs/core/tests/suite/cli_stream.rs        |  14 +-
 codex-rs/core/tests/suite/client.rs            |   3 +-
 codex-rs/core/tests/suite/review.rs            |   3 +-
 .../core/tests/suite/rollout_list_find.rs      |   3 +-
 codex-rs/exec/src/lib.rs                       |  12 +-
 codex-rs/protocol/src/protocol.rs              |   2 +
 codex-rs/tui/src/lib.rs                        |  11 +-
 codex-rs/tui/src/resume_picker.rs              |  74 ++++-
 14 files changed, 560 insertions(+), 74 deletions(-)

diff --git a/codex-rs/app-server-protocol/src/protocol.rs b/codex-rs/app-server-protocol/src/protocol.rs
index ab7341a2abc..1b43cc1f2d3 100644
--- a/codex-rs/app-server-protocol/src/protocol.rs
+++ b/codex-rs/app-server-protocol/src/protocol.rs
@@ -320,6 +320,12 @@ pub struct ListConversationsParams {
     /// Opaque pagination cursor returned by a previous call.
     #[serde(skip_serializing_if = "Option::is_none")]
     pub cursor: Option<String>,
+    /// Optional model provider filter (matches against session metadata).
+    /// - None => filter by the server's default model provider
+    /// - Some([]) => no filtering, include all providers
+    /// - Some([...]) => only include sessions with one of the specified providers
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub model_providers: Option<Vec<String>>,
 }
 
 #[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
@@ -331,6 +337,8 @@ pub struct ConversationSummary {
     /// RFC3339 timestamp string for the session start, if available.
     #[serde(skip_serializing_if = "Option::is_none")]
     pub timestamp: Option<String>,
+    /// Model provider recorded for the session (resolved when absent in metadata).
+    pub model_provider: String,
 }
 
 #[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
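
Note on the field semantics above (a sketch, not part of the diff): the three states of
`model_providers` are easy to conflate. Absent means "filter by the server's default
provider", an empty list disables provider filtering entirely, and a non-empty list
restricts results to the named providers. A caller might populate the struct like this,
assuming it is imported from the app-server protocol crate (the import path and the
`main` wrapper are illustrative):

    use codex_app_server_protocol::ListConversationsParams; // import path assumed

    fn main() {
        // Absent: the server substitutes its configured default model provider.
        let default_only = ListConversationsParams {
            page_size: Some(25),
            cursor: None,
            model_providers: None,
        };

        // Empty list: no provider filtering; sessions from every provider are returned.
        let all_providers = ListConversationsParams {
            page_size: Some(25),
            cursor: None,
            model_providers: Some(Vec::new()),
        };

        // Non-empty list: only sessions recorded under one of the named providers.
        let openai_only = ListConversationsParams {
            page_size: Some(25),
            cursor: None,
            model_providers: Some(vec!["openai".to_string()]),
        };

        let _ = (default_only, all_providers, openai_only);
    }
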
diff --git a/codex-rs/app-server/src/codex_message_processor.rs b/codex-rs/app-server/src/codex_message_processor.rs
index b2b242fb297..5912d1f60c4 100644
--- a/codex-rs/app-server/src/codex_message_processor.rs
+++ b/codex-rs/app-server/src/codex_message_processor.rs
@@ -818,19 +818,38 @@ impl CodexMessageProcessor {
         request_id: RequestId,
         params: ListConversationsParams,
     ) {
-        let page_size = params.page_size.unwrap_or(25);
+        let ListConversationsParams {
+            page_size,
+            cursor,
+            model_providers: model_provider,
+        } = params;
+        let page_size = page_size.unwrap_or(25);
         // Decode the optional cursor string to a Cursor via serde (Cursor implements Deserialize from string)
-        let cursor_obj: Option<Cursor> = match params.cursor {
+        let cursor_obj: Option<Cursor> = match cursor {
             Some(s) => serde_json::from_str::<Cursor>(&format!("\"{s}\"")).ok(),
             None => None,
         };
         let cursor_ref = cursor_obj.as_ref();
+        let model_provider_filter = match model_provider {
+            Some(providers) => {
+                if providers.is_empty() {
+                    None
+                } else {
+                    Some(providers)
+                }
+            }
+            None => Some(vec![self.config.model_provider_id.clone()]),
+        };
+        let model_provider_slice = model_provider_filter.as_deref();
+        let fallback_provider = self.config.model_provider_id.clone();
 
         let page = match RolloutRecorder::list_conversations(
             &self.config.codex_home,
             page_size,
             cursor_ref,
             INTERACTIVE_SESSION_SOURCES,
+            model_provider_slice,
+            fallback_provider.as_str(),
         )
         .await
         {
@@ -849,7 +868,7 @@
         let items = page
             .items
             .into_iter()
-            .filter_map(|it| extract_conversation_summary(it.path, &it.head))
+            .filter_map(|it| extract_conversation_summary(it.path, &it.head, &fallback_provider))
             .collect();
 
         // Encode next_cursor as a plain string
@@ -1627,6 +1646,7 @@ async fn on_exec_approval_response(
 fn extract_conversation_summary(
     path: PathBuf,
     head: &[serde_json::Value],
+    fallback_provider: &str,
 ) -> Option<ConversationSummary> {
     let session_meta = match head.first() {
         Some(first_line) => serde_json::from_value::<SessionMeta>(first_line.clone()).ok()?,
@@ -1651,12 +1671,17 @@
     } else {
         Some(session_meta.timestamp.clone())
     };
+    let conversation_id = session_meta.id;
+    let model_provider = session_meta
+        .model_provider
+        .unwrap_or_else(|| fallback_provider.to_string());
 
     Some(ConversationSummary {
-        conversation_id: session_meta.id,
+        conversation_id,
         timestamp,
         path,
         preview: preview.to_string(),
+        model_provider,
     })
 }
 
@@ -1680,7 +1705,8 @@ mod tests {
                "cwd": "/",
                "originator": "codex",
                "cli_version": "0.0.0",
-               "instructions": null
+               "instructions": null,
+               "model_provider": "test-provider"
            }),
            json!({
                "type": "message",
@@ -1700,7 +1726,8 @@
            }),
        ];
 
-        let summary = extract_conversation_summary(path.clone(), &head).expect("summary");
+        let summary =
+            extract_conversation_summary(path.clone(), &head, "test-provider").expect("summary");
 
        assert_eq!(summary.conversation_id, conversation_id);
        assert_eq!(
@@ -1709,6 +1736,7 @@
        );
        assert_eq!(summary.path, path);
        assert_eq!(summary.preview, "Count to 5");
+       assert_eq!(summary.model_provider, "test-provider");
        Ok(())
    }
 }
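
The handler above collapses the wire-level `Option<Vec<String>>` into the slice handed to
`RolloutRecorder::list_conversations`, and keeps the configured provider around as a
fallback for sessions whose metadata predates the new field. That mapping is the heart of
the feature, so here it is restated as a standalone sketch (the helper name is
hypothetical; the behavior mirrors the `model_provider_filter` computation in the hunk
above):

    /// None => filter by the given default provider; Some([]) => no filtering at all;
    /// Some(list) => filter by exactly those providers.
    fn resolve_provider_filter(
        requested: Option<Vec<String>>,
        default_provider: &str,
    ) -> Option<Vec<String>> {
        match requested {
            // Explicit empty list: caller wants sessions from every provider.
            Some(providers) if providers.is_empty() => None,
            // Explicit non-empty list: pass it through unchanged.
            Some(providers) => Some(providers),
            // Absent: fall back to the server's configured default provider.
            None => Some(vec![default_provider.to_string()]),
        }
    }

    fn main() {
        assert_eq!(
            resolve_provider_filter(None, "openai"),
            Some(vec!["openai".to_string()])
        );
        assert_eq!(resolve_provider_filter(Some(Vec::new()), "openai"), None);
        assert_eq!(
            resolve_provider_filter(Some(vec!["beta".to_string()]), "openai"),
            Some(vec!["beta".to_string()])
        );
    }
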
"2025-01-02T12:00:00Z", "Hello A", + Some("openai"), ); create_fake_rollout( codex_home.path(), "2025-01-01T13-00-00", "2025-01-01T13:00:00Z", "Hello B", + Some("openai"), ); create_fake_rollout( codex_home.path(), "2025-01-01T12-00-00", "2025-01-01T12:00:00Z", "Hello C", + None, ); let mut mcp = McpProcess::new(codex_home.path()) @@ -57,6 +60,7 @@ async fn test_list_and_resume_conversations() { .send_list_conversations_request(ListConversationsParams { page_size: Some(2), cursor: None, + model_providers: None, }) .await .expect("send listConversations"); @@ -74,6 +78,8 @@ async fn test_list_and_resume_conversations() { // Newest first; preview text should match assert_eq!(items[0].preview, "Hello A"); assert_eq!(items[1].preview, "Hello B"); + assert_eq!(items[0].model_provider, "openai"); + assert_eq!(items[1].model_provider, "openai"); assert!(items[0].path.is_absolute()); assert!(next_cursor.is_some()); @@ -82,6 +88,7 @@ async fn test_list_and_resume_conversations() { .send_list_conversations_request(ListConversationsParams { page_size: Some(2), cursor: next_cursor, + model_providers: None, }) .await .expect("send listConversations page 2"); @@ -99,7 +106,88 @@ async fn test_list_and_resume_conversations() { } = to_response::(resp2).expect("deserialize response"); assert_eq!(items2.len(), 1); assert_eq!(items2[0].preview, "Hello C"); - assert!(next2.is_some()); + assert_eq!(items2[0].model_provider, "openai"); + assert_eq!(next2, None); + + // Add a conversation with an explicit non-OpenAI provider for filter tests. + create_fake_rollout( + codex_home.path(), + "2025-01-01T11-30-00", + "2025-01-01T11:30:00Z", + "Hello TP", + Some("test-provider"), + ); + + // Filtering by model provider should return only matching sessions. + let filter_req_id = mcp + .send_list_conversations_request(ListConversationsParams { + page_size: Some(10), + cursor: None, + model_providers: Some(vec!["test-provider".to_string()]), + }) + .await + .expect("send listConversations filtered"); + let filter_resp: JSONRPCResponse = timeout( + DEFAULT_READ_TIMEOUT, + mcp.read_stream_until_response_message(RequestId::Integer(filter_req_id)), + ) + .await + .expect("listConversations filtered timeout") + .expect("listConversations filtered resp"); + let ListConversationsResponse { + items: filtered_items, + next_cursor: filtered_next, + } = to_response::(filter_resp).expect("deserialize filtered"); + assert_eq!(filtered_items.len(), 1); + assert_eq!(filtered_next, None); + assert_eq!(filtered_items[0].preview, "Hello TP"); + assert_eq!(filtered_items[0].model_provider, "test-provider"); + + // Empty filter should include every session regardless of provider metadata. 
+ let unfiltered_req_id = mcp + .send_list_conversations_request(ListConversationsParams { + page_size: Some(10), + cursor: None, + model_providers: Some(Vec::new()), + }) + .await + .expect("send listConversations unfiltered"); + let unfiltered_resp: JSONRPCResponse = timeout( + DEFAULT_READ_TIMEOUT, + mcp.read_stream_until_response_message(RequestId::Integer(unfiltered_req_id)), + ) + .await + .expect("listConversations unfiltered timeout") + .expect("listConversations unfiltered resp"); + let ListConversationsResponse { + items: unfiltered_items, + next_cursor: unfiltered_next, + } = to_response::(unfiltered_resp) + .expect("deserialize unfiltered response"); + assert_eq!(unfiltered_items.len(), 4); + assert!(unfiltered_next.is_none()); + + let empty_req_id = mcp + .send_list_conversations_request(ListConversationsParams { + page_size: Some(10), + cursor: None, + model_providers: Some(vec!["other".to_string()]), + }) + .await + .expect("send listConversations filtered empty"); + let empty_resp: JSONRPCResponse = timeout( + DEFAULT_READ_TIMEOUT, + mcp.read_stream_until_response_message(RequestId::Integer(empty_req_id)), + ) + .await + .expect("listConversations filtered empty timeout") + .expect("listConversations filtered empty resp"); + let ListConversationsResponse { + items: empty_items, + next_cursor: empty_next, + } = to_response::(empty_resp).expect("deserialize filtered empty"); + assert!(empty_items.is_empty()); + assert!(empty_next.is_none()); // Now resume one of the sessions and expect a SessionConfigured notification and response. let resume_req_id = mcp @@ -152,7 +240,13 @@ async fn test_list_and_resume_conversations() { assert!(!conversation_id.to_string().is_empty()); } -fn create_fake_rollout(codex_home: &Path, filename_ts: &str, meta_rfc3339: &str, preview: &str) { +fn create_fake_rollout( + codex_home: &Path, + filename_ts: &str, + meta_rfc3339: &str, + preview: &str, + model_provider: Option<&str>, +) { let uuid = Uuid::new_v4(); // sessions/YYYY/MM/DD/ derived from filename_ts (YYYY-MM-DDThh-mm-ss) let year = &filename_ts[0..4]; @@ -164,18 +258,22 @@ fn create_fake_rollout(codex_home: &Path, filename_ts: &str, meta_rfc3339: &str, let file_path = dir.join(format!("rollout-{filename_ts}-{uuid}.jsonl")); let mut lines = Vec::new(); // Meta line with timestamp (flattened meta in payload for new schema) + let mut payload = json!({ + "id": uuid, + "timestamp": meta_rfc3339, + "cwd": "/", + "originator": "codex", + "cli_version": "0.0.0", + "instructions": null, + }); + if let Some(provider) = model_provider { + payload["model_provider"] = json!(provider); + } lines.push( json!({ "timestamp": meta_rfc3339, "type": "session_meta", - "payload": { - "id": uuid, - "timestamp": meta_rfc3339, - "cwd": "/", - "originator": "codex", - "cli_version": "0.0.0", - "instructions": null - } + "payload": payload }) .to_string(), ); diff --git a/codex-rs/core/src/rollout/list.rs b/codex-rs/core/src/rollout/list.rs index b142bc2d04e..4a3fec7934e 100644 --- a/codex-rs/core/src/rollout/list.rs +++ b/codex-rs/core/src/rollout/list.rs @@ -54,6 +54,7 @@ struct HeadTailSummary { saw_session_meta: bool, saw_user_event: bool, source: Option, + model_provider: Option, created_at: Option, updated_at: Option, } @@ -109,6 +110,8 @@ pub(crate) async fn get_conversations( page_size: usize, cursor: Option<&Cursor>, allowed_sources: &[SessionSource], + model_providers: Option<&[String]>, + default_provider: &str, ) -> io::Result { let mut root = codex_home.to_path_buf(); root.push(SESSIONS_SUBDIR); @@ 
-124,8 +127,17 @@ pub(crate) async fn get_conversations( let anchor = cursor.cloned(); - let result = - traverse_directories_for_paths(root.clone(), page_size, anchor, allowed_sources).await?; + let provider_matcher = + model_providers.and_then(|filters| ProviderMatcher::new(filters, default_provider)); + + let result = traverse_directories_for_paths( + root.clone(), + page_size, + anchor, + allowed_sources, + provider_matcher.as_ref(), + ) + .await?; Ok(result) } @@ -145,6 +157,7 @@ async fn traverse_directories_for_paths( page_size: usize, anchor: Option, allowed_sources: &[SessionSource], + provider_matcher: Option<&ProviderMatcher<'_>>, ) -> io::Result { let mut items: Vec = Vec::with_capacity(page_size); let mut scanned_files = 0usize; @@ -153,6 +166,7 @@ async fn traverse_directories_for_paths( Some(c) => (c.ts, c.id), None => (OffsetDateTime::UNIX_EPOCH, Uuid::nil()), }; + let mut more_matches_available = false; let year_dirs = collect_dirs_desc(&root, |s| s.parse::().ok()).await?; @@ -184,6 +198,7 @@ async fn traverse_directories_for_paths( for (ts, sid, _name_str, path) in day_files.into_iter() { scanned_files += 1; if scanned_files >= MAX_SCAN_FILES && items.len() >= page_size { + more_matches_available = true; break 'outer; } if !anchor_passed { @@ -194,6 +209,7 @@ async fn traverse_directories_for_paths( } } if items.len() == page_size { + more_matches_available = true; break 'outer; } // Read head and simultaneously detect message events within the same @@ -208,6 +224,11 @@ async fn traverse_directories_for_paths( { continue; } + if let Some(matcher) = provider_matcher + && !matcher.matches(summary.model_provider.as_deref()) + { + continue; + } // Apply filters: must have session meta and at least one user message event if summary.saw_session_meta && summary.saw_user_event { let HeadTailSummary { @@ -231,12 +252,21 @@ async fn traverse_directories_for_paths( } } - let next = build_next_cursor(&items); + let reached_scan_cap = scanned_files >= MAX_SCAN_FILES; + if reached_scan_cap && !items.is_empty() { + more_matches_available = true; + } + + let next = if more_matches_available { + build_next_cursor(&items) + } else { + None + }; Ok(ConversationsPage { items, next_cursor: next, num_scanned_files: scanned_files, - reached_scan_cap: scanned_files >= MAX_SCAN_FILES, + reached_scan_cap, }) } @@ -328,6 +358,32 @@ fn parse_timestamp_uuid_from_filename(name: &str) -> Option<(OffsetDateTime, Uui Some((ts, uuid)) } +struct ProviderMatcher<'a> { + filters: &'a [String], + matches_default_provider: bool, +} + +impl<'a> ProviderMatcher<'a> { + fn new(filters: &'a [String], default_provider: &'a str) -> Option { + if filters.is_empty() { + return None; + } + + let matches_default_provider = filters.iter().any(|provider| provider == default_provider); + Some(Self { + filters, + matches_default_provider, + }) + } + + fn matches(&self, session_provider: Option<&str>) -> bool { + match session_provider { + Some(provider) => self.filters.iter().any(|candidate| candidate == provider), + None => self.matches_default_provider, + } + } +} + async fn read_head_and_tail( path: &Path, head_limit: usize, @@ -354,6 +410,7 @@ async fn read_head_and_tail( match rollout_line.item { RolloutItem::SessionMeta(session_meta_line) => { summary.source = Some(session_meta_line.meta.source); + summary.model_provider = session_meta_line.meta.model_provider.clone(); summary.created_at = summary .created_at .clone() diff --git a/codex-rs/core/src/rollout/recorder.rs b/codex-rs/core/src/rollout/recorder.rs index 
95f5d479b8d..f516e3a1796 100644 --- a/codex-rs/core/src/rollout/recorder.rs +++ b/codex-rs/core/src/rollout/recorder.rs @@ -97,8 +97,18 @@ impl RolloutRecorder { page_size: usize, cursor: Option<&Cursor>, allowed_sources: &[SessionSource], + model_providers: Option<&[String]>, + default_provider: &str, ) -> std::io::Result { - get_conversations(codex_home, page_size, cursor, allowed_sources).await + get_conversations( + codex_home, + page_size, + cursor, + allowed_sources, + model_providers, + default_provider, + ) + .await } /// Attempt to create a new [`RolloutRecorder`]. If the sessions directory @@ -137,6 +147,7 @@ impl RolloutRecorder { cli_version: env!("CARGO_PKG_VERSION").to_string(), instructions, source, + model_provider: Some(config.model_provider_id.clone()), }), ) } diff --git a/codex-rs/core/src/rollout/tests.rs b/codex-rs/core/src/rollout/tests.rs index 617e5189c2c..a7bc9f8c8f9 100644 --- a/codex-rs/core/src/rollout/tests.rs +++ b/codex-rs/core/src/rollout/tests.rs @@ -32,6 +32,14 @@ use codex_protocol::protocol::SessionSource; use codex_protocol::protocol::UserMessageEvent; const NO_SOURCE_FILTER: &[SessionSource] = &[]; +const TEST_PROVIDER: &str = "test-provider"; + +fn provider_vec(providers: &[&str]) -> Vec { + providers + .iter() + .map(std::string::ToString::to_string) + .collect() +} fn write_session_file( root: &Path, @@ -39,6 +47,24 @@ fn write_session_file( uuid: Uuid, num_records: usize, source: Option, +) -> std::io::Result<(OffsetDateTime, Uuid)> { + write_session_file_with_provider( + root, + ts_str, + uuid, + num_records, + source, + Some("test-provider"), + ) +} + +fn write_session_file_with_provider( + root: &Path, + ts_str: &str, + uuid: Uuid, + num_records: usize, + source: Option, + model_provider: Option<&str>, ) -> std::io::Result<(OffsetDateTime, Uuid)> { let format: &[FormatItem] = format_description!("[year]-[month]-[day]T[hour]-[minute]-[second]"); @@ -68,6 +94,9 @@ fn write_session_file( if let Some(source) = source { payload["source"] = serde_json::to_value(source).unwrap(); } + if let Some(provider) = model_provider { + payload["model_provider"] = serde_json::Value::String(provider.to_string()); + } let meta = serde_json::json!({ "timestamp": ts_str, @@ -134,9 +163,17 @@ async fn test_list_conversations_latest_first() { ) .unwrap(); - let page = get_conversations(home, 10, None, INTERACTIVE_SESSION_SOURCES) - .await - .unwrap(); + let provider_filter = provider_vec(&[TEST_PROVIDER]); + let page = get_conversations( + home, + 10, + None, + INTERACTIVE_SESSION_SOURCES, + Some(provider_filter.as_slice()), + TEST_PROVIDER, + ) + .await + .unwrap(); // Build expected objects let p1 = home @@ -166,6 +203,7 @@ async fn test_list_conversations_latest_first() { "originator": "test_originator", "cli_version": "test_version", "source": "vscode", + "model_provider": "test-provider", })]; let head_2 = vec![serde_json::json!({ "id": u2, @@ -175,6 +213,7 @@ async fn test_list_conversations_latest_first() { "originator": "test_originator", "cli_version": "test_version", "source": "vscode", + "model_provider": "test-provider", })]; let head_1 = vec![serde_json::json!({ "id": u1, @@ -184,11 +223,9 @@ async fn test_list_conversations_latest_first() { "originator": "test_originator", "cli_version": "test_version", "source": "vscode", + "model_provider": "test-provider", })]; - let expected_cursor: Cursor = - serde_json::from_str(&format!("\"2025-01-01T12-00-00|{u1}\"")).unwrap(); - let expected = ConversationsPage { items: vec![ ConversationItem { @@ -213,7 +250,7 
@@ async fn test_list_conversations_latest_first() { updated_at: Some("2025-01-01T12-00-00".into()), }, ], - next_cursor: Some(expected_cursor), + next_cursor: None, num_scanned_files: 3, reached_scan_cap: false, }; @@ -275,9 +312,17 @@ async fn test_pagination_cursor() { ) .unwrap(); - let page1 = get_conversations(home, 2, None, INTERACTIVE_SESSION_SOURCES) - .await - .unwrap(); + let provider_filter = provider_vec(&[TEST_PROVIDER]); + let page1 = get_conversations( + home, + 2, + None, + INTERACTIVE_SESSION_SOURCES, + Some(provider_filter.as_slice()), + TEST_PROVIDER, + ) + .await + .unwrap(); let p5 = home .join("sessions") .join("2025") @@ -298,6 +343,7 @@ async fn test_pagination_cursor() { "originator": "test_originator", "cli_version": "test_version", "source": "vscode", + "model_provider": "test-provider", })]; let head_4 = vec![serde_json::json!({ "id": u4, @@ -307,6 +353,7 @@ async fn test_pagination_cursor() { "originator": "test_originator", "cli_version": "test_version", "source": "vscode", + "model_provider": "test-provider", })]; let expected_cursor1: Cursor = serde_json::from_str(&format!("\"2025-03-04T09-00-00|{u4}\"")).unwrap(); @@ -338,6 +385,8 @@ async fn test_pagination_cursor() { 2, page1.next_cursor.as_ref(), INTERACTIVE_SESSION_SOURCES, + Some(provider_filter.as_slice()), + TEST_PROVIDER, ) .await .unwrap(); @@ -361,6 +410,7 @@ async fn test_pagination_cursor() { "originator": "test_originator", "cli_version": "test_version", "source": "vscode", + "model_provider": "test-provider", })]; let head_2 = vec![serde_json::json!({ "id": u2, @@ -370,6 +420,7 @@ async fn test_pagination_cursor() { "originator": "test_originator", "cli_version": "test_version", "source": "vscode", + "model_provider": "test-provider", })]; let expected_cursor2: Cursor = serde_json::from_str(&format!("\"2025-03-02T09-00-00|{u2}\"")).unwrap(); @@ -401,6 +452,8 @@ async fn test_pagination_cursor() { 2, page2.next_cursor.as_ref(), INTERACTIVE_SESSION_SOURCES, + Some(provider_filter.as_slice()), + TEST_PROVIDER, ) .await .unwrap(); @@ -418,9 +471,8 @@ async fn test_pagination_cursor() { "originator": "test_originator", "cli_version": "test_version", "source": "vscode", + "model_provider": "test-provider", })]; - let expected_cursor3: Cursor = - serde_json::from_str(&format!("\"2025-03-01T09-00-00|{u1}\"")).unwrap(); let expected_page3 = ConversationsPage { items: vec![ConversationItem { path: p1, @@ -429,7 +481,7 @@ async fn test_pagination_cursor() { created_at: Some("2025-03-01T09-00-00".into()), updated_at: Some("2025-03-01T09-00-00".into()), }], - next_cursor: Some(expected_cursor3), + next_cursor: None, num_scanned_files: 5, // scanned 05, 04 (anchor), 03, 02 (anchor), 01 reached_scan_cap: false, }; @@ -445,9 +497,17 @@ async fn test_get_conversation_contents() { let ts = "2025-04-01T10-30-00"; write_session_file(home, ts, uuid, 2, Some(SessionSource::VSCode)).unwrap(); - let page = get_conversations(home, 1, None, INTERACTIVE_SESSION_SOURCES) - .await - .unwrap(); + let provider_filter = provider_vec(&[TEST_PROVIDER]); + let page = get_conversations( + home, + 1, + None, + INTERACTIVE_SESSION_SOURCES, + Some(provider_filter.as_slice()), + TEST_PROVIDER, + ) + .await + .unwrap(); let path = &page.items[0].path; let content = get_conversation(path).await.unwrap(); @@ -467,8 +527,8 @@ async fn test_get_conversation_contents() { "originator": "test_originator", "cli_version": "test_version", "source": "vscode", + "model_provider": "test-provider", })]; - let expected_cursor: Cursor = 
serde_json::from_str(&format!("\"{ts}|{uuid}\"")).unwrap(); let expected_page = ConversationsPage { items: vec![ConversationItem { path: expected_path, @@ -477,7 +537,7 @@ async fn test_get_conversation_contents() { created_at: Some(ts.into()), updated_at: Some(ts.into()), }], - next_cursor: Some(expected_cursor), + next_cursor: None, num_scanned_files: 1, reached_scan_cap: false, }; @@ -495,6 +555,7 @@ async fn test_get_conversation_contents() { "originator": "test_originator", "cli_version": "test_version", "source": "vscode", + "model_provider": "test-provider", } }); let user_event = serde_json::json!({ @@ -532,6 +593,7 @@ async fn test_tail_includes_last_response_items() -> Result<()> { originator: "test_originator".into(), cli_version: "test_version".into(), source: SessionSource::VSCode, + model_provider: Some("test-provider".into()), }, git: None, }), @@ -563,7 +625,16 @@ async fn test_tail_includes_last_response_items() -> Result<()> { } drop(file); - let page = get_conversations(home, 1, None, INTERACTIVE_SESSION_SOURCES).await?; + let provider_filter = provider_vec(&[TEST_PROVIDER]); + let page = get_conversations( + home, + 1, + None, + INTERACTIVE_SESSION_SOURCES, + Some(provider_filter.as_slice()), + TEST_PROVIDER, + ) + .await?; let item = page.items.first().expect("conversation item"); let tail_len = item.tail.len(); assert_eq!(tail_len, 10usize.min(total_messages)); @@ -615,6 +686,7 @@ async fn test_tail_handles_short_sessions() -> Result<()> { originator: "test_originator".into(), cli_version: "test_version".into(), source: SessionSource::VSCode, + model_provider: Some("test-provider".into()), }, git: None, }), @@ -645,7 +717,16 @@ async fn test_tail_handles_short_sessions() -> Result<()> { } drop(file); - let page = get_conversations(home, 1, None, INTERACTIVE_SESSION_SOURCES).await?; + let provider_filter = provider_vec(&[TEST_PROVIDER]); + let page = get_conversations( + home, + 1, + None, + INTERACTIVE_SESSION_SOURCES, + Some(provider_filter.as_slice()), + TEST_PROVIDER, + ) + .await?; let tail = &page.items.first().expect("conversation item").tail; assert_eq!(tail.len(), 3); @@ -699,6 +780,7 @@ async fn test_tail_skips_trailing_non_responses() -> Result<()> { originator: "test_originator".into(), cli_version: "test_version".into(), source: SessionSource::VSCode, + model_provider: Some("test-provider".into()), }, git: None, }), @@ -743,7 +825,16 @@ async fn test_tail_skips_trailing_non_responses() -> Result<()> { writeln!(file, "{}", serde_json::to_string(&shutdown_event)?)?; drop(file); - let page = get_conversations(home, 1, None, INTERACTIVE_SESSION_SOURCES).await?; + let provider_filter = provider_vec(&[TEST_PROVIDER]); + let page = get_conversations( + home, + 1, + None, + INTERACTIVE_SESSION_SOURCES, + Some(provider_filter.as_slice()), + TEST_PROVIDER, + ) + .await?; let tail = &page.items.first().expect("conversation item").tail; let expected: Vec = (0..4) @@ -785,9 +876,17 @@ async fn test_stable_ordering_same_second_pagination() { write_session_file(home, ts, u2, 0, Some(SessionSource::VSCode)).unwrap(); write_session_file(home, ts, u3, 0, Some(SessionSource::VSCode)).unwrap(); - let page1 = get_conversations(home, 2, None, INTERACTIVE_SESSION_SOURCES) - .await - .unwrap(); + let provider_filter = provider_vec(&[TEST_PROVIDER]); + let page1 = get_conversations( + home, + 2, + None, + INTERACTIVE_SESSION_SOURCES, + Some(provider_filter.as_slice()), + TEST_PROVIDER, + ) + .await + .unwrap(); let p3 = home .join("sessions") @@ -810,6 +909,7 @@ async fn 
test_stable_ordering_same_second_pagination() { "originator": "test_originator", "cli_version": "test_version", "source": "vscode", + "model_provider": "test-provider", })] }; let expected_cursor1: Cursor = serde_json::from_str(&format!("\"{ts}|{u2}\"")).unwrap(); @@ -841,6 +941,8 @@ async fn test_stable_ordering_same_second_pagination() { 2, page1.next_cursor.as_ref(), INTERACTIVE_SESSION_SOURCES, + Some(provider_filter.as_slice()), + TEST_PROVIDER, ) .await .unwrap(); @@ -850,7 +952,6 @@ async fn test_stable_ordering_same_second_pagination() { .join("07") .join("01") .join(format!("rollout-2025-07-01T00-00-00-{u1}.jsonl")); - let expected_cursor2: Cursor = serde_json::from_str(&format!("\"{ts}|{u1}\"")).unwrap(); let expected_page2 = ConversationsPage { items: vec![ConversationItem { path: p1, @@ -859,7 +960,7 @@ async fn test_stable_ordering_same_second_pagination() { created_at: Some(ts.to_string()), updated_at: Some(ts.to_string()), }], - next_cursor: Some(expected_cursor2), + next_cursor: None, num_scanned_files: 3, // scanned u3, u2 (anchor), u1 reached_scan_cap: false, }; @@ -891,9 +992,17 @@ async fn test_source_filter_excludes_non_matching_sessions() { ) .unwrap(); - let interactive_only = get_conversations(home, 10, None, INTERACTIVE_SESSION_SOURCES) - .await - .unwrap(); + let provider_filter = provider_vec(&[TEST_PROVIDER]); + let interactive_only = get_conversations( + home, + 10, + None, + INTERACTIVE_SESSION_SOURCES, + Some(provider_filter.as_slice()), + TEST_PROVIDER, + ) + .await + .unwrap(); let paths: Vec<_> = interactive_only .items .iter() @@ -905,7 +1014,7 @@ async fn test_source_filter_excludes_non_matching_sessions() { path.ends_with("rollout-2025-08-02T10-00-00-00000000-0000-0000-0000-00000000002a.jsonl") })); - let all_sessions = get_conversations(home, 10, None, NO_SOURCE_FILTER) + let all_sessions = get_conversations(home, 10, None, NO_SOURCE_FILTER, None, TEST_PROVIDER) .await .unwrap(); let all_paths: Vec<_> = all_sessions @@ -921,3 +1030,102 @@ async fn test_source_filter_excludes_non_matching_sessions() { path.ends_with("rollout-2025-08-01T10-00-00-00000000-0000-0000-0000-00000000004d.jsonl") })); } + +#[tokio::test] +async fn test_model_provider_filter_selects_only_matching_sessions() -> Result<()> { + let temp = TempDir::new().unwrap(); + let home = temp.path(); + + let openai_id = Uuid::from_u128(1); + let beta_id = Uuid::from_u128(2); + let none_id = Uuid::from_u128(3); + + write_session_file_with_provider( + home, + "2025-09-01T12-00-00", + openai_id, + 1, + Some(SessionSource::VSCode), + Some("openai"), + )?; + write_session_file_with_provider( + home, + "2025-09-01T11-00-00", + beta_id, + 1, + Some(SessionSource::VSCode), + Some("beta"), + )?; + write_session_file_with_provider( + home, + "2025-09-01T10-00-00", + none_id, + 1, + Some(SessionSource::VSCode), + None, + )?; + + let openai_id_str = openai_id.to_string(); + let none_id_str = none_id.to_string(); + let openai_filter = provider_vec(&["openai"]); + let openai_sessions = get_conversations( + home, + 10, + None, + NO_SOURCE_FILTER, + Some(openai_filter.as_slice()), + "openai", + ) + .await?; + assert_eq!(openai_sessions.items.len(), 2); + let openai_ids: Vec<_> = openai_sessions + .items + .iter() + .filter_map(|item| { + item.head + .first() + .and_then(|value| value.get("id")) + .and_then(serde_json::Value::as_str) + .map(str::to_string) + }) + .collect(); + assert!(openai_ids.contains(&openai_id_str)); + assert!(openai_ids.contains(&none_id_str)); + + let beta_filter = 
provider_vec(&["beta"]); + let beta_sessions = get_conversations( + home, + 10, + None, + NO_SOURCE_FILTER, + Some(beta_filter.as_slice()), + "openai", + ) + .await?; + assert_eq!(beta_sessions.items.len(), 1); + let beta_id_str = beta_id.to_string(); + let beta_head = beta_sessions + .items + .first() + .and_then(|item| item.head.first()) + .and_then(|value| value.get("id")) + .and_then(serde_json::Value::as_str); + assert_eq!(beta_head, Some(beta_id_str.as_str())); + + let unknown_filter = provider_vec(&["unknown"]); + let unknown_sessions = get_conversations( + home, + 10, + None, + NO_SOURCE_FILTER, + Some(unknown_filter.as_slice()), + "openai", + ) + .await?; + assert!(unknown_sessions.items.is_empty()); + + let all_sessions = get_conversations(home, 10, None, NO_SOURCE_FILTER, None, "openai").await?; + assert_eq!(all_sessions.items.len(), 3); + + Ok(()) +} diff --git a/codex-rs/core/tests/suite/cli_stream.rs b/codex-rs/core/tests/suite/cli_stream.rs index 497730926ae..4c1f5fb8a88 100644 --- a/codex-rs/core/tests/suite/cli_stream.rs +++ b/codex-rs/core/tests/suite/cli_stream.rs @@ -75,9 +75,17 @@ async fn chat_mode_stream_cli() { server.verify().await; // Verify a new session rollout was created and is discoverable via list_conversations - let page = RolloutRecorder::list_conversations(home.path(), 10, None, &[]) - .await - .expect("list conversations"); + let provider_filter = vec!["mock".to_string()]; + let page = RolloutRecorder::list_conversations( + home.path(), + 10, + None, + &[], + Some(provider_filter.as_slice()), + "mock", + ) + .await + .expect("list conversations"); assert!( !page.items.is_empty(), "expected at least one session to be listed" diff --git a/codex-rs/core/tests/suite/client.rs b/codex-rs/core/tests/suite/client.rs index 8730f1f38f7..e302a1c1142 100644 --- a/codex-rs/core/tests/suite/client.rs +++ b/codex-rs/core/tests/suite/client.rs @@ -154,7 +154,8 @@ async fn resume_includes_initial_messages_and_sends_prior_items() { "instructions": "be nice", "cwd": ".", "originator": "test_originator", - "cli_version": "test_version" + "cli_version": "test_version", + "model_provider": "test-provider" } }) ) diff --git a/codex-rs/core/tests/suite/review.rs b/codex-rs/core/tests/suite/review.rs index 422acd922f7..d4c9f66d810 100644 --- a/codex-rs/core/tests/suite/review.rs +++ b/codex-rs/core/tests/suite/review.rs @@ -375,7 +375,8 @@ async fn review_input_isolated_from_parent_history() { "instructions": null, "cwd": ".", "originator": "test_originator", - "cli_version": "test_version" + "cli_version": "test_version", + "model_provider": "test-provider" } }); f.write_all(format!("{meta_line}\n").as_bytes()) diff --git a/codex-rs/core/tests/suite/rollout_list_find.rs b/codex-rs/core/tests/suite/rollout_list_find.rs index a38f0565c0f..1d40718d4b2 100644 --- a/codex-rs/core/tests/suite/rollout_list_find.rs +++ b/codex-rs/core/tests/suite/rollout_list_find.rs @@ -28,7 +28,8 @@ fn write_minimal_rollout_with_id(codex_home: &Path, id: Uuid) -> PathBuf { "instructions": null, "cwd": ".", "originator": "test", - "cli_version": "test" + "cli_version": "test", + "model_provider": "test-provider" } }) ) diff --git a/codex-rs/exec/src/lib.rs b/codex-rs/exec/src/lib.rs index 2e615df0c6b..10a43761616 100644 --- a/codex-rs/exec/src/lib.rs +++ b/codex-rs/exec/src/lib.rs @@ -389,8 +389,16 @@ async fn resolve_resume_path( args: &crate::cli::ResumeArgs, ) -> anyhow::Result> { if args.last { - match codex_core::RolloutRecorder::list_conversations(&config.codex_home, 1, None, &[]) - .await + 
let default_provider_filter = vec![config.model_provider_id.clone()]; + match codex_core::RolloutRecorder::list_conversations( + &config.codex_home, + 1, + None, + &[], + Some(default_provider_filter.as_slice()), + &config.model_provider_id, + ) + .await { Ok(page) => Ok(page.items.first().map(|it| it.path.clone())), Err(e) => { diff --git a/codex-rs/protocol/src/protocol.rs b/codex-rs/protocol/src/protocol.rs index 2334561e0e7..eb1aa03c353 100644 --- a/codex-rs/protocol/src/protocol.rs +++ b/codex-rs/protocol/src/protocol.rs @@ -941,6 +941,7 @@ pub struct SessionMeta { pub instructions: Option, #[serde(default)] pub source: SessionSource, + pub model_provider: Option, } impl Default for SessionMeta { @@ -953,6 +954,7 @@ impl Default for SessionMeta { cli_version: String::new(), instructions: None, source: SessionSource::default(), + model_provider: None, } } } diff --git a/codex-rs/tui/src/lib.rs b/codex-rs/tui/src/lib.rs index 50f6b1a0ddf..9030b7f6659 100644 --- a/codex-rs/tui/src/lib.rs +++ b/codex-rs/tui/src/lib.rs @@ -391,11 +391,14 @@ async fn run_ratatui_app( } } } else if cli.resume_last { + let provider_filter = vec![config.model_provider_id.clone()]; match RolloutRecorder::list_conversations( &config.codex_home, 1, None, INTERACTIVE_SESSION_SOURCES, + Some(provider_filter.as_slice()), + &config.model_provider_id, ) .await { @@ -407,7 +410,13 @@ async fn run_ratatui_app( Err(_) => resume_picker::ResumeSelection::StartFresh, } } else if cli.resume_picker { - match resume_picker::run_resume_picker(&mut tui, &config.codex_home).await? { + match resume_picker::run_resume_picker( + &mut tui, + &config.codex_home, + &config.model_provider_id, + ) + .await? + { resume_picker::ResumeSelection::Exit => { restore(); session_log::log_session_end(); diff --git a/codex-rs/tui/src/resume_picker.rs b/codex-rs/tui/src/resume_picker.rs index b735ff93742..3104b32735f 100644 --- a/codex-rs/tui/src/resume_picker.rs +++ b/codex-rs/tui/src/resume_picker.rs @@ -49,6 +49,7 @@ struct PageLoadRequest { cursor: Option, request_token: usize, search_token: Option, + default_provider: String, } type PageLoader = Arc; @@ -64,19 +65,28 @@ enum BackgroundEvent { /// Interactive session picker that lists recorded rollout files with simple /// search and pagination. Shows the first user input as the preview, relative /// time (e.g., "5 seconds ago"), and the absolute path. 
-pub async fn run_resume_picker(tui: &mut Tui, codex_home: &Path) -> Result { +pub async fn run_resume_picker( + tui: &mut Tui, + codex_home: &Path, + default_provider: &str, +) -> Result { let alt = AltScreenGuard::enter(tui); let (bg_tx, bg_rx) = mpsc::unbounded_channel(); + let default_provider = default_provider.to_string(); + let loader_tx = bg_tx.clone(); let page_loader: PageLoader = Arc::new(move |request: PageLoadRequest| { let tx = loader_tx.clone(); tokio::spawn(async move { + let provider_filter = vec![request.default_provider.clone()]; let page = RolloutRecorder::list_conversations( &request.codex_home, PAGE_SIZE, request.cursor.as_ref(), INTERACTIVE_SESSION_SOURCES, + Some(provider_filter.as_slice()), + request.default_provider.as_str(), ) .await; let _ = tx.send(BackgroundEvent::PageLoaded { @@ -91,6 +101,7 @@ pub async fn run_resume_picker(tui: &mut Tui, codex_home: &Path) -> Result, + default_provider: String, } struct PaginationState { @@ -225,7 +237,12 @@ struct Row { } impl PickerState { - fn new(codex_home: PathBuf, requester: FrameRequester, page_loader: PageLoader) -> Self { + fn new( + codex_home: PathBuf, + requester: FrameRequester, + page_loader: PageLoader, + default_provider: String, + ) -> Self { Self { codex_home, requester, @@ -246,6 +263,7 @@ impl PickerState { next_search_token: 0, page_loader, view_rows: None, + default_provider, } } @@ -324,11 +342,14 @@ impl PickerState { } async fn load_initial_page(&mut self) -> Result<()> { + let provider_filter = vec![self.default_provider.clone()]; let page = RolloutRecorder::list_conversations( &self.codex_home, PAGE_SIZE, None, INTERACTIVE_SESSION_SOURCES, + Some(provider_filter.as_slice()), + self.default_provider.as_str(), ) .await?; self.reset_pagination(); @@ -552,6 +573,7 @@ impl PickerState { cursor: Some(cursor), request_token, search_token, + default_provider: self.default_provider.clone(), }); } @@ -1061,8 +1083,12 @@ mod tests { use ratatui::layout::Layout; let loader: PageLoader = Arc::new(|_| {}); - let mut state = - PickerState::new(PathBuf::from("/tmp"), FrameRequester::test_dummy(), loader); + let mut state = PickerState::new( + PathBuf::from("/tmp"), + FrameRequester::test_dummy(), + loader, + String::from("openai"), + ); let now = Utc::now(); let rows = vec![ @@ -1117,8 +1143,12 @@ mod tests { #[test] fn pageless_scrolling_deduplicates_and_keeps_order() { let loader: PageLoader = Arc::new(|_| {}); - let mut state = - PickerState::new(PathBuf::from("/tmp"), FrameRequester::test_dummy(), loader); + let mut state = PickerState::new( + PathBuf::from("/tmp"), + FrameRequester::test_dummy(), + loader, + String::from("openai"), + ); state.reset_pagination(); state.ingest_page(page( @@ -1179,8 +1209,12 @@ mod tests { request_sink.lock().unwrap().push(req); }); - let mut state = - PickerState::new(PathBuf::from("/tmp"), FrameRequester::test_dummy(), loader); + let mut state = PickerState::new( + PathBuf::from("/tmp"), + FrameRequester::test_dummy(), + loader, + String::from("openai"), + ); state.reset_pagination(); state.ingest_page(page( vec![ @@ -1204,8 +1238,12 @@ mod tests { #[test] fn page_navigation_uses_view_rows() { let loader: PageLoader = Arc::new(|_| {}); - let mut state = - PickerState::new(PathBuf::from("/tmp"), FrameRequester::test_dummy(), loader); + let mut state = PickerState::new( + PathBuf::from("/tmp"), + FrameRequester::test_dummy(), + loader, + String::from("openai"), + ); let mut items = Vec::new(); for idx in 0..20 { @@ -1248,8 +1286,12 @@ mod tests { #[test] fn 
up_at_bottom_does_not_scroll_when_visible() { let loader: PageLoader = Arc::new(|_| {}); - let mut state = - PickerState::new(PathBuf::from("/tmp"), FrameRequester::test_dummy(), loader); + let mut state = PickerState::new( + PathBuf::from("/tmp"), + FrameRequester::test_dummy(), + loader, + String::from("openai"), + ); let mut items = Vec::new(); for idx in 0..10 { @@ -1288,8 +1330,12 @@ mod tests { request_sink.lock().unwrap().push(req); }); - let mut state = - PickerState::new(PathBuf::from("/tmp"), FrameRequester::test_dummy(), loader); + let mut state = PickerState::new( + PathBuf::from("/tmp"), + FrameRequester::test_dummy(), + loader, + String::from("openai"), + ); state.reset_pagination(); state.ingest_page(page( vec![make_item(
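
One rule ties the changes above together: when the lister filters by provider, a session
whose session_meta records a model_provider matches only if that provider appears in the
filter list, while a session recorded before this change (no model_provider in its
session_meta) is attributed to the default provider and matches only if the default is in
the list. A compact sketch of that rule, restating `ProviderMatcher::matches` from
codex-rs/core/src/rollout/list.rs as a free function (names here are illustrative):

    /// Does a session with `session_provider` recorded in its metadata pass the filter?
    fn session_matches(
        filters: &[String],
        default_provider: &str,
        session_provider: Option<&str>,
    ) -> bool {
        match session_provider {
            Some(provider) => filters.iter().any(|f| f == provider),
            // Legacy rollouts carry no model_provider; treat them as the default provider.
            None => filters.iter().any(|f| f == default_provider),
        }
    }

    fn main() {
        let filters = vec!["openai".to_string()];
        assert!(session_matches(&filters, "openai", Some("openai")));
        assert!(!session_matches(&filters, "openai", Some("beta")));
        assert!(session_matches(&filters, "openai", None)); // pre-existing session
    }
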