|
| 1 | +use anyhow::{bail, Context, Result}; |
| 2 | +use serde_json::Value; |
| 3 | +use std::fs; |
| 4 | +use std::path::{Path, PathBuf}; |
| 5 | +use std::process::{Command, Stdio}; |
| 6 | +use std::thread; |
| 7 | +use std::time::{Duration, Instant}; |
| 8 | + |
/// Configuration for one analyze run of the engine over modified files.
#[derive(Debug)]
pub struct AnalyzeOptions {
    /// Engine command/binary to invoke (the executable name or path).
    pub engine: String,
    /// Arguments inserted before the `exec` subcommand.
    pub engine_pre_args: Vec<String>,
    /// Arguments inserted after the `exec` subcommand.
    pub engine_exec_args: Vec<String>,
    /// Optional thinking-mode selector; semantics depend on the engine.
    /// NOTE(review): not read anywhere in this file — confirm it is consumed
    /// by the caller that builds the engine args.
    pub thinking_mode: Option<String>,
    /// Per-chunk wall-clock timeout, in minutes.
    pub timeout_minutes: u64,
}
| 17 | + |
/// Aggregated outcome of an analyze run across all chunks.
#[derive(Debug)]
pub struct AnalyzeResult {
    /// Files reported modified by `git diff --name-only`.
    pub modified_files: Vec<String>,
    /// Number of chunks the file list was split into.
    pub chunks: usize,
    /// Maximum number of files per chunk.
    pub chunk_size: usize,
    /// How many chunks hit the timeout and were killed.
    pub timed_out_chunks: u64,
    /// How many chunks exited unsuccessfully.
    pub failed_chunks: u64,
    /// Combined analysis report text.
    pub report: String,
    /// Display path of `.forge/analyze/latest.json`.
    pub latest_path: String,
    /// Display path of the epoch-stamped history copy.
    pub history_path: String,
}
| 29 | + |
/// Outcome of a single engine `exec` invocation.
///
/// Made `pub` (with `pub` fields) because the private type was returned from
/// the public `run_analyze_chunk`, which trips Rust's `private_interfaces`
/// lint; widening visibility is backward compatible.
#[derive(Debug)]
pub struct EngineExecRun {
    /// The engine's final agent message, or a truncated stdout/stderr merge
    /// when no agent message could be extracted.
    pub report: String,
    /// Process exit code; `None` when the process was killed by a signal
    /// (e.g. after a timeout).
    pub exit_code: Option<i32>,
    /// True when the run exceeded the configured timeout and was killed.
    pub timed_out: bool,
}
| 36 | + |
| 37 | +pub fn list_modified_files(cwd: &Path) -> Result<Vec<String>> { |
| 38 | + let output = Command::new("git") |
| 39 | + .args(["diff", "--name-only"]) |
| 40 | + .current_dir(cwd) |
| 41 | + .output() |
| 42 | + .context("failed to list modified files with git")?; |
| 43 | + if !output.status.success() { |
| 44 | + bail!( |
| 45 | + "git diff failed: {}", |
| 46 | + String::from_utf8_lossy(&output.stderr).trim() |
| 47 | + ); |
| 48 | + } |
| 49 | + let files = String::from_utf8_lossy(&output.stdout) |
| 50 | + .lines() |
| 51 | + .map(str::trim) |
| 52 | + .filter(|s| !s.is_empty()) |
| 53 | + .map(ToString::to_string) |
| 54 | + .collect::<Vec<_>>(); |
| 55 | + Ok(files) |
| 56 | +} |
| 57 | + |
/// Build the analysis prompt sent to the engine for one chunk of files.
///
/// The prompt pins the expected output format (risk tiers, next actions,
/// and the trailing `EXIT_SIGNAL: true` sentinel), embeds `scope_label`
/// (e.g. "chunk 1/2"), and lists each file as a `- path` bullet.
pub fn build_analyze_prompt(files: &[String], scope_label: &str) -> String {
    // Fixed instruction header: the engine must analyze only, never edit.
    let header = "Analyze ONLY these modified files and report exactly:\n1) Critical risks\n2) High risks\n3) Medium risks\n4) Suggested next actions\nDo not propose edits, only analysis.\nEnd with: EXIT_SIGNAL: true\n\nScope: ";

    let file_list: String = files
        .iter()
        .map(|file| format!("- {}\n", file))
        .collect();

    format!("{}{}\n\nModified files:\n{}", header, scope_label, file_list)
}
| 71 | + |
| 72 | +pub fn run_analyze_chunk( |
| 73 | + engine_cmd: &str, |
| 74 | + engine_pre_args: &[String], |
| 75 | + engine_exec_args: &[String], |
| 76 | + cwd: &Path, |
| 77 | + prompt: &str, |
| 78 | + timeout_minutes: u64, |
| 79 | +) -> Result<EngineExecRun> { |
| 80 | + let mut args = engine_pre_args.to_vec(); |
| 81 | + args.push("exec".to_string()); |
| 82 | + args.extend(engine_exec_args.iter().cloned()); |
| 83 | + args.push("--json".to_string()); |
| 84 | + args.push(prompt.to_string()); |
| 85 | + |
| 86 | + let timeout = Duration::from_secs(timeout_minutes.saturating_mul(60)); |
| 87 | + let mut child = Command::new(engine_cmd) |
| 88 | + .args(&args) |
| 89 | + .current_dir(cwd) |
| 90 | + .stdout(Stdio::piped()) |
| 91 | + .stderr(Stdio::piped()) |
| 92 | + .spawn() |
| 93 | + .with_context(|| format!("failed to execute {}", engine_cmd))?; |
| 94 | + |
| 95 | + let started = Instant::now(); |
| 96 | + let mut timed_out = false; |
| 97 | + |
| 98 | + loop { |
| 99 | + if child.try_wait()?.is_some() { |
| 100 | + break; |
| 101 | + } |
| 102 | + if started.elapsed() >= timeout { |
| 103 | + timed_out = true; |
| 104 | + let _ = child.kill(); |
| 105 | + break; |
| 106 | + } |
| 107 | + thread::sleep(Duration::from_millis(200)); |
| 108 | + } |
| 109 | + |
| 110 | + let output = child |
| 111 | + .wait_with_output() |
| 112 | + .with_context(|| format!("failed waiting for {}", engine_cmd))?; |
| 113 | + |
| 114 | + let stdout = String::from_utf8_lossy(&output.stdout).to_string(); |
| 115 | + let stderr = String::from_utf8_lossy(&output.stderr).to_string(); |
| 116 | + let report = extract_last_agent_message(&stdout).unwrap_or_else(|| { |
| 117 | + let merged = format!("{} {}", stdout.trim(), stderr.trim()); |
| 118 | + merged.chars().take(4000).collect() |
| 119 | + }); |
| 120 | + |
| 121 | + Ok(EngineExecRun { |
| 122 | + report, |
| 123 | + exit_code: output.status.code(), |
| 124 | + timed_out, |
| 125 | + }) |
| 126 | +} |
| 127 | + |
| 128 | +pub fn persist_analyze_report( |
| 129 | + cwd: &Path, |
| 130 | + files: &[String], |
| 131 | + chunks: usize, |
| 132 | + chunk_size: usize, |
| 133 | + timed_out_chunks: u64, |
| 134 | + failed_chunks: u64, |
| 135 | + chunk_reports: &[String], |
| 136 | + report: &str, |
| 137 | +) -> Result<AnalyzePaths> { |
| 138 | + let analyze_dir = cwd.join(".forge").join("analyze"); |
| 139 | + let history_dir = analyze_dir.join("history"); |
| 140 | + fs::create_dir_all(&history_dir) |
| 141 | + .with_context(|| format!("failed to create {}", history_dir.display()))?; |
| 142 | + |
| 143 | + let now = epoch_now(); |
| 144 | + let payload = serde_json::json!({ |
| 145 | + "created_at_epoch": now, |
| 146 | + "modified_files": files.len(), |
| 147 | + "chunks": chunks, |
| 148 | + "chunk_size": chunk_size, |
| 149 | + "timed_out_chunks": timed_out_chunks, |
| 150 | + "failed_chunks": failed_chunks, |
| 151 | + "files": files, |
| 152 | + "chunk_reports": chunk_reports, |
| 153 | + "report": report, |
| 154 | + }); |
| 155 | + |
| 156 | + let latest_path = analyze_dir.join("latest.json"); |
| 157 | + fs::write(&latest_path, serde_json::to_string_pretty(&payload)?) |
| 158 | + .with_context(|| format!("failed to write {}", latest_path.display()))?; |
| 159 | + |
| 160 | + let history_path = history_dir.join(format!("{}.json", now)); |
| 161 | + fs::write(&history_path, serde_json::to_string_pretty(&payload)?) |
| 162 | + .with_context(|| format!("failed to write {}", history_path.display()))?; |
| 163 | + |
| 164 | + Ok(AnalyzePaths { |
| 165 | + latest_path: latest_path.display().to_string(), |
| 166 | + history_path: history_path.display().to_string(), |
| 167 | + }) |
| 168 | +} |
| 169 | + |
/// Display paths of the two persisted report files.
#[derive(Debug)]
pub struct AnalyzePaths {
    /// Path to `.forge/analyze/latest.json`.
    pub latest_path: String,
    /// Path to the epoch-stamped copy under `.forge/analyze/history/`.
    pub history_path: String,
}
| 175 | + |
| 176 | +pub fn load_latest_analyze_payload(cwd: &Path) -> Result<Value> { |
| 177 | + let path = cwd.join(".forge").join("analyze").join("latest.json"); |
| 178 | + if !path.exists() { |
| 179 | + bail!("latest analyze report not found at {}", path.display()); |
| 180 | + } |
| 181 | + let raw = |
| 182 | + fs::read_to_string(&path).with_context(|| format!("failed to read {}", path.display()))?; |
| 183 | + let value: Value = serde_json::from_str(&raw) |
| 184 | + .with_context(|| format!("invalid json in {}", path.display()))?; |
| 185 | + Ok(value) |
| 186 | +} |
| 187 | + |
| 188 | +fn extract_last_agent_message(stdout: &str) -> Option<String> { |
| 189 | + let mut last = None; |
| 190 | + for line in stdout.lines() { |
| 191 | + let Ok(value) = serde_json::from_str::<Value>(line) else { |
| 192 | + continue; |
| 193 | + }; |
| 194 | + if value.get("type").and_then(|v| v.as_str()) != Some("item.completed") { |
| 195 | + continue; |
| 196 | + } |
| 197 | + let Some(item) = value.get("item") else { |
| 198 | + continue; |
| 199 | + }; |
| 200 | + if item.get("type").and_then(|v| v.as_str()) != Some("agent_message") { |
| 201 | + continue; |
| 202 | + } |
| 203 | + if let Some(text) = item.get("text").and_then(|v| v.as_str()) { |
| 204 | + last = Some(text.to_string()); |
| 205 | + } |
| 206 | + } |
| 207 | + last |
| 208 | +} |
| 209 | + |
/// Seconds since the Unix epoch; returns 0 if the system clock reads
/// earlier than the epoch.
fn epoch_now() -> u64 {
    use std::time::{SystemTime, UNIX_EPOCH};

    SystemTime::now()
        .duration_since(UNIX_EPOCH)
        .map(|elapsed| elapsed.as_secs())
        .unwrap_or(0)
}
| 216 | + |
#[cfg(test)]
mod tests {
    use super::*;
    // NOTE(review): `tempfile` is a third-party dev-dependency, and the
    // git-based tests assume a `git` binary on PATH.
    use tempfile::tempdir;

    // A freshly initialized repo has no tracked files, so the diff is empty.
    #[test]
    fn list_modified_files_returns_empty_when_no_changes() {
        let dir = tempdir().expect("tempdir");

        Command::new("git")
            .args(["init"])
            .current_dir(dir.path())
            .output()
            .expect("git init");

        let files = list_modified_files(dir.path()).expect("list files");
        assert!(files.is_empty());
    }

    // The prompt must carry the file bullets, the scope label, and the
    // EXIT_SIGNAL sentinel the engine is told to end with.
    #[test]
    fn build_analyze_prompt_includes_files() {
        let files = vec!["src/main.rs".to_string(), "src/lib.rs".to_string()];
        let prompt = build_analyze_prompt(&files, "chunk 1/2");

        assert!(prompt.contains("src/main.rs"));
        assert!(prompt.contains("src/lib.rs"));
        assert!(prompt.contains("chunk 1/2"));
        assert!(prompt.contains("EXIT_SIGNAL: true"));
    }

    // Both latest.json and the history copy must exist and embed the inputs.
    #[test]
    fn persist_analyze_report_creates_files() {
        let dir = tempdir().expect("tempdir");
        let files = vec!["test.rs".to_string()];
        let chunk_reports = vec!["Report 1".to_string()];

        let paths = persist_analyze_report(
            dir.path(),
            &files,
            1,
            25,
            0,
            0,
            &chunk_reports,
            "Final report",
        )
        .expect("persist");

        assert!(PathBuf::from(&paths.latest_path).exists());
        assert!(PathBuf::from(&paths.history_path).exists());

        let content = fs::read_to_string(PathBuf::from(&paths.latest_path)).expect("read");
        assert!(content.contains("test.rs"));
        assert!(content.contains("Final report"));
    }

    // Missing latest.json is an error, not an empty payload.
    #[test]
    fn load_latest_analyze_payload_fails_when_missing() {
        let dir = tempdir().expect("tempdir");
        let result = load_latest_analyze_payload(dir.path());
        assert!(result.is_err());
    }

    // A present latest.json round-trips as parsed JSON.
    #[test]
    fn load_latest_analyze_payload_returns_json() {
        let dir = tempdir().expect("tempdir");
        let analyze_dir = dir.path().join(".forge/analyze");
        fs::create_dir_all(&analyze_dir).expect("create dir");
        fs::write(analyze_dir.join("latest.json"), r#"{"test": "value"}"#).expect("write");

        let payload = load_latest_analyze_payload(dir.path()).expect("load");
        assert_eq!(payload.get("test").and_then(|v| v.as_str()), Some("value"));
    }

    // Happy path: a single item.completed agent_message yields its text.
    #[test]
    fn extract_last_agent_message_extracts_text() {
        let stdout =
            r#"{"type":"item.completed","item":{"type":"agent_message","text":"Hello world"}}"#;
        let result = extract_last_agent_message(stdout);
        assert_eq!(result, Some("Hello world".to_string()));
    }

    // With multiple agent messages, the LAST one wins.
    #[test]
    fn extract_last_agent_message_returns_last() {
        let stdout = r#"{"type":"item.completed","item":{"type":"agent_message","text":"First"}}
{"type":"item.completed","item":{"type":"agent_message","text":"Second"}}"#;
        let result = extract_last_agent_message(stdout);
        assert_eq!(result, Some("Second".to_string()));
    }

    // Non-agent_message items are skipped entirely.
    #[test]
    fn extract_last_agent_message_ignores_non_agent() {
        let stdout = r#"{"type":"item.completed","item":{"type":"other","text":"Ignored"}}"#;
        let result = extract_last_agent_message(stdout);
        assert!(result.is_none());
    }
}
0 commit comments