diff --git a/Cargo.lock b/Cargo.lock index 9a5975e6f..da954d25e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1141,6 +1141,8 @@ dependencies = [ "tempfile", "tokio", "toml", + "tracing", + "tracing-subscriber", "unicode-normalization", "ureq", "url", @@ -2106,6 +2108,15 @@ dependencies = [ "winapi", ] +[[package]] +name = "matchers" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d1525a2a28c7f4fa0fc98bb91ae755d1e2d1505079e05539e35bc876b5d65ae9" +dependencies = [ + "regex-automata", +] + [[package]] name = "memchr" version = "2.7.6" @@ -2219,6 +2230,15 @@ version = "0.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9737e026353e5cd0736f98eddae28665118eb6f6600902a7f50db585621fecb6" +[[package]] +name = "nu-ansi-term" +version = "0.50.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7957b9740744892f114936ab4a57b3f487491bbeafaf8083688b16841a4240e5" +dependencies = [ + "windows-sys 0.61.2", +] + [[package]] name = "num-conv" version = "0.2.1" @@ -3065,6 +3085,15 @@ dependencies = [ "digest", ] +[[package]] +name = "sharded-slab" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f40ca3c46823713e0d4209592e8d6e826aa57e928f09752619fc696c499637f6" +dependencies = [ + "lazy_static", +] + [[package]] name = "shlex" version = "1.3.0" @@ -3336,6 +3365,15 @@ dependencies = [ "syn 2.0.117", ] +[[package]] +name = "thread_local" +version = "1.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f60246a4944f24f6e018aa17cdeffb7818b76356965d03b07d6a9886e8962185" +dependencies = [ + "cfg-if", +] + [[package]] name = "time" version = "0.3.47" @@ -3454,6 +3492,67 @@ version = "1.0.6+spec-1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ab16f14aed21ee8bfd8ec22513f7287cd4a91aa92e44edfe2c17ddd004e92607" +[[package]] +name = "tracing" +version = "0.1.44" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "63e71662fa4b2a2c3a26f570f037eb95bb1f85397f3cd8076caed2f026a6d100" +dependencies = [ + "pin-project-lite", + "tracing-attributes", + "tracing-core", +] + +[[package]] +name = "tracing-attributes" +version = "0.1.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7490cfa5ec963746568740651ac6781f701c9c5ea257c58e057f3ba8cf69e8da" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.117", +] + +[[package]] +name = "tracing-core" +version = "0.1.36" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "db97caf9d906fbde555dd62fa95ddba9eecfd14cb388e4f491a66d74cd5fb79a" +dependencies = [ + "once_cell", + "valuable", +] + +[[package]] +name = "tracing-log" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3" +dependencies = [ + "log", + "once_cell", + "tracing-core", +] + +[[package]] +name = "tracing-subscriber" +version = "0.3.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cb7f578e5945fb242538965c2d0b04418d38ec25c79d160cd279bf0731c8d319" +dependencies = [ + "matchers", + "nu-ansi-term", + "once_cell", + "regex-automata", + "sharded-slab", + "smallvec", + "thread_local", + "tracing", + "tracing-core", + "tracing-log", +] + [[package]] name = "typed-path" version = "0.12.3" @@ -3577,6 +3676,12 @@ dependencies = [ "wasm-bindgen", ] +[[package]] +name = "valuable" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba73ea9cf16a25df0c8caa16c51acb937d5712a8429db78a3ee29d5dcacd3a65" + [[package]] name = "vcpkg" version = "0.2.15" diff --git a/Cargo.toml b/Cargo.toml index 1019218ca..fc30bf5e3 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -18,6 +18,8 @@ indicatif = "0.18" smol = "2.0" futures = "0.3" tokio = { version = "1.51", features = ["rt-multi-thread", "sync", 
"time", "macros"] } +tracing = "0.1" +tracing-subscriber = { version = "0.3", features = ["env-filter"] } interprocess = "2.4" rusqlite = { version = "0.31", features = ["bundled"] } libc = "0.2" diff --git a/docs/superpowers/plans/2026-04-14-daemon-logging.md b/docs/superpowers/plans/2026-04-14-daemon-logging.md new file mode 100644 index 000000000..0497e35ae --- /dev/null +++ b/docs/superpowers/plans/2026-04-14-daemon-logging.md @@ -0,0 +1,690 @@ +# Daemon Production Logging Implementation Plan + +> **For agentic workers:** REQUIRED SUB-SKILL: Use superpowers:subagent-driven-development (recommended) or superpowers:executing-plans to implement this plan task-by-task. Steps use checkbox (`- [ ]`) syntax for tracking. + +**Goal:** Migrate the entire codebase from `debug_log()` to `tracing` with proper log levels, adding production-visible logging for daemon operations and a custom Sentry-forwarding Layer. + +**Architecture:** Add `tracing` + `tracing-subscriber` crates. Initialize a subscriber in the daemon with an `EnvFilter` (default `info`, `debug` when `GIT_AI_DEBUG=1`), a `fmt::Layer` writing to stderr (which is dup2'd to the daemon log file), and a custom `SentryLayer` that intercepts ERROR events and routes them to the existing telemetry worker. Convert all 581 `debug_log()` calls codebase-wide, promoting key events to info/warn/error levels. 
+ +**Tech Stack:** Rust, `tracing 0.1`, `tracing-subscriber 0.3` (with `env-filter` feature) + +--- + +### Task 1: Add tracing dependencies to Cargo.toml + +**Files:** +- Modify: `Cargo.toml` + +- [ ] **Step 1: Add tracing and tracing-subscriber dependencies** + +Add these two lines to the `[dependencies]` section of `Cargo.toml`, after the existing `tokio` line: + +```toml +tracing = "0.1" +tracing-subscriber = { version = "0.3", features = ["env-filter"] } +``` + +- [ ] **Step 2: Verify the project compiles** + +Run: `cargo check 2>&1 | tail -5` +Expected: compiles successfully (warnings are OK) + +- [ ] **Step 3: Commit** + +```bash +git add Cargo.toml Cargo.lock +git commit -m "feat(logging): add tracing and tracing-subscriber dependencies" +``` + +--- + +### Task 2: Create the custom SentryLayer + +**Files:** +- Create: `src/daemon/sentry_layer.rs` +- Modify: `src/daemon/mod.rs` (or the inline module declarations in `src/daemon.rs` if that's how submodules are declared) + +This Layer intercepts ERROR-level tracing events and forwards them to the existing `TelemetryEnvelope::Error` pipeline via `submit_daemon_internal_telemetry()`. + +- [ ] **Step 1: Check how daemon submodules are declared** + +Read `src/daemon.rs` lines 1-30 to find the module declarations (e.g., `mod control_api;`, `mod telemetry_worker;`). The new module `sentry_layer` will be added alongside them. + +- [ ] **Step 2: Create `src/daemon/sentry_layer.rs`** + +```rust +//! Custom tracing Layer that forwards ERROR-level events to Sentry +//! via the existing daemon telemetry worker pipeline. + +use tracing::field::{Field, Visit}; +use tracing::{Event, Level, Subscriber}; +use tracing_subscriber::layer::{Context, Layer}; + +/// A tracing Layer that intercepts ERROR-level events and routes them +/// to the daemon's telemetry worker as `TelemetryEnvelope::Error` events, +/// which get forwarded to both enterprise and OSS Sentry DSNs. 
+pub struct SentryLayer; + +struct MessageVisitor { + message: String, + fields: serde_json::Map<String, serde_json::Value>, +} + +impl MessageVisitor { + fn new() -> Self { + Self { + message: String::new(), + fields: serde_json::Map::new(), + } + } +} + +impl Visit for MessageVisitor { + fn record_debug(&mut self, field: &Field, value: &dyn std::fmt::Debug) { + if field.name() == "message" { + self.message = format!("{:?}", value); + } else { + self.fields.insert( + field.name().to_string(), + serde_json::Value::String(format!("{:?}", value)), + ); + } + } + + fn record_str(&mut self, field: &Field, value: &str) { + if field.name() == "message" { + self.message = value.to_string(); + } else { + self.fields.insert( + field.name().to_string(), + serde_json::Value::String(value.to_string()), + ); + } + } +} + +impl<S: Subscriber> Layer<S> for SentryLayer { + fn on_event(&self, event: &Event<'_>, _ctx: Context<'_, S>) { + if *event.metadata().level() != Level::ERROR { + return; + } + + let mut visitor = MessageVisitor::new(); + event.record(&mut visitor); + + let context = if visitor.fields.is_empty() { + None + } else { + Some(serde_json::Value::Object(visitor.fields)) + }; + + let envelope = crate::daemon::control_api::TelemetryEnvelope::Error { + timestamp: chrono::Utc::now().to_rfc3339(), + message: visitor.message, + context, + }; + + crate::daemon::telemetry_worker::submit_daemon_internal_telemetry(vec![envelope]); + } +} + ``` + +- [ ] **Step 3: Add the module declaration** + +In the file where daemon submodules are declared, add: + +```rust +pub mod sentry_layer; +``` + +- [ ] **Step 4: Verify the project compiles** + +Run: `cargo check 2>&1 | tail -5` +Expected: compiles successfully + +- [ ] **Step 5: Commit** + +```bash +git add src/daemon/sentry_layer.rs src/daemon.rs +git commit -m "feat(logging): add custom SentryLayer for tracing error forwarding" +``` + +--- + +### Task 3: Initialize tracing subscriber in daemon startup + +**Files:** +- Modify: `src/daemon.rs` (the `run_daemon()` function, around
line 7681) + +The subscriber must be initialized BEFORE `maybe_setup_daemon_log_file()` sets up the dup2 redirect, so the fmt Layer captures the stderr handle (fd 2). After dup2, writes through that handle go to the log file. + +- [ ] **Step 1: Add tracing initialization to `run_daemon()`** + +In `src/daemon.rs`, in the `run_daemon()` function, add the subscriber setup right after `write_pid_metadata(&config)?;` and before `let _log_guard = maybe_setup_daemon_log_file(&config);` (around line 7695-7696). + +Add these imports at the top of the function or in the file's import section: + +```rust +use tracing_subscriber::{EnvFilter, layer::SubscriberExt, util::SubscriberInitExt}; +``` + +Then insert the initialization: + +```rust + // Initialize tracing subscriber before log file redirect so the fmt layer + // captures stderr (fd 2). After dup2, writes go to the daemon log file. + let env_filter = if std::env::var("GIT_AI_DEBUG").as_deref() == Ok("1") { + EnvFilter::new("debug") + } else { + EnvFilter::try_from_default_env() + .unwrap_or_else(|_| EnvFilter::new("info")) + }; + + tracing_subscriber::registry() + .with(env_filter) + .with( + tracing_subscriber::fmt::layer() + .with_target(false) + .with_thread_ids(false) + .with_ansi(false), + ) + .with(crate::daemon::sentry_layer::SentryLayer) + .init(); +``` + +Note: `.with_ansi(false)` because the output goes to a log file, not a terminal. 
+ +- [ ] **Step 2: Add daemon lifecycle log lines** + +Immediately after the subscriber init and the dup2 redirect (`let _log_guard = maybe_setup_daemon_log_file(&config);`), add: + +```rust + tracing::info!( + pid = std::process::id(), + version = env!("CARGO_PKG_VERSION"), + os = std::env::consts::OS, + arch = std::env::consts::ARCH, + "daemon started" + ); +``` + +At the end of `run_daemon()`, just before `Ok(())`, add: + +```rust + tracing::info!("daemon shutdown complete"); +``` + +- [ ] **Step 3: Verify the project compiles** + +Run: `cargo check 2>&1 | tail -5` +Expected: compiles successfully + +- [ ] **Step 4: Commit** + +```bash +git add src/daemon.rs +git commit -m "feat(logging): initialize tracing subscriber in daemon startup" +``` + +--- + +### Task 4: Migrate daemon.rs debug_log() calls with level promotions + +**Files:** +- Modify: `src/daemon.rs` + +This is the largest task — converting all ~75 `debug_log()` calls in `daemon.rs`. Most become `tracing::debug!()`, but key events get promoted per the spec. + +- [ ] **Step 1: Replace all debug_log calls in daemon.rs** + +Apply these conversions throughout `daemon.rs`. The general pattern: + +**Replace `debug_log(&format!("...", args))` with `tracing::debug!("...", args)`** — note that tracing macros use `{}` format args directly, so `&format!(...)` is unnecessary. 
+ +**Promotions to ERROR (panics, socket failures, log file setup):** + +These lines become `tracing::error!(...)`: + +- Line ~4611: `debug_log(&format!("daemon trace ingest error: {}", error))` → `tracing::error!(%error, "trace ingest error")` +- Line ~4637: `debug_log(&format!("daemon trace ingest panic: {}", panic_msg))` → `tracing::error!(panic_msg = %panic_msg, "trace ingest panic")` +- Line ~5444: `debug_log(&format!("daemon command side effect failed..."))` → `tracing::error!(family, seq = applied.seq, %error, "command side effect failed")` +- Line ~5464: `debug_log(&format!("daemon command apply failed..."))` → `tracing::error!(family, order, %error, "command apply failed")` +- Line ~5483: `debug_log(&format!("daemon command side effect panic..."))` → `tracing::error!(family, order, panic_msg = %panic_msg, "command side effect panic")` +- Line ~5563: `debug_log(&format!("daemon checkpoint side effect panic..."))` → `tracing::error!(family, order, panic_msg = %panic_msg, "checkpoint side effect panic")` +- Line ~7721: `debug_log(&format!("daemon control listener exited with error: {}", e))` → `tracing::error!(%e, "control listener exited with error")` +- Line ~7724: `debug_log("daemon control listener panicked")` → `tracing::error!("control listener panicked")` +- Line ~7740: `debug_log(&format!("daemon trace listener exited with error: {}", e))` → `tracing::error!(%e, "trace listener exited with error")` +- Line ~7743: `debug_log("daemon trace listener panicked")` → `tracing::error!("trace listener panicked")` + +Any line with `"daemon log file setup failed"` → `tracing::error!(...)` +Any line in the telemetry flush with panic → `tracing::error!(...)` + +**Promotions to WARN (update failures, pruning failures):** + +- Line ~7645: `debug_log(&format!("daemon update check failed: {}", err))` → `tracing::warn!(%err, "update check failed")` +- Line ~7688: `debug_log(&format!("daemon stale captured checkpoint pruning failed: {}", error))` → 
`tracing::warn!(%error, "stale captured checkpoint pruning failed")` +- Line ~5456: `debug_log(&format!("daemon command completion log write failed..."))` → `tracing::warn!(family, order, %error, "command completion log write failed")` + +**Promotions to INFO (updater, restart, lifecycle):** + +- Line ~7637: `debug_log("daemon update check: newer version available, requesting shutdown")` → `tracing::info!("update check: newer version available, requesting shutdown")` +- Line ~7642: `debug_log("daemon update check: no update needed")` → `tracing::info!("update check: no update needed")` +- Line ~7651: `debug_log("daemon uptime exceeded max, requesting restart")` → `tracing::info!("uptime exceeded max, requesting restart")` +- Line ~7670: `debug_log("daemon self-update: installation completed successfully")` → `tracing::info!("self-update: installation completed successfully")` +- Line ~7673: `debug_log("daemon self-update: no update to install")` → `tracing::info!("self-update: no update to install")` +- Line ~7676: `debug_log(&format!("daemon self-update: installation failed: {}", err))` → `tracing::warn!(%err, "self-update: installation failed")` + +**Everything else stays DEBUG:** + +All remaining `debug_log(...)` calls → `tracing::debug!(...)`. + +- [ ] **Step 2: Remove the `observability::log_error()` calls that duplicate error tracing** + +In daemon.rs, wherever there's a pattern of `debug_log(error) + observability::log_error(error, context)` at error/panic sites, the `observability::log_error()` call is now redundant because the `SentryLayer` automatically forwards `tracing::error!()` to Sentry. **However**, the `observability::log_error()` calls include structured `context` JSON with fields like `component`, `phase`, `reason`, `panic_message`. 
To preserve this context, include those fields as tracing event fields: + +```rust +// Before: +debug_log(&format!("daemon trace ingest panic: {}", panic_msg)); +observability::log_error(&error, Some(serde_json::json!({ + "component": "daemon", + "phase": "trace_ingest_worker", + "reason": "panic_in_ingest", + "panic_message": panic_msg, +}))); + +// After: +tracing::error!( + component = "daemon", + phase = "trace_ingest_worker", + reason = "panic_in_ingest", + panic_msg = %panic_msg, + "trace ingest panic" +); +// The SentryLayer picks up all fields and sends them as Sentry context. +// Remove the observability::log_error() call. +``` + +Apply this pattern to all ~16 `observability::log_error()` call sites in `daemon.rs`. + +- [ ] **Step 3: Remove `use crate::utils::debug_log;` import from daemon.rs** + +After converting all calls, remove the import. The compiler will tell you if any were missed. + +- [ ] **Step 4: Verify the project compiles** + +Run: `cargo check 2>&1 | tail -5` +Expected: compiles successfully (possibly with warnings about unused imports in other files) + +- [ ] **Step 5: Commit** + +```bash +git add src/daemon.rs +git commit -m "feat(logging): migrate daemon.rs from debug_log to tracing with level promotions" +``` + +--- + +### Task 5: Add new INFO-level log points for git write ops and checkpoints + +**Files:** +- Modify: `src/daemon.rs` + +Add the production-visible log points that don't currently exist: git write op pre/post pairs and concise checkpoint logging. + +- [ ] **Step 1: Add git write op INFO logging** + +In `maybe_apply_side_effects_for_applied_command()` (line ~6500), near the top after `let parsed_invocation = ...` (line ~6517), add INFO-level pre/post logging for write operations: + +```rust + // Log write operations at INFO level for production visibility. 
+ let primary = cmd.primary_command.as_deref().unwrap_or("unknown"); + let is_write_op = matches!( + primary, + "commit" | "rebase" | "merge" | "cherry-pick" | "am" | "stash" | "reset" | "push" + ); + if is_write_op && cmd.exit_code == 0 { + let repo_path = cmd.worktree.as_ref() + .map(|p| p.to_string_lossy().to_string()) + .unwrap_or_default(); + let post_head = cmd.post_repo.as_ref() + .and_then(|r| r.head.clone()) + .unwrap_or_default(); + tracing::info!( + op = primary, + repo = %repo_path, + new_head = %post_head, + "git write op completed" + ); + } +``` + +Note: We log at the end of side-effect application (post) with the result, rather than pre/post pairs, to keep it to one line. The pre-state is implicit (it's the command being processed). If separate pre/post is desired, add the pre log before `self.coordinator.route_command()` in the `FamilySequencerEntry::ReadyCommand` branch. + +- [ ] **Step 2: Add checkpoint INFO logging** + +In the `FamilySequencerEntry::Checkpoint` branch (line ~5501), add concise pre/post logging around `apply_checkpoint_side_effect()`: + +Before the checkpoint execution (around line 5537, before the `catch_unwind`): + +```rust + let checkpoint_kind_str = match request.as_ref() { + CheckpointRunRequest::Live(req) => req.kind.as_deref().unwrap_or("human"), + CheckpointRunRequest::Captured(_) => "captured", + }; + tracing::info!( + kind = checkpoint_kind_str, + repo = %repo_wd, + "checkpoint start" + ); + let checkpoint_start = std::time::Instant::now(); +``` + +After the checkpoint completes (after the `match checkpoint_result` block resolves `result`, around line 5586): + +```rust + let checkpoint_duration_ms = checkpoint_start.elapsed().as_millis(); + if result.is_ok() { + tracing::info!( + kind = checkpoint_kind_str, + repo = %repo_wd, + duration_ms = checkpoint_duration_ms, + "checkpoint done" + ); + } else { + tracing::warn!( + kind = checkpoint_kind_str, + repo = %repo_wd, + duration_ms = checkpoint_duration_ms, + "checkpoint 
failed" + ); + } +``` + +- [ ] **Step 3: Add shutdown reason logging** + +In `request_shutdown()` or wherever the coordinator's shutdown is triggered, and in the update check loop where shutdown is requested, add: + +```rust +// In daemon_update_check_loop, update-available path: +tracing::info!("shutdown requested: update available"); + +// In daemon_update_check_loop, max-uptime path: +tracing::info!("shutdown requested: uptime exceeded max"); +``` + +These replace the `debug_log` calls that were already promoted to info — just ensure the message clearly states the reason. + +- [ ] **Step 4: Verify the project compiles** + +Run: `cargo check 2>&1 | tail -5` +Expected: compiles successfully + +- [ ] **Step 5: Commit** + +```bash +git add src/daemon.rs +git commit -m "feat(logging): add INFO-level logging for git write ops, checkpoints, and shutdown reasons" +``` + +--- + +### Task 6: Migrate telemetry_worker.rs debug_log() calls + +**Files:** +- Modify: `src/daemon/telemetry_worker.rs` + +- [ ] **Step 1: Convert all debug_log calls in telemetry_worker.rs** + +There are 5 `debug_log()` calls in this file. 
Convert them: + +- `debug_log(&format!("telemetry flush task panicked: {}", e))` (line ~255) → `tracing::error!(%e, "telemetry flush task panicked")` +- `debug_log("daemon telemetry: skipping CAS flush, not logged in")` (line ~522) → `tracing::debug!("telemetry: skipping CAS flush, not logged in")` +- `debug_log(&format!("daemon telemetry: CAS parse error: {}", e))` (line ~534) → `tracing::warn!(%e, "telemetry: CAS parse error")` +- `debug_log(&format!("daemon telemetry: uploaded {} CAS objects", chunk.len()))` (line ~568) → `tracing::debug!(count = chunk.len(), "telemetry: uploaded CAS objects")` +- `debug_log(&format!("daemon telemetry: CAS upload error: {}", e))` (line ~573) → `tracing::warn!(%e, "telemetry: CAS upload error")` + +- [ ] **Step 2: Remove the `use crate::utils::debug_log;` import** + +- [ ] **Step 3: Verify the project compiles** + +Run: `cargo check 2>&1 | tail -5` +Expected: compiles successfully + +- [ ] **Step 4: Commit** + +```bash +git add src/daemon/telemetry_worker.rs +git commit -m "feat(logging): migrate telemetry_worker.rs from debug_log to tracing" +``` + +--- + +### Task 7: Migrate all remaining debug_log() calls codebase-wide + +**Files:** +- Modify: All 42 remaining files that use `debug_log()`, `debug_performance_log()`, or `debug_performance_log_structured()` + +This is a mechanical conversion — every remaining `debug_log(...)` becomes `tracing::debug!(...)`, every `debug_performance_log(...)` becomes `tracing::debug!(...)`, every `debug_performance_log_structured(json)` becomes `tracing::debug!(%json, "performance")`. 
+ +- [ ] **Step 1: Convert authorship module files** + +Files (highest call counts first): +- `src/authorship/rebase_authorship.rs` — 43 `debug_log` + 9 `debug_performance_log` → all `tracing::debug!` +- `src/authorship/prompt_utils.rs` — 12 → `tracing::debug!` +- `src/authorship/virtual_attribution.rs` — 9 → `tracing::debug!` +- `src/authorship/range_authorship.rs` — 8 → `tracing::debug!` +- `src/authorship/post_commit.rs` — 6 → `tracing::debug!` +- `src/authorship/attribution_tracker.rs` — 3 → `tracing::debug!` +- `src/authorship/internal_db.rs` — 3 → `tracing::debug!` +- `src/authorship/pre_commit.rs` — 2 → `tracing::debug!` +- `src/authorship/git_ai_hooks.rs` — 9 → `tracing::debug!` +- `src/authorship/stats.rs` — 1 → `tracing::debug!` + +For each file: +1. Replace `debug_log(&format!("...", args))` with `tracing::debug!("...", args)` +2. Replace `debug_log("literal")` with `tracing::debug!("literal")` +3. Replace `debug_performance_log(&format!("...", args))` with `tracing::debug!("...", args)` +4. 
Remove `use crate::utils::debug_log;` and/or `use crate::utils::debug_performance_log;` + +- [ ] **Step 2: Convert commands/hooks module files** + +Files: +- `src/commands/hooks/cherry_pick_hooks.rs` — 46 → `tracing::debug!` +- `src/commands/hooks/rebase_hooks.rs` — 39 → `tracing::debug!` +- `src/commands/checkpoint.rs` — 37 → `tracing::debug!` +- `src/commands/checkpoint_agent/bash_tool.rs` — 38 → `tracing::debug!` +- `src/commands/hooks/stash_hooks.rs` — 23 → `tracing::debug!` +- `src/commands/hooks/reset_hooks.rs` — 23 → `tracing::debug!` +- `src/commands/hooks/fetch_hooks.rs` — 22 → `tracing::debug!` +- `src/commands/hooks/checkout_hooks.rs` — 13 → `tracing::debug!` +- `src/commands/hooks/switch_hooks.rs` — 11 → `tracing::debug!` +- `src/commands/ci_handlers.rs` — 10 → `tracing::debug!` +- `src/commands/hooks/push_hooks.rs` — 7 → `tracing::debug!` +- `src/commands/checkpoint_agent/agent_presets.rs` — 7 → `tracing::debug!` +- `src/commands/show_prompt.rs` — 6 → `tracing::debug!` +- `src/commands/git_handlers.rs` — 6 → `tracing::debug!` +- `src/commands/prompts_db.rs` — 10 → `tracing::debug!` +- `src/commands/daemon.rs` — 2 → `tracing::debug!` +- `src/commands/git_ai_handlers.rs` — 2 → `tracing::debug!` +- `src/commands/hooks/commit_hooks.rs` — 1 → `tracing::debug!` +- `src/commands/hooks/merge_hooks.rs` — 1 → `tracing::debug!` +- `src/commands/hooks/clone_hooks.rs` — 6 → `tracing::debug!` +- `src/commands/hooks/plumbing_rewrite_hooks.rs` — 1 → `tracing::debug!` +- `src/commands/hooks/update_ref_hooks.rs` — 2 → `tracing::debug!` +- `src/commands/git_hook_handlers.rs` — 1 → `tracing::debug!` +- `src/commands/checkpoint_agent/amp_preset.rs` — 1 → `tracing::debug!` +- `src/commands/checkpoint_agent/opencode_preset.rs` — 1 → `tracing::debug!` + +- [ ] **Step 3: Convert git module files** + +Files: +- `src/git/sync_authorship.rs` — 25 → `tracing::debug!` +- `src/git/repo_storage.rs` — 6 → `tracing::debug!` +- `src/git/refs.rs` — 4 → `tracing::debug!` +- 
`src/git/repository.rs` — 1 → `tracing::debug!` + +- [ ] **Step 4: Convert remaining files** + +Files: +- `src/mdm/agents/jetbrains.rs` — 5 → `tracing::debug!` +- `src/mdm/jetbrains/download.rs` — 6 → `tracing::debug!` +- `src/mdm/agents/cursor.rs` — 1 → `tracing::debug!` +- `src/mdm/agents/vscode.rs` — 1 → `tracing::debug!` +- `src/mdm/jetbrains/detection.rs` — 1 → `tracing::debug!` +- `src/mdm/utils.rs` — 1 → `tracing::debug!` +- `src/config.rs` — 1 → `tracing::debug!` +- `src/daemon/trace_normalizer.rs` — 0 `debug_log` but check for any +- `src/observability/wrapper_performance_targets.rs` — 4 `debug_performance_log` + 2 `debug_performance_log_structured` → `tracing::debug!` + +- [ ] **Step 5: Verify the project compiles with no debug_log references** + +Run: `cargo check 2>&1 | tail -10` +Expected: compiles successfully + +Then verify no debug_log calls remain: +Run: `grep -r "debug_log\|debug_performance_log" src/ --include="*.rs" | grep -v "^src/utils.rs" | grep -v "// " | grep -v "test" | head -20` +Expected: no matches (except possibly utils.rs itself and test code) + +- [ ] **Step 6: Commit** + +```bash +git add src/ +git commit -m "feat(logging): migrate all remaining debug_log calls to tracing codebase-wide" +``` + +--- + +### Task 8: Remove debug_log infrastructure from utils.rs + +**Files:** +- Modify: `src/utils.rs` + +- [ ] **Step 1: Remove the debug logging functions and supporting code** + +Remove from `src/utils.rs`: +- `static DEBUG_ENABLED: std::sync::OnceLock` (line 10) +- `static DEBUG_PERFORMANCE_LEVEL: std::sync::OnceLock` (line 11) +- `fn is_debug_enabled()` (lines 15-22) +- `fn is_debug_performance_enabled()` (lines 24-26) +- `fn debug_performance_level()` (lines 28-35) +- `pub fn debug_performance_log(msg: &str)` (lines 37-41) +- `pub fn debug_performance_log_structured(json: serde_json::Value)` (lines 43-47) +- `pub fn debug_log(msg: &str)` (lines 57-61) and its doc comment (lines 49-56) + +Keep `IS_TERMINAL`, `IS_IN_BACKGROUND_AGENT`, 
and all other functions in utils.rs. + +- [ ] **Step 2: Verify the project compiles cleanly** + +Run: `cargo check 2>&1 | tail -10` +Expected: compiles successfully with no errors related to missing `debug_log` + +- [ ] **Step 3: Verify no remaining references** + +Run: `grep -rn "utils::debug_log\|utils::debug_performance" src/ --include="*.rs" | head -10` +Expected: no matches + +- [ ] **Step 4: Commit** + +```bash +git add src/utils.rs +git commit -m "feat(logging): remove debug_log infrastructure from utils.rs" +``` + +--- + +### Task 9: Update SentryLayer to include context fields from observability::log_error patterns + +**Files:** +- Modify: `src/daemon/sentry_layer.rs` + +The SentryLayer needs to capture all tracing event fields (not just `message`) so that structured context like `component`, `phase`, `reason` flows through to Sentry. + +- [ ] **Step 1: Verify the SentryLayer captures all fields** + +Review `src/daemon/sentry_layer.rs` — the `MessageVisitor` already collects non-message fields into a `serde_json::Map` and passes them as `context` in the `TelemetryEnvelope::Error`. Verify this matches the pattern used in Task 4 Step 2. + +If additional field types need handling (e.g., `record_i64`, `record_u64`, `record_bool`), add them: + +```rust +impl Visit for MessageVisitor { + fn record_i64(&mut self, field: &Field, value: i64) { + self.fields.insert( + field.name().to_string(), + serde_json::Value::Number(value.into()), + ); + } + + fn record_u64(&mut self, field: &Field, value: u64) { + self.fields.insert( + field.name().to_string(), + serde_json::Value::Number(value.into()), + ); + } + + fn record_bool(&mut self, field: &Field, value: bool) { + self.fields.insert( + field.name().to_string(), + serde_json::Value::Bool(value), + ); + } + + // ... 
existing record_debug and record_str methods +} +``` + +- [ ] **Step 2: Verify the project compiles** + +Run: `cargo check 2>&1 | tail -5` +Expected: compiles successfully + +- [ ] **Step 3: Commit (if changes were needed)** + +```bash +git add src/daemon/sentry_layer.rs +git commit -m "feat(logging): ensure SentryLayer captures all field types for Sentry context" +``` + +--- + +### Task 10: Final verification and cleanup + +**Files:** +- All modified files + +- [ ] **Step 1: Run full compilation check** + +Run: `cargo check 2>&1 | tail -20` +Expected: compiles cleanly + +- [ ] **Step 2: Run the test suite** + +Run: `cargo test 2>&1 | tail -30` +Expected: all tests pass + +- [ ] **Step 3: Verify no debug_log or debug_performance_log references remain** + +Run: `grep -rn "debug_log\b\|debug_performance_log\b" src/ --include="*.rs" | grep -v "^Binary" | head -20` +Expected: no matches + +- [ ] **Step 4: Verify the daemon log format** + +Run a quick manual verification that the log output format matches the spec: + +```bash +cargo build 2>&1 | tail -3 +``` + +Verify the binary builds. A full E2E test of the daemon logging would require running the daemon, which is tested by existing integration tests. 
+ +- [ ] **Step 5: Run existing integration tests** + +Run: `cargo test --test daemon_mode 2>&1 | tail -20` +Expected: tests pass (daemon integration tests exercise the daemon startup/shutdown path) + +- [ ] **Step 6: Final commit for any cleanup** + +If any cleanup was needed: +```bash +git add -A +git commit -m "chore(logging): final cleanup after tracing migration" +``` diff --git a/docs/superpowers/specs/2026-04-14-daemon-logging-design.md b/docs/superpowers/specs/2026-04-14-daemon-logging-design.md new file mode 100644 index 000000000..54c8ef4b3 --- /dev/null +++ b/docs/superpowers/specs/2026-04-14-daemon-logging-design.md @@ -0,0 +1,220 @@ +# Daemon Production Logging — Design Spec + +**Date:** 2026-04-14 +**Status:** Approved + +## Problem + +Outside of debug mode (`GIT_AI_DEBUG=1`), the daemon log file at +`~/.git-ai/internal/daemon/logs/{PID}.log` receives almost no output. All 581 +`debug_log()` calls across the codebase are gated behind `is_debug_enabled()`, +making production debugging nearly impossible. Errors, git operations, +checkpoints, auto-updater activity, and auto-restarts are invisible unless a +developer remembered to set the env var before the issue occurred. + +## Solution + +Migrate the entire codebase from the hand-rolled `debug_log()` system to the +`tracing` crate with proper log levels, a custom Sentry-forwarding Layer, and +structured production output. 
+ +## Tracing Infrastructure + +### New Crates + +- `tracing = "0.1"` — macros and span/event types +- `tracing-subscriber = { version = "0.3", features = ["env-filter"] }` — + formatting + level filtering + +### Subscriber Setup + +Initialized in `run_daemon()` before `maybe_setup_daemon_log_file()`: + +``` +tracing_subscriber::registry() + .with(EnvFilter) // level gating + .with(fmt::Layer) // human-readable output to stderr + .with(SentryLayer) // routes errors to Sentry + .init(); +``` + +### Level Filtering + +- **Default:** `info` — production events (write ops, checkpoints, updater, + errors) +- **`GIT_AI_DEBUG=1`:** `debug` — equivalent to today's behavior +- **Override:** `RUST_LOG=git_ai::daemon=debug` for fine-grained control + +### Output Format + +Compact human-readable text (not JSON). Read by humans via `git-ai bg tail`. + +``` +2026-04-14T10:32:01Z INFO daemon started pid=12345 version=1.3.0 +2026-04-14T10:32:01Z INFO update check: no update available +2026-04-14T10:45:12Z INFO commit pre repo=/home/user/project ref=HEAD +2026-04-14T10:45:12Z INFO commit post repo=/home/user/project ref=abc1234 +2026-04-14T10:45:13Z INFO checkpoint kind=Human repo=/home/user/project +2026-04-14T11:32:01Z ERROR trace ingest panic: index out of bounds +``` + +### Log File Routing + +The existing `maybe_setup_daemon_log_file()` calls +`dup2(log_fd, STDERR_FILENO)` to redirect fd 2 to the log file. The tracing +`fmt::Layer` writes to stderr (fd 2). After dup2, writes go to the file. No +`tracing-appender` needed. The existing redirect mechanism works as-is. + +## Sentry Routing + +### Custom Tracing Layer + +A thin `tracing::Layer` impl (~50 lines) that intercepts ERROR-level events +and forwards them to the existing telemetry worker via +`TelemetryEnvelope::Error` → `submit_daemon_internal_telemetry()`. 
+ +Every `tracing::error!(...)` in daemon code automatically goes to both the log +file (via fmt Layer) AND Sentry (via this Layer → telemetry worker → both +enterprise and OSS DSNs). No manual `send_telemetry_event()` needed at error +call sites. + +### What Stays in the Telemetry Worker + +- `TelemetryEnvelope::Error` — still needed for wrapper-process errors arriving + via control socket +- `TelemetryEnvelope::Performance` — explicit perf measurements, not log events +- `TelemetryEnvelope::Message` — PostHog routing +- `TelemetryEnvelope::Metrics` and CAS — unchanged + +### What Gets Removed from Daemon Code + +All manual `submit_daemon_internal_telemetry(vec![TelemetryEnvelope::Error +{...}])` calls that exist alongside `debug_log()` at error sites. The Layer +handles Sentry routing automatically now. + +### Future Path to Standard Sentry + +The tracing infrastructure is the foundation. When ready, the custom Layer can +be swapped for `sentry-tracing` and the custom `SentryClient` for the official +`sentry` crate — same subscriber, same tracing calls, just a different Layer. 
+ +## Production Log Event Inventory + +### ERROR (log file + Sentry via Layer) + +- Panic catches: trace ingest, command side-effect, checkpoint side-effect +- Socket listener failures (control socket, trace socket exit with error) +- Log file setup failure +- Telemetry flush panic + +### WARN (log file only) + +- Update check HTTP/parse failure +- Stale checkpoint pruning failure +- CAS upload failure +- Metrics upload failure (before SQLite fallback) +- Telemetry envelope parse errors + +### INFO (log file only) + +**Lifecycle:** +- Daemon started (PID, version, platform) +- Daemon shutdown initiated (reason: signal / update / max-uptime) +- Daemon shutdown complete + +**Auto-restart:** +- `"uptime exceeded 24.5h, requesting restart"` when max-uptime triggers + +**Auto-updater (verbose — all steps):** +- Update check started +- Update check result: no update / newer version found (with version) +- Update download started +- Update install completed / failed +- Post-shutdown self-update: started, completed, failed + +**Git write ops (pre/post pairs):** +- commit, rebase, merge, cherry-pick, amend, stash, reset, push +- One line each: `{op} pre repo=` / `{op} post repo=` + +**Git read ops — NOT logged at INFO:** +- status, diff, log, show, fetch, checkout (no-write) — stay at DEBUG + +**Checkpoints (concise):** +- `checkpoint start kind= repo=` +- `checkpoint done kind= repo= duration_ms=` +- Two lines per checkpoint. Verbose state dumps stay at DEBUG. + +**Control socket:** +- Connection accepted from new family (first time a repo connects) +- Not per-message + +### DEBUG (log file only when GIT_AI_DEBUG=1) + +- All existing `debug_log()` content (581 calls converted as-is) +- Detailed checkpoint state / watermark updates +- Trace event normalization details +- Family actor state transitions +- Git read ops +- CAS upload success details +- Performance logging (replaces `debug_performance_log`) + +## debug_log() Migration + +### Scope + +Codebase-wide. 
All 581 `debug_log()` calls across 44 files, plus 15 +`debug_performance_log()` and 2 `debug_performance_log_structured()` calls. +One call pattern across the entire codebase. + +### Conversion Rules + +| Current | New | +|---------|-----| +| `debug_log(...)` at error sites (panics, socket failures) | `tracing::error!(...)` | +| `debug_log(...)` at warning sites (update failures, pruning failures) | `tracing::warn!(...)` | +| `debug_log(...)` at key operation sites (updater, restart, lifecycle) | `tracing::info!(...)` | +| `debug_log(...)` everywhere else | `tracing::debug!(...)` | +| `debug_performance_log(...)` | `tracing::debug!(...)` | +| `debug_performance_log_structured(...)` | `tracing::debug!(...)` | + +### What Gets Removed + +- `debug_log()` function in `utils.rs` +- `debug_performance_log()` function in `utils.rs` +- `debug_performance_log_structured()` function in `utils.rs` +- `is_debug_enabled()`, `DEBUG_ENABLED` OnceLock +- `is_debug_performance_enabled()`, `debug_performance_level()`, + `DEBUG_PERFORMANCE_LEVEL` OnceLock + +### What Does NOT Change + +- `eprintln!()` calls in CLI handlers — these are user-facing terminal output, + not logging. They stay as-is. +- The `dup2` redirect mechanism in `maybe_setup_daemon_log_file()` +- Log file pruning (removes logs from dead PIDs older than 1 week) +- Log path: `~/.git-ai/internal/daemon/logs/{PID}.log` +- `git-ai bg tail` — continues to work as-is + +### Non-Daemon Code + +tracing macros are no-ops when no subscriber is installed. Non-daemon code +(CLI commands, wrapper processes) that calls `tracing::debug!(...)` will +simply produce no output — same behavior as today's `debug_log()` when +`GIT_AI_DEBUG` is unset. If a future PR wants those processes to log, it +just needs to initialize a subscriber there too. 
+ +## Log File Management + +No changes to existing mechanism: +- `dup2` redirect stays +- Log pruning stays (1 week TTL for dead PIDs) +- Log path unchanged +- `git-ai bg tail` unchanged + +## Future Improvements (Out of Scope) + +- Migrate to official `sentry` crate with `sentry-tracing` Layer +- Convert `TelemetryEnvelope::Performance` to tracing spans +- Add tracing subscriber initialization for non-daemon processes +- Structured JSON log output option +- Log rotation by size diff --git a/src/authorship/attribution_tracker.rs b/src/authorship/attribution_tracker.rs index 6e41c3781..fbadd4de1 100644 --- a/src/authorship/attribution_tracker.rs +++ b/src/authorship/attribution_tracker.rs @@ -7,7 +7,6 @@ use crate::authorship::imara_diff_utils::{ByteDiff, ByteDiffOp, DiffOp, capture_ use crate::authorship::move_detection::{DeletedLine, InsertedLine, detect_moves}; use crate::authorship::working_log::CheckpointKind; use crate::error::GitAiError; -use crate::utils::debug_log; use std::cmp::Ordering; use std::collections::HashMap; use std::hash::{Hash, Hasher}; @@ -309,10 +308,10 @@ impl AttributionTracker { let line_metadata_start = Instant::now(); let old_lines = collect_line_metadata(old_content); let new_lines = collect_line_metadata(new_content); - debug_log(&format!( + tracing::debug!( "[BENCHMARK] collect_line_metadata (old/new) took {:?}", line_metadata_start.elapsed() - )); + ); let capture_start = Instant::now(); let old_line_slices: Vec<&str> = old_lines @@ -326,11 +325,11 @@ impl AttributionTracker { let line_ops = capture_diff_slices(&old_line_slices, &new_line_slices); let line_ops_len = line_ops.len(); - debug_log(&format!( + tracing::debug!( "[BENCHMARK] capture_diff_slices produced {} ops in {:?}", line_ops_len, capture_start.elapsed() - )); + ); let mut computation = DiffComputation::default(); let mut pending_changed: Vec = Vec::new(); @@ -370,12 +369,12 @@ impl AttributionTracker { } computation.substantive_new_ranges = 
merge_ranges(computation.substantive_new_ranges); - debug_log(&format!( + tracing::debug!( "[BENCHMARK] compute_diffs processed {} ops in {:?} (total {:?})", line_ops_len, process_start.elapsed(), compute_start.elapsed() - )); + ); Ok(computation) } diff --git a/src/authorship/git_ai_hooks.rs b/src/authorship/git_ai_hooks.rs index 4c9af12a8..4808e230b 100644 --- a/src/authorship/git_ai_hooks.rs +++ b/src/authorship/git_ai_hooks.rs @@ -5,7 +5,6 @@ use crate::config::Config; use crate::git::repository::Repository; #[cfg(windows)] use crate::utils::CREATE_NO_WINDOW; -use crate::utils::debug_log; #[cfg(windows)] use std::os::windows::process::CommandExt; @@ -59,10 +58,10 @@ pub fn post_notes_updated(repo: &Repository, notes: &[(String, String)]) { let payload_json = match serde_json::to_string(&payload) { Ok(json) => json, Err(e) => { - debug_log(&format!( + tracing::debug!( "[git_ai_hooks] Failed to serialize post_notes_updated payload: {}", e - )); + ); return; } }; @@ -72,10 +71,11 @@ pub fn post_notes_updated(repo: &Repository, notes: &[(String, String)]) { let mut child = match spawn_shell_command(&hook_command) { Ok(child) => child, Err(e) => { - debug_log(&format!( + tracing::debug!( "[git_ai_hooks] Failed to spawn post_notes_updated hook '{}': {}", - hook_command, e - )); + hook_command, + e + ); continue; } }; @@ -86,17 +86,18 @@ pub fn post_notes_updated(repo: &Repository, notes: &[(String, String)]) { std::thread::spawn(move || { use std::io::Write; if let Err(e) = stdin.write_all(payload_for_stdin.as_bytes()) { - debug_log(&format!( + tracing::debug!( "[git_ai_hooks] Failed to write post_notes_updated stdin for '{}': {}", - command_for_log, e - )); + command_for_log, + e + ); } }); } else { - debug_log(&format!( + tracing::debug!( "[git_ai_hooks] Hook '{}' was spawned without a stdin pipe", hook_command - )); + ); } running_children.push((hook_command, child)); @@ -123,18 +124,16 @@ fn wait_for_hooks_or_detach(mut children: Vec<(String, Child)>) { match 
child.try_wait() { Ok(Some(status)) => { if !status.success() { - debug_log(&format!( + tracing::debug!( "[git_ai_hooks] Hook '{}' exited with status {}", - command, status - )); + command, + status + ); } } Ok(None) => still_running.push((command, child)), Err(e) => { - debug_log(&format!( - "[git_ai_hooks] Failed to poll hook '{}': {}", - command, e - )); + tracing::debug!("[git_ai_hooks] Failed to poll hook '{}': {}", command, e); } } } @@ -145,27 +144,29 @@ fn wait_for_hooks_or_detach(mut children: Vec<(String, Child)>) { if Instant::now() >= deadline { let detached_count = still_running.len(); - debug_log(&format!( + tracing::debug!( "[git_ai_hooks] Detaching {} unfinished hook command(s) after {}ms", detached_count, HOOK_WAIT_TIMEOUT.as_millis() - )); + ); std::thread::spawn(move || { for (command, mut child) in still_running { match child.wait() { Ok(status) => { if !status.success() { - debug_log(&format!( + tracing::debug!( "[git_ai_hooks] Detached hook '{}' exited with status {}", - command, status - )); + command, + status + ); } } Err(e) => { - debug_log(&format!( + tracing::debug!( "[git_ai_hooks] Failed waiting detached hook '{}': {}", - command, e - )); + command, + e + ); } } } diff --git a/src/authorship/internal_db.rs b/src/authorship/internal_db.rs index 6e7141f71..42b7e16cb 100644 --- a/src/authorship/internal_db.rs +++ b/src/authorship/internal_db.rs @@ -2,7 +2,6 @@ use crate::authorship::authorship_log_serialization::generate_short_hash; use crate::authorship::transcript::AiTranscript; use crate::authorship::working_log::Checkpoint; use crate::error::GitAiError; -use crate::utils::debug_log; use dirs; use rusqlite::{Connection, params}; use std::collections::HashMap; @@ -316,7 +315,7 @@ impl InternalDatabase { pub fn warmup() { std::thread::spawn(|| { if let Err(e) = Self::global() { - debug_log(&format!("DB warmup failed: {}", e)); + tracing::debug!("DB warmup failed: {}", e); } }); } @@ -427,11 +426,11 @@ impl InternalDatabase { // Step 3: 
Apply all missing migrations sequentially for target_version in current_version..SCHEMA_VERSION { - debug_log(&format!( + tracing::debug!( "[Migration] Upgrading database from version {} to {}", target_version, target_version + 1 - )); + ); // Apply the migration (FATAL on error) self.apply_migration(target_version)?; @@ -448,10 +447,10 @@ impl InternalDatabase { params![(target_version + 1).to_string()], )?; - debug_log(&format!( + tracing::debug!( "[Migration] Successfully upgraded to version {}", target_version + 1 - )); + ); } // Step 5: Verify final version matches expected diff --git a/src/authorship/post_commit.rs b/src/authorship/post_commit.rs index 263893fa7..f410d74a6 100644 --- a/src/authorship/post_commit.rs +++ b/src/authorship/post_commit.rs @@ -12,7 +12,6 @@ use crate::config::{Config, PromptStorageMode}; use crate::error::GitAiError; use crate::git::refs::notes_add; use crate::git::repository::Repository; -use crate::utils::debug_log; use std::collections::{HashMap, HashSet}; use std::io::IsTerminal; @@ -97,10 +96,10 @@ pub fn post_commit_with_final_state( // Batch upsert all prompts to database after refreshing (non-fatal if it fails) if let Err(e) = batch_upsert_prompts_to_db(&parent_working_log, &working_log, &commit_sha) { - debug_log(&format!( + tracing::debug!( "[Warning] Failed to batch upsert prompts to database: {}", e - )); + ); crate::observability::log_error( &e, Some(serde_json::json!({ @@ -185,7 +184,7 @@ pub fn post_commit_with_final_state( // Store in notes: redact secrets but keep messages in notes let count = redact_secrets_from_prompts(&mut authorship_log.metadata.prompts); if count > 0 { - debug_log(&format!("Redacted {} secrets from prompts", count)); + tracing::debug!("Redacted {} secrets from prompts", count); } } PromptStorageMode::Default => { @@ -202,19 +201,16 @@ pub fn post_commit_with_final_state( let redaction_count = redact_secrets_from_prompts(&mut authorship_log.metadata.prompts); if redaction_count > 0 { - 
debug_log(&format!( + tracing::debug!( "Redacted {} secrets from prompts before CAS upload", redaction_count - )); + ); } if let Err(e) = enqueue_prompt_messages_to_cas(repo, &mut authorship_log.metadata.prompts) { - debug_log(&format!( - "[Warning] Failed to enqueue prompt messages to CAS: {}", - e - )); + tracing::debug!("[Warning] Failed to enqueue prompt messages to CAS: {}", e); // Enqueue failed - still strip messages (never keep in notes for "default") strip_prompt_messages(&mut authorship_log.metadata.prompts); } @@ -271,20 +267,17 @@ pub fn post_commit_with_final_state( } else { match skip_reason.as_ref() { Some(StatsSkipReason::MergeCommit) => { - debug_log(&format!( - "Skipping post-commit stats for merge commit {}", - commit_sha - )); + tracing::debug!("Skipping post-commit stats for merge commit {}", commit_sha); } Some(StatsSkipReason::Expensive(estimate)) => { - debug_log(&format!( + tracing::debug!( "Skipping expensive post-commit stats for {} (files_with_additions={}, added_lines={}, deleted_lines={}, hunks={})", commit_sha, estimate.files_with_additions, estimate.added_lines, estimate.deleted_lines, estimate.hunk_ranges - )); + ); } None => {} } diff --git a/src/authorship/pre_commit.rs b/src/authorship/pre_commit.rs index 8bd46d7e3..123d764b1 100644 --- a/src/authorship/pre_commit.rs +++ b/src/authorship/pre_commit.rs @@ -2,8 +2,6 @@ use crate::authorship::working_log::CheckpointKind; use crate::commands::checkpoint_agent::agent_presets::AgentRunResult; use crate::error::GitAiError; use crate::git::repository::Repository; -use crate::utils::debug_log; - pub fn pre_commit(repo: &Repository, default_author: String) -> Result<(), GitAiError> { let (checkpoint_kind, agent_run_result) = pre_commit_checkpoint_context(repo); @@ -33,11 +31,11 @@ fn pre_commit_checkpoint_context(repo: &Repository) -> (CheckpointKind, Option update_pi_prompt(agent_metadata, current_model), "windsurf" => update_windsurf_prompt(agent_metadata, current_model), _ => { - 
debug_log(&format!("Unknown tool: {}", tool)); + tracing::debug!("Unknown tool: {}", tool); PromptUpdateResult::Unchanged } } @@ -201,10 +200,11 @@ fn update_codex_prompt( model.unwrap_or_else(|| current_model.to_string()), ), Err(e) => { - debug_log(&format!( + tracing::debug!( "Failed to parse Codex rollout JSONL transcript from {}: {}", - transcript_path, e - )); + transcript_path, + e + ); log_error( &e, Some(serde_json::json!({ @@ -236,10 +236,11 @@ fn update_cursor_prompt( PromptUpdateResult::Updated(transcript, current_model.to_string()) } Err(e) => { - debug_log(&format!( + tracing::debug!( "Failed to parse Cursor JSONL transcript from {}: {}", - transcript_path, e - )); + transcript_path, + e + ); log_error( &e, Some(serde_json::json!({ @@ -277,10 +278,11 @@ fn update_claude_prompt( ) } Err(e) => { - debug_log(&format!( + tracing::debug!( "Failed to parse Claude JSONL transcript from {}: {}", - transcript_path, e - )); + transcript_path, + e + ); log_error( &e, Some(serde_json::json!({ @@ -320,10 +322,11 @@ fn update_gemini_prompt( ) } Err(e) => { - debug_log(&format!( + tracing::debug!( "Failed to parse Gemini JSON transcript from {}: {}", - transcript_path, e - )); + transcript_path, + e + ); log_error( &e, Some(serde_json::json!({ @@ -365,10 +368,11 @@ fn update_github_copilot_prompt( ) } Err(e) => { - debug_log(&format!( + tracing::debug!( "Failed to parse GitHub Copilot chat session JSON from {}: {}", - chat_session_path, e - )); + chat_session_path, + e + ); log_error( &e, Some(serde_json::json!({ @@ -406,10 +410,11 @@ fn update_continue_cli_prompt( PromptUpdateResult::Updated(transcript, current_model.to_string()) } Err(e) => { - debug_log(&format!( + tracing::debug!( "Failed to parse Continue CLI JSON transcript from {}: {}", - transcript_path, e - )); + transcript_path, + e + ); log_error( &e, Some(serde_json::json!({ @@ -442,10 +447,11 @@ fn update_droid_prompt( match DroidPreset::transcript_and_model_from_droid_jsonl(transcript_path) { 
Ok((transcript, _model)) => transcript, Err(e) => { - debug_log(&format!( + tracing::debug!( "Failed to parse Droid JSONL transcript from {}: {}", - transcript_path, e - )); + transcript_path, + e + ); log_error( &e, Some(serde_json::json!({ @@ -463,10 +469,11 @@ fn update_droid_prompt( Ok(Some(m)) => m, Ok(None) => current_model.to_string(), Err(e) => { - debug_log(&format!( + tracing::debug!( "Failed to parse Droid settings.json from {}: {}", - settings_path, e - )); + settings_path, + e + ); current_model.to_string() } } @@ -533,10 +540,11 @@ fn update_amp_prompt( model.unwrap_or_else(|| current_model.to_string()), ), Err(e) => { - debug_log(&format!( + tracing::debug!( "Failed to fetch Amp transcript for thread {}: {}", - thread_id, e - )); + thread_id, + e + ); log_error( &e, Some(serde_json::json!({ @@ -576,10 +584,11 @@ fn update_opencode_prompt( model.unwrap_or_else(|| current_model.to_string()), ), Err(e) => { - debug_log(&format!( + tracing::debug!( "Failed to fetch OpenCode transcript for session {}: {}", - session_id, e - )); + session_id, + e + ); log_error( &e, Some(serde_json::json!({ @@ -607,10 +616,11 @@ fn update_pi_prompt( model.unwrap_or_else(|| current_model.to_string()), ), Err(e) => { - debug_log(&format!( + tracing::debug!( "Failed to parse Pi session JSONL from {}: {}", - session_path, e - )); + session_path, + e + ); log_error( &e, Some(serde_json::json!({ @@ -639,10 +649,11 @@ fn update_windsurf_prompt( model.unwrap_or_else(|| current_model.to_string()), ), Err(e) => { - debug_log(&format!( + tracing::debug!( "Failed to parse Windsurf JSONL transcript from {}: {}", - transcript_path, e - )); + transcript_path, + e + ); log_error( &e, Some(serde_json::json!({ diff --git a/src/authorship/range_authorship.rs b/src/authorship/range_authorship.rs index ac7ad11af..6e775c5e2 100644 --- a/src/authorship/range_authorship.rs +++ b/src/authorship/range_authorship.rs @@ -10,8 +10,6 @@ use crate::authorship::stats::{CommitStats, 
stats_for_commit_stats, stats_from_a use crate::error::GitAiError; use crate::git::refs::{CommitAuthorship, get_commits_with_notes_from_list}; use crate::git::repository::{CommitRange, InternalGitProfile, Repository, exec_git_with_profile}; -use crate::utils::debug_log; - use std::io::IsTerminal; /// The git empty tree hash - represents an empty repository state @@ -100,7 +98,7 @@ pub fn range_authorship( ))); } - debug_log(&format!("✓ Fetched {} from {}", fetch_refspec, remote)); + tracing::debug!("Fetched {} from {}", fetch_refspec, remote); } // Clone commit_range before consuming it @@ -180,10 +178,11 @@ fn create_authorship_log_for_range( VirtualAttributions, merge_attributions_favoring_first, }; - debug_log(&format!( + tracing::debug!( "Calculating authorship log for range: {} -> {}", - start_sha, end_sha - )); + start_sha, + end_sha + ); // Step 1: Get list of changed files between the two commits let all_changed_files = repo.diff_changed_files(start_sha, end_sha)?; @@ -202,7 +201,7 @@ fn create_authorship_log_for_range( if changed_files.is_empty() { // No files changed, return empty authorship log - debug_log("No files changed in range"); + tracing::debug!("No files changed in range"); return Ok( crate::authorship::authorship_log_serialization::AuthorshipLog { attestations: Vec::new(), @@ -214,15 +213,15 @@ fn create_authorship_log_for_range( ); } - debug_log(&format!( + tracing::debug!( "Processing {} changed files for range authorship", changed_files.len() - )); + ); // Special handling for empty tree: there's no start state to compare against // We only need the end state's attributions if start_sha == EMPTY_TREE_HASH { - debug_log("Start is empty tree - using only end commit attributions"); + tracing::debug!("Start is empty tree - using only end commit attributions"); let repo_clone = repo.clone(); let mut end_va = smol::block_on(async { @@ -243,11 +242,11 @@ fn create_authorship_log_for_range( let mut authorship_log = end_va.to_authorship_log()?; 
authorship_log.metadata.base_commit_sha = end_sha.to_string(); - debug_log(&format!( + tracing::debug!( "Created authorship log with {} attestations, {} prompts", authorship_log.attestations.len(), authorship_log.metadata.prompts.len() - )); + ); return Ok(authorship_log); } @@ -291,10 +290,10 @@ fn create_authorship_log_for_range( // Step 4: Read committed files from end commit (final state) let committed_files = get_committed_files_content(repo, end_sha, &changed_files)?; - debug_log(&format!( + tracing::debug!( "Read {} committed files from end commit", committed_files.len() - )); + ); // Step 5: Merge VirtualAttributions, favoring end commit (newer state) let merged_va = merge_attributions_favoring_first(end_va, start_va, committed_files)?; @@ -303,11 +302,11 @@ fn create_authorship_log_for_range( let mut authorship_log = merged_va.to_authorship_log()?; authorship_log.metadata.base_commit_sha = end_sha.to_string(); - debug_log(&format!( + tracing::debug!( "Created authorship log with {} attestations, {} prompts", authorship_log.attestations.len(), authorship_log.metadata.prompts.len() - )); + ); Ok(authorship_log) } diff --git a/src/authorship/rebase_authorship.rs b/src/authorship/rebase_authorship.rs index 29d223a58..caf9580ea 100644 --- a/src/authorship/rebase_authorship.rs +++ b/src/authorship/rebase_authorship.rs @@ -7,7 +7,6 @@ use crate::git::authorship_traversal::{ use crate::git::refs::{get_reference_as_authorship_log_v3, note_blob_oids_for_commits}; use crate::git::repository::{CommitRange, Repository, exec_git, exec_git_stdin}; use crate::git::rewrite_log::RewriteLogEvent; -use crate::utils::{debug_log, debug_performance_log}; use std::collections::{BTreeMap, HashMap, HashSet}; #[derive(Clone, Copy, Default)] @@ -144,10 +143,11 @@ pub fn rewrite_authorship_if_needed( commit_author, )?; - debug_log(&format!( + tracing::debug!( "Ammended commit {} now has authorship log {}", - &commit_amend.original_commit, &commit_amend.amended_commit_sha - )); + 
&commit_amend.original_commit, + &commit_amend.amended_commit_sha + ); } RewriteLogEvent::MergeSquash { merge_squash } => { let current_head = repo @@ -156,10 +156,10 @@ pub fn rewrite_authorship_if_needed( .and_then(|head| head.target().ok()) .map(|oid| oid.to_string()); if current_head.as_deref() != Some(merge_squash.base_head.as_str()) { - debug_log(&format!( + tracing::debug!( "Skipping merge --squash pre-commit prep because repo head already advanced past {}", merge_squash.base_head - )); + ); return Ok(()); } // --squash always fails if repo is not clean @@ -167,10 +167,10 @@ pub fn rewrite_authorship_if_needed( repo.storage .delete_working_log_for_base_commit(&merge_squash.base_head)?; if merge_squash.staged_file_blobs.is_empty() { - debug_log(&format!( + tracing::debug!( "Skipping immediate merge --squash pre-commit prep for {} because no staged snapshot was captured; commit replay will reconstruct from the committed final state", merge_squash.base_head - )); + ); return Ok(()); } @@ -183,10 +183,11 @@ pub fn rewrite_authorship_if_needed( &commit_author, )?; - debug_log(&format!( + tracing::debug!( "✓ Prepared authorship attributions for merge --squash of {} into {}", - merge_squash.source_branch, merge_squash.base_branch - )); + merge_squash.source_branch, + merge_squash.base_branch + ); } RewriteLogEvent::RebaseComplete { rebase_complete } => { rewrite_authorship_after_rebase_v2( @@ -203,10 +204,10 @@ pub fn rewrite_authorship_if_needed( &rebase_complete.new_head, )?; - debug_log(&format!( + tracing::debug!( "✓ Rewrote authorship for {} rebased commits", rebase_complete.new_commits.len() - )); + ); } RewriteLogEvent::CherryPickComplete { cherry_pick_complete, @@ -224,10 +225,10 @@ pub fn rewrite_authorship_if_needed( &commit_author, )?; - debug_log(&format!( + tracing::debug!( "✓ Rewrote authorship for {} cherry-picked commits", cherry_pick_complete.new_commits.len() - )); + ); } _ => {} } @@ -266,15 +267,16 @@ fn migrate_working_log_after_rebase( if 
!initial.files.is_empty() { let new_wl = repo.storage.working_log_for_base_commit(new_head)?; new_wl.write_initial(initial)?; - debug_log(&format!( + tracing::debug!( "Migrated INITIAL attributions from {} to {}", - original_head, new_head - )); + original_head, + new_head + ); } else { - debug_log(&format!( + tracing::debug!( "No INITIAL attributions to migrate from {} (dropping old working log)", original_head - )); + ); } repo.storage .delete_working_log_for_base_commit(original_head)?; @@ -576,10 +578,11 @@ pub fn rewrite_authorship_after_squash_or_rebase( }; let target_branch_head_sha = target_branch_head.id().to_string(); - debug_log(&format!( + tracing::debug!( "Rewriting authorship for squash/rebase merge: {} -> {}", - source_head_sha, merge_commit_sha - )); + source_head_sha, + merge_commit_sha + ); // Step 2: Find merge base between source and target to optimize blame // We only need to look at commits after the merge base, not entire history @@ -607,7 +610,7 @@ pub fn rewrite_authorship_after_squash_or_rebase( if changed_files.is_empty() { if commits_have_authorship_notes(repo, &source_commits)? 
{ - debug_log( + tracing::debug!( "No AI-touched files in merge, but notes exist in source commits; writing empty authorship log", ); if let Some(authorship_log) = build_metadata_only_authorship_log_from_source_notes( @@ -622,15 +625,15 @@ pub fn rewrite_authorship_after_squash_or_rebase( } } else { // No files changed, nothing to do - debug_log("No files changed in merge, skipping authorship rewrite"); + tracing::debug!("No files changed in merge, skipping authorship rewrite"); } return Ok(()); } - debug_log(&format!( + tracing::debug!( "Processing {} changed files for merge authorship", changed_files.len() - )); + ); // Step 4: Create VirtualAttributions for both branches // Use merge_base to limit blame range for performance @@ -660,10 +663,10 @@ pub fn rewrite_authorship_after_squash_or_rebase( // Step 4: Read committed files from merge commit (captures final state with conflict resolutions) let committed_files = get_committed_files_content(repo, merge_commit_sha, &changed_files)?; - debug_log(&format!( + tracing::debug!( "Read {} committed files from merge commit", committed_files.len() - )); + ); // Step 5: Merge VirtualAttributions, favoring target branch (base) let merged_va = merge_attributions_favoring_first(target_va, source_va, committed_files)?; @@ -694,11 +697,11 @@ pub fn rewrite_authorship_after_squash_or_rebase( } } - debug_log(&format!( + tracing::debug!( "Created authorship log with {} attestations, {} prompts", authorship_log.attestations.len(), authorship_log.metadata.prompts.len() - )); + ); // Step 7: Save authorship log to git notes let authorship_json = authorship_log @@ -707,10 +710,10 @@ pub fn rewrite_authorship_after_squash_or_rebase( crate::git::refs::notes_add(repo, merge_commit_sha, &authorship_json)?; - debug_log(&format!( + tracing::debug!( "✓ Saved authorship log for merge commit {}", merge_commit_sha - )); + ); Ok(()) } @@ -1087,12 +1090,12 @@ pub fn rewrite_authorship_after_rebase_v2( "load_rebase_note_cache".to_string(), 
phase_start.elapsed().as_millis(), )); - debug_performance_log(&format!( + tracing::debug!( "rebase_v2: loaded note cache ({} original notes, {} new with notes) in {}ms", note_cache.original_note_contents.len(), note_cache.new_commits_with_notes.len(), phase_start.elapsed().as_millis() - )); + ); // Filter out commits that already have authorship logs (these are commits from the target branch). let force_process_existing_notes = original_commits.len() > new_commits.len(); @@ -1102,10 +1105,7 @@ pub fn rewrite_authorship_after_rebase_v2( let has_log = !force_process_existing_notes && note_cache.new_commits_with_notes.contains(commit.as_str()); if has_log { - debug_log(&format!( - "Skipping commit {} (already has authorship log)", - commit - )); + tracing::debug!("Skipping commit {} (already has authorship log)", commit); } !has_log }) @@ -1113,15 +1113,15 @@ pub fn rewrite_authorship_after_rebase_v2( .collect(); if commits_to_process.is_empty() { - debug_log("No new commits to process (all commits already have authorship logs)"); + tracing::debug!("No new commits to process (all commits already have authorship logs)"); return Ok(()); } - debug_log(&format!( + tracing::debug!( "Processing {} newly created commits (skipped {} existing commits)", commits_to_process.len(), new_commits.len() - commits_to_process.len() - )); + ); let commits_to_process_lookup: HashSet<&str> = commits_to_process.iter().map(String::as_str).collect(); let all_commit_pairs = pair_commits_for_rewrite(repo, original_commits, new_commits); @@ -1167,23 +1167,23 @@ pub fn rewrite_authorship_after_rebase_v2( let remapped_count = remap_notes_for_commit_pairs(repo, &commit_pairs_to_process, &original_note_contents)?; if remapped_count > 0 { - debug_log(&format!( + tracing::debug!( "Remapped {} metadata-only authorship notes for rebase commits", remapped_count - )); + ); } else { - debug_log("No AI-touched files and no source notes to remap during rebase"); + tracing::debug!("No AI-touched files and 
no source notes to remap during rebase"); } return Ok(()); } let pathspecs_lookup: HashSet<&str> = pathspecs.iter().map(String::as_str).collect(); - debug_log(&format!( + tracing::debug!( "Processing rebase: {} files modified across {} original commits -> {} new commits", pathspecs.len(), original_commits.len(), new_commits.len() - )); + ); if try_fast_path_rebase_note_remap_cached( repo, @@ -1273,14 +1273,14 @@ pub fn rewrite_authorship_after_rebase_v2( ¬e_cache, &original_hunks_by_commit, ) { - debug_log("Using fast note-based attribution reconstruction (skipping blame)"); + tracing::debug!("Using fast note-based attribution reconstruction (skipping blame)"); let ts = std::time::SystemTime::now() .duration_since(std::time::UNIX_EPOCH) .unwrap_or_default() .as_millis(); (attrs, contents, prompts, humans, ts) } else { - debug_log("Falling back to VirtualAttributions (blame-based reconstruction)"); + tracing::debug!("Falling back to VirtualAttributions (blame-based reconstruction)"); let new_head = new_commits.last().unwrap(); let merge_base = repo .merge_base(original_head.to_string(), new_head.to_string()) @@ -1486,12 +1486,12 @@ pub fn rewrite_authorship_after_rebase_v2( BTreeMap::new(); for (idx, new_commit) in commits_to_process.iter().enumerate() { - debug_log(&format!( + tracing::debug!( "Processing commit {}/{}: {}", idx + 1, commits_to_process.len(), new_commit - )); + ); let (changed_files_in_commit, new_content_for_changed_files) = changed_contents_by_commit .remove(new_commit) @@ -1814,17 +1814,18 @@ pub fn rewrite_authorship_after_rebase_v2( )); for (commit_sha, file_count) in pending_note_debug { - debug_log(&format!( + tracing::debug!( "Saved authorship log for commit {} ({} files)", - commit_sha, file_count - )); + commit_sha, + file_count + ); } let total_ms = rewrite_start.elapsed().as_millis(); - debug_performance_log(&format!( + tracing::debug!( "rebase_v2: TOTAL rewrite_authorship_after_rebase_v2 in {}ms", total_ms - )); + ); // Write detailed 
timing breakdown for benchmarking if let Ok(timing_path) = std::env::var("GIT_AI_REBASE_TIMING_FILE") { @@ -1875,11 +1876,11 @@ pub fn rewrite_authorship_after_cherry_pick( ))); } - debug_log(&format!( + tracing::debug!( "Processing cherry-pick: {} source commits -> {} new commits", source_commits.len(), new_commits.len() - )); + ); let commit_pairs: Vec<(String, String)> = source_commits .iter() @@ -1900,12 +1901,12 @@ pub fn rewrite_authorship_after_cherry_pick( let remapped_count = remap_notes_for_commit_pairs(repo, &commit_pairs, &source_note_contents)?; if remapped_count > 0 { - debug_log(&format!( + tracing::debug!( "Remapped {} metadata-only authorship notes for cherry-picked commits", remapped_count - )); + ); } else { - debug_log("No files modified in source commits"); + tracing::debug!("No files modified in source commits"); } return Ok(()); } @@ -1917,11 +1918,11 @@ pub fn rewrite_authorship_after_cherry_pick( let mut source_note_content_by_new_commit: HashMap = HashMap::new(); let mut source_note_content_loaded = false; - debug_log(&format!( + tracing::debug!( "Processing cherry-pick: {} files modified across {} source commits", pathspecs.len(), source_commits.len() - )); + ); // Step 2: Create VirtualAttributions from the LAST source commit // This is the key difference from rebase: cherry-pick applies patches sequentially, @@ -1967,12 +1968,12 @@ pub fn rewrite_authorship_after_cherry_pick( // Step 3: Process each new commit in order (oldest to newest) for (idx, new_commit) in new_commits.iter().enumerate() { - debug_log(&format!( + tracing::debug!( "Processing cherry-picked commit {}/{}: {}", idx + 1, new_commits.len(), new_commit - )); + ); // Get the DIFF for this commit (what actually changed) let commit_obj = repo.find_commit(new_commit.clone())?; @@ -2044,11 +2045,11 @@ pub fn rewrite_authorship_after_cherry_pick( crate::git::refs::notes_add(repo, new_commit, &authorship_json)?; - debug_log(&format!( + tracing::debug!( "Saved authorship log for 
cherry-picked commit {} ({} files)", new_commit, authorship_log.attestations.len() - )); + ); } Ok(()) @@ -2990,14 +2991,15 @@ pub fn reconstruct_working_log_after_reset( || is_zero_oid(target_commit_sha) || is_zero_oid(old_head_sha) { - debug_log("Skipping reset working-log reconstruction for invalid zero/empty oid"); + tracing::debug!("Skipping reset working-log reconstruction for invalid zero/empty oid"); return Ok(()); } - debug_log(&format!( + tracing::debug!( "Reconstructing working log after reset from {} to {}", - old_head_sha, target_commit_sha - )); + old_head_sha, + target_commit_sha + ); // Step 1: Get all files changed between target and old_head let all_changed_files = @@ -3031,17 +3033,17 @@ pub fn reconstruct_working_log_after_reset( let pathspecs = filter_pathspecs_to_ai_touched_files(repo, &commits_in_range, &pathspecs)?; if pathspecs.is_empty() { - debug_log("No files changed between commits, nothing to reconstruct"); + tracing::debug!("No files changed between commits, nothing to reconstruct"); // Still delete old working log repo.storage .delete_working_log_for_base_commit(old_head_sha)?; return Ok(()); } - debug_log(&format!( + tracing::debug!( "Processing {} files for reset authorship reconstruction", pathspecs.len() - )); + ); // Step 2: Build final state from the captured command-exit snapshot when available. let has_captured_snapshot = final_state_override.is_some(); @@ -3059,10 +3061,7 @@ pub fn reconstruct_working_log_after_reset( }; final_state.insert(file_path.clone(), content); } - debug_log(&format!( - "Read {} files from working directory", - final_state.len() - )); + tracing::debug!("Read {} files from working directory", final_state.len()); final_state }; @@ -3098,11 +3097,11 @@ pub fn reconstruct_working_log_after_reset( })? 
}; - debug_log(&format!( + tracing::debug!( "Built old_head VA with {} files, {} prompts", old_head_va.files().len(), old_head_va.prompts().len() - )); + ); // Step 4: Build VirtualAttributions from target_commit. // @@ -3146,20 +3145,17 @@ pub fn reconstruct_working_log_after_reset( final_state.clone(), )?; - debug_log(&format!( - "Merged VAs, result has {} files", - merged_va.files().len() - )); + tracing::debug!("Merged VAs, result has {} files", merged_va.files().len()); // Step 6: Convert to INITIAL (everything is uncommitted after reset) without consulting the // live worktree again. let initial_attributions = merged_va.to_initial_working_log_only(); - debug_log(&format!( + tracing::debug!( "Generated INITIAL attributions for {} files, {} prompts", initial_attributions.files.len(), initial_attributions.prompts.len() - )); + ); // Step 7: Write INITIAL file let new_working_log = repo @@ -3180,10 +3176,10 @@ pub fn reconstruct_working_log_after_reset( repo.storage .delete_working_log_for_base_commit(old_head_sha)?; - debug_log(&format!( + tracing::debug!( "✓ Wrote INITIAL attributions to working log for {}", target_commit_sha - )); + ); Ok(()) } @@ -3474,11 +3470,11 @@ fn try_fast_path_rebase_note_remap_cached( if !tracked_paths_match_for_commit_pairs(repo, &commits_to_remap, tracked_paths)? { return Ok(false); } - debug_performance_log(&format!( + tracing::debug!( "Fast-path rebase note remap: compared tracked blobs for {} commit pairs in {}ms", commits_to_remap.len(), compare_start.elapsed().as_millis() - )); + ); // Use cached note blob OIDs and contents instead of additional git calls. 
for (original_commit, _) in &commits_to_remap { @@ -3506,20 +3502,20 @@ fn try_fast_path_rebase_note_remap_cached( let write_start = std::time::Instant::now(); crate::git::refs::notes_add_batch(repo, &remapped_note_entries)?; - debug_performance_log(&format!( + tracing::debug!( "Fast-path rebase note remap: wrote {} remapped notes in {}ms", remapped_count, write_start.elapsed().as_millis() - )); + ); - debug_log(&format!( + tracing::debug!( "Fast-path remapped authorship logs for {} commits (blob-equivalent tracked files)", remapped_count - )); - debug_performance_log(&format!( + ); + tracing::debug!( "Fast-path rebase note remap complete in {}ms", fast_path_start.elapsed().as_millis() - )); + ); Ok(true) } @@ -3537,11 +3533,11 @@ fn try_fast_path_cherry_pick_note_remap( if !tracked_paths_match_for_commit_pairs(repo, commit_pairs, tracked_paths)? { return Ok(false); } - debug_performance_log(&format!( + tracing::debug!( "Fast-path cherry-pick note remap: compared tracked blobs for {} commit pairs in {}ms", commit_pairs.len(), compare_start.elapsed().as_millis() - )); + ); let source_commits: Vec = commit_pairs .iter() @@ -3549,11 +3545,11 @@ fn try_fast_path_cherry_pick_note_remap( .collect(); let note_oid_lookup_start = std::time::Instant::now(); let source_note_blob_oids = note_blob_oids_for_commits(repo, &source_commits)?; - debug_performance_log(&format!( + tracing::debug!( "Fast-path cherry-pick note remap: resolved {} note blob oids in {}ms", source_note_blob_oids.len(), note_oid_lookup_start.elapsed().as_millis() - )); + ); if source_note_blob_oids.len() != source_commits.len() { return Ok(false); } @@ -3596,20 +3592,20 @@ fn try_fast_path_cherry_pick_note_remap( let write_start = std::time::Instant::now(); crate::git::refs::notes_add_batch(repo, &remapped_note_entries)?; - debug_performance_log(&format!( + tracing::debug!( "Fast-path cherry-pick note remap: wrote {} remapped notes in {}ms", remapped_count, write_start.elapsed().as_millis() - )); + ); - 
debug_log(&format!( + tracing::debug!( "Fast-path remapped authorship logs for {} cherry-picked commits (blob-equivalent tracked files)", remapped_count - )); - debug_performance_log(&format!( + ); + tracing::debug!( "Fast-path cherry-pick note remap complete in {}ms", fast_path_start.elapsed().as_millis() - )); + ); Ok(true) } diff --git a/src/authorship/stats.rs b/src/authorship/stats.rs index 20e9eed39..ad8bcedbe 100644 --- a/src/authorship/stats.rs +++ b/src/authorship/stats.rs @@ -4,7 +4,6 @@ use crate::authorship::transcript::Message; use crate::error::GitAiError; use crate::git::refs::get_authorship; use crate::git::repository::Repository; -use crate::utils::debug_log; use serde::{Deserialize, Serialize}; use std::collections::{BTreeMap, HashMap}; @@ -77,10 +76,11 @@ pub fn stats_command( (target, name) }; - debug_log(&format!( + tracing::debug!( "Stats command found commit: {} refname: {}", - target, refname - )); + target, + refname + ); let stats = stats_for_commit_stats(repo, &target, ignore_patterns)?; diff --git a/src/authorship/virtual_attribution.rs b/src/authorship/virtual_attribution.rs index 2e06e0c46..80268a441 100644 --- a/src/authorship/virtual_attribution.rs +++ b/src/authorship/virtual_attribution.rs @@ -2199,18 +2199,13 @@ pub fn restore_stashed_va( new_head: &str, stashed_va: VirtualAttributions, ) { - use crate::utils::debug_log; - - debug_log(&format!( - "Restoring stashed VA: {} -> {}", - old_head, new_head - )); + tracing::debug!("Restoring stashed VA: {} -> {}", old_head, new_head); // Get the files that were in the stashed VA let stashed_files: Vec = stashed_va.files(); if stashed_files.is_empty() { - debug_log("Stashed VA has no files, nothing to restore"); + tracing::debug!("Stashed VA has no files, nothing to restore"); return; } @@ -2227,10 +2222,10 @@ pub fn restore_stashed_va( // file may contain conflict markers. We keep "ours" (stashed VA) lines // so the attribution merge operates on clean content. 
let clean_content = if content_has_conflict_markers(&content) { - debug_log(&format!( + tracing::debug!( "Conflict markers detected in {}, stripping for VA merge", file_path - )); + ); strip_conflict_markers_keep_ours(&content) } else { content @@ -2241,7 +2236,7 @@ pub fn restore_stashed_va( } if working_files.is_empty() { - debug_log("No working files to restore attributions for"); + tracing::debug!("No working files to restore attributions for"); return; } @@ -2253,7 +2248,7 @@ pub fn restore_stashed_va( ) { Ok(va) => va, Err(e) => { - debug_log(&format!("Failed to build new VA: {}, using empty", e)); + tracing::debug!("Failed to build new VA: {}, using empty", e); VirtualAttributions::new( repository.clone(), new_head.to_string(), @@ -2268,7 +2263,7 @@ pub fn restore_stashed_va( let merged_va = match merge_attributions_favoring_first(stashed_va, new_va, working_files) { Ok(va) => va, Err(e) => { - debug_log(&format!("Failed to merge VirtualAttributions: {}", e)); + tracing::debug!("Failed to merge VirtualAttributions: {}", e); return; } }; @@ -2293,10 +2288,7 @@ pub fn restore_stashed_va( let working_log = match repository.storage.working_log_for_base_commit(new_head) { Ok(wl) => wl, Err(e) => { - debug_log(&format!( - "Failed to get working log for {}: {}", - new_head, e - )); + tracing::debug!("Failed to get working log for {}: {}", new_head, e); return; } }; @@ -2310,14 +2302,14 @@ pub fn restore_stashed_va( initial_attributions.humans, initial_file_contents, ) { - debug_log(&format!("Failed to write INITIAL attributions: {}", e)); + tracing::debug!("Failed to write INITIAL attributions: {}", e); return; } - debug_log(&format!( - "✓ Restored AI attributions to INITIAL for new HEAD {}", + tracing::debug!( + "Restored AI attributions to INITIAL for new HEAD {}", &new_head[..8.min(new_head.len())] - )); + ); } } diff --git a/src/commands/checkpoint.rs b/src/commands/checkpoint.rs index 924f84074..44ac04a9e 100644 --- a/src/commands/checkpoint.rs +++ 
b/src/commands/checkpoint.rs @@ -18,7 +18,7 @@ use crate::error::GitAiError; use crate::git::repo_storage::PersistedWorkingLog; use crate::git::repository::Repository; use crate::git::status::{EntryKind, StatusCode}; -use crate::utils::{debug_log, normalize_to_posix}; +use crate::utils::normalize_to_posix; use futures::stream::{self, StreamExt}; use serde::{Deserialize, Serialize}; use sha2::{Digest, Sha256}; @@ -263,13 +263,13 @@ fn cleanup_failed_captured_checkpoint_prepare( if let Err(cleanup_error) = fs::remove_dir_all(capture_dir) && cleanup_error.kind() != std::io::ErrorKind::NotFound { - debug_log(&format!( + tracing::debug!( "failed cleaning up incomplete captured checkpoint {} at {} after error {}: {}", capture_id, capture_dir.display(), error, cleanup_error - )); + ); } } @@ -374,7 +374,7 @@ pub(crate) fn run_with_base_commit_override_with_policy( base_override_resolution_policy: BaseOverrideResolutionPolicy, ) -> Result<(usize, usize, usize), GitAiError> { let checkpoint_start = Instant::now(); - debug_log("[BENCHMARK] Starting checkpoint run"); + tracing::debug!("[BENCHMARK] Starting checkpoint run"); let resolved = resolve_live_checkpoint_execution( repo, kind, @@ -384,10 +384,10 @@ pub(crate) fn run_with_base_commit_override_with_policy( base_override_resolution_policy, )?; let Some(resolved) = resolved else { - debug_log(&format!( + tracing::debug!( "[BENCHMARK] Total checkpoint run took {:?}", checkpoint_start.elapsed() - )); + ); return Ok((0, 0, 0)); }; @@ -630,10 +630,10 @@ fn resolve_live_checkpoint_execution( let storage_start = Instant::now(); let repo_storage = repo.storage.clone(); let mut working_log = repo_storage.working_log_for_base_commit(&base_commit)?; - debug_log(&format!( + tracing::debug!( "[BENCHMARK] Storage initialization took {:?}", storage_start.elapsed() - )); + ); if is_pre_commit && base_commit_override.is_none() { let has_no_ai_edits = working_log @@ -648,7 +648,7 @@ fn resolve_live_checkpoint_execution( && 
!Config::get().get_feature_flags().inter_commit_move && !has_explicit_ai_agent_context { - debug_log("No AI edits in pre-commit checkpoint, skipping"); + tracing::debug!("No AI edits in pre-commit checkpoint, skipping"); return Ok(None); } } @@ -665,10 +665,10 @@ fn resolve_live_checkpoint_execution( let has_explicit_target_paths = explicit_capture_target_paths(kind, agent_run_result).is_some(); let pathspec_start = Instant::now(); let filtered_pathspec = filtered_pathspecs_for_agent_run_result(repo, kind, agent_run_result); - debug_log(&format!( + tracing::debug!( "[BENCHMARK] Pathspec filtering took {:?}", pathspec_start.elapsed() - )); + ); // Base-override replays already provide the exact file list and content snapshot that // should be checkpointed. Re-running git status here turns daemon commit replay into a @@ -687,10 +687,10 @@ fn resolve_live_checkpoint_execution( &ignore_matcher, ) { Ok(Some(resolved)) => { - debug_log(&format!( + tracing::debug!( "[BENCHMARK] Reusing {} explicit dirty file(s) for base override checkpoint", resolved.files.len() - )); + ); return Ok(Some(resolved)); } Ok(None) => { @@ -747,11 +747,11 @@ fn resolve_live_checkpoint_execution( is_pre_commit && filtered_pathspec.is_some(), &ignore_matcher, )?; - debug_log(&format!( + tracing::debug!( "[BENCHMARK] get_all_tracked_files found {} files, took {:?}", files.len(), files_start.elapsed() - )); + ); let dirty_files = files .iter() @@ -792,11 +792,11 @@ fn execute_resolved_checkpoint( let read_checkpoints_start = Instant::now(); let mut checkpoints = working_log.read_all_checkpoints()?; - debug_log(&format!( + tracing::debug!( "[BENCHMARK] Reading {} checkpoints took {:?}", checkpoints.len(), read_checkpoints_start.elapsed() - )); + ); // Reject KnownHuman checkpoints that arrive within KNOWN_HUMAN_MIN_SECS_AFTER_AI // seconds of an AI checkpoint on any of the same files. 
These are likely spurious @@ -815,21 +815,21 @@ fn execute_resolved_checkpoint( && cp.entries.iter().any(|e| resolved.files.contains(&e.file)) }); if too_soon { - debug_log(&format!( + tracing::debug!( "[KnownHuman] Rejected: fired within {}s of an AI checkpoint on the same file", KNOWN_HUMAN_MIN_SECS_AFTER_AI - )); + ); return Ok((0, 0, 0)); } } let save_states_start = Instant::now(); let file_content_hashes = save_current_file_states(&working_log, &resolved.files)?; - debug_log(&format!( + tracing::debug!( "[BENCHMARK] save_current_file_states for {} files took {:?}", resolved.files.len(), save_states_start.elapsed() - )); + ); let hash_compute_start = Instant::now(); let mut ordered_hashes: Vec<_> = file_content_hashes.iter().collect(); @@ -841,10 +841,10 @@ fn execute_resolved_checkpoint( combined_hasher.update(hash.as_bytes()); } let combined_hash = format!("{:x}", combined_hasher.finalize()); - debug_log(&format!( + tracing::debug!( "[BENCHMARK] Hash computation took {:?}", hash_compute_start.elapsed() - )); + ); let entries_start = Instant::now(); let (entries, file_stats) = smol::block_on(get_checkpoint_entries( @@ -860,11 +860,11 @@ fn execute_resolved_checkpoint( is_pre_commit, Some(resolved.base_commit.as_str()), ))?; - debug_log(&format!( + tracing::debug!( "[BENCHMARK] get_checkpoint_entries generated {} entries, took {:?}", entries.len(), entries_start.elapsed() - )); + ); if !entries.is_empty() { let checkpoint_create_start = Instant::now(); @@ -902,10 +902,10 @@ fn execute_resolved_checkpoint( }); } } - debug_log(&format!( + tracing::debug!( "[BENCHMARK] Checkpoint creation took {:?}", checkpoint_create_start.elapsed() - )); + ); if kind.is_ai() && checkpoint.agent_id.is_some() @@ -916,10 +916,7 @@ fn execute_resolved_checkpoint( None, ) { - debug_log(&format!( - "[Warning] Failed to upsert prompt to database: {}", - e - )); + tracing::debug!("[Warning] Failed to upsert prompt to database: {}", e); crate::observability::log_error( &e, 
Some(serde_json::json!({ @@ -931,10 +928,10 @@ fn execute_resolved_checkpoint( let append_start = Instant::now(); working_log.append_checkpoint(&checkpoint)?; - debug_log(&format!( + tracing::debug!( "[BENCHMARK] Appending checkpoint to working log took {:?}", append_start.elapsed() - )); + ); checkpoints.push(checkpoint.clone()); let attrs = @@ -1002,10 +999,10 @@ fn execute_resolved_checkpoint( } } - debug_log(&format!( + tracing::debug!( "[BENCHMARK] Total checkpoint run took {:?}", checkpoint_start.elapsed() - )); + ); Ok((entries.len(), resolved.files.len(), checkpoints.len())) } @@ -1197,7 +1194,7 @@ pub fn execute_captured_checkpoint( capture_id: &str, ) -> Result<(usize, usize, usize), GitAiError> { let checkpoint_start = Instant::now(); - debug_log("[BENCHMARK] Starting captured checkpoint replay"); + tracing::debug!("[BENCHMARK] Starting captured checkpoint replay"); let manifest = load_captured_checkpoint_manifest(capture_id)?; validate_captured_checkpoint_manifest_repo(repo, &manifest)?; @@ -1265,10 +1262,10 @@ fn get_status_of_files( let status_start = Instant::now(); let statuses = repo.status(edited_filepaths_option, skip_untracked)?; - debug_log(&format!( + tracing::debug!( "[BENCHMARK] git status call took {:?}", status_start.elapsed() - )); + ); for entry in statuses { // Skip ignored files @@ -1355,10 +1352,10 @@ fn get_all_tracked_files( let normalized_path = normalize_to_posix(file); // Filter out paths outside the repository to prevent git command failures if !is_path_in_repo(&normalized_path) { - debug_log(&format!( + tracing::debug!( "Skipping INITIAL file outside repository: {}", normalized_path - )); + ); continue; } if should_ignore_file_with_matcher(&normalized_path, ignore_matcher) { @@ -1368,10 +1365,10 @@ fn get_all_tracked_files( files.insert(normalized_path); } } - debug_log(&format!( + tracing::debug!( "[BENCHMARK] Reading INITIAL attributions in get_all_tracked_files took {:?}", initial_read_start.elapsed() - )); + ); let 
checkpoints_read_start = Instant::now(); if let Ok(working_log_data) = working_log.read_all_checkpoints() { @@ -1381,10 +1378,10 @@ fn get_all_tracked_files( let normalized_path = normalize_to_posix(&entry.file); // Filter out paths outside the repository to prevent git command failures if !is_path_in_repo(&normalized_path) { - debug_log(&format!( + tracing::debug!( "Skipping checkpoint file outside repository: {}", normalized_path - )); + ); continue; } if should_ignore_file_with_matcher(&normalized_path, ignore_matcher) { @@ -1399,10 +1396,10 @@ fn get_all_tracked_files( } } } - debug_log(&format!( + tracing::debug!( "[BENCHMARK] Reading checkpoints in get_all_tracked_files took {:?}", checkpoints_read_start.elapsed() - )); + ); let has_ai_checkpoints = if let Ok(working_log_data) = working_log.read_all_checkpoints() { working_log_data.iter().any(|checkpoint| { @@ -1418,10 +1415,10 @@ fn get_all_tracked_files( } else { get_status_of_files(repo, working_log, files, false, ignore_matcher)? 
}; - debug_log(&format!( + tracing::debug!( "[BENCHMARK] get_status_of_files in get_all_tracked_files took {:?}", status_files_start.elapsed() - )); + ); // Ensure to always include all dirty files if let Some(ref dirty_files) = working_log.dirty_files { @@ -1430,10 +1427,10 @@ fn get_all_tracked_files( let normalized_path = normalize_to_posix(file_path); // Filter out paths outside the repository to prevent git command failures if !is_path_in_repo(&normalized_path) { - debug_log(&format!( + tracing::debug!( "Skipping dirty file outside repository: {}", normalized_path - )); + ); continue; } if should_ignore_file_with_matcher(&normalized_path, ignore_matcher) { @@ -1752,11 +1749,11 @@ fn get_checkpoint_entry_for_file( Some((line_authors, prompt_records)) }; - debug_log(&format!( + tracing::debug!( "[BENCHMARK] Blame for {} took {:?}", file_path, blame_start.elapsed() - )); + ); // Add blame results for lines NOT covered by INITIAL if let Some((blames, _)) = ai_blame { @@ -1870,11 +1867,11 @@ fn get_checkpoint_entry_for_file( content: ¤t_content, ts, })?; - debug_log(&format!( + tracing::debug!( "[BENCHMARK] Processing file {} took {:?}", file_path, file_start.elapsed() - )); + ); Ok(Some((entry, stats))) } @@ -1907,18 +1904,18 @@ async fn get_checkpoint_entries( }) .collect(); let initial_attributions = initial_data.files; - debug_log(&format!( + tracing::debug!( "[BENCHMARK] Reading initial attributions took {:?}", initial_read_start.elapsed() - )); + ); let precompute_start = Instant::now(); let (previous_file_state_by_file, ai_touched_files) = build_previous_file_state_maps(previous_checkpoints, &initial_attributions); - debug_log(&format!( + tracing::debug!( "[BENCHMARK] Precomputing previous state maps took {:?}", precompute_start.elapsed() - )); + ); // Determine author_id based on checkpoint kind and agent_id let author_id = match kind { @@ -2019,20 +2016,20 @@ async fn get_checkpoint_entries( tasks.push(task); } - debug_log(&format!( + tracing::debug!( 
"[BENCHMARK] Spawning {} tasks took {:?}", tasks.len(), spawn_start.elapsed() - )); + ); // Await all tasks concurrently let await_start = Instant::now(); let results = futures::future::join_all(tasks).await; - debug_log(&format!( + tracing::debug!( "[BENCHMARK] Awaiting {} tasks took {:?}", results.len(), await_start.elapsed() - )); + ); // Process results let process_start = Instant::now(); @@ -2049,15 +2046,15 @@ async fn get_checkpoint_entries( Err(e) => return Err(e), } } - debug_log(&format!( + tracing::debug!( "[BENCHMARK] Processing {} results took {:?}", results_count, process_start.elapsed() - )); - debug_log(&format!( + ); + tracing::debug!( "[BENCHMARK] get_checkpoint_entries function total took {:?}", entries_fn_start.elapsed() - )); + ); Ok((entries, file_stats)) } @@ -2096,11 +2093,11 @@ fn make_entry_for_file( &CheckpointKind::Human.to_str(), ts - 1, ); - debug_log(&format!( + tracing::debug!( "[BENCHMARK] attribute_unattributed_ranges for {} took {:?}", file_path, fill_start.elapsed() - )); + ); let update_start = Instant::now(); let new_attributions = tracker.update_attributions_for_checkpoint( @@ -2111,11 +2108,11 @@ fn make_entry_for_file( ts, is_ai_checkpoint, )?; - debug_log(&format!( + tracing::debug!( "[BENCHMARK] update_attributions for {} took {:?}", file_path, update_start.elapsed() - )); + ); // TODO Consider discarding any "uncontentious" attributions for the human author. Any human attributions that do not share a line with any other author's attributions can be discarded. 
// let filtered_attributions = crate::authorship::attribution_tracker::discard_uncontentious_attributions_for_author(&new_attributions, &CheckpointKind::Human.to_str()); @@ -2127,20 +2124,20 @@ fn make_entry_for_file( content, is_ai_checkpoint, ); - debug_log(&format!( + tracing::debug!( "[BENCHMARK] attributions_to_line_attributions for {} took {:?}", file_path, line_attr_start.elapsed() - )); + ); // Compute line stats while we already have both contents in memory let stats_start = Instant::now(); let line_stats = compute_file_line_stats(previous_content, content); - debug_log(&format!( + tracing::debug!( "[BENCHMARK] compute_file_line_stats for {} took {:?}", file_path, stats_start.elapsed() - )); + ); let entry = WorkingLogEntry::new( file_path.to_string(), diff --git a/src/commands/checkpoint_agent/agent_presets.rs b/src/commands/checkpoint_agent/agent_presets.rs index d6142bf39..218fa66b0 100644 --- a/src/commands/checkpoint_agent/agent_presets.rs +++ b/src/commands/checkpoint_agent/agent_presets.rs @@ -85,10 +85,12 @@ pub(crate) fn prepare_agent_bash_pre_hook( ) { Ok(result) => result.captured_checkpoint.map(|info| info.capture_id), Err(error) => { - crate::utils::debug_log(&format!( + tracing::debug!( "Bash pre-hook snapshot failed for {} session {}: {}", - agent_id.tool, session_id, error - )); + agent_id.tool, + session_id, + error + ); None } } @@ -321,7 +323,7 @@ impl AgentCheckpointPreset for ClaudePreset { } Ok(BashCheckpointAction::TakePreSnapshot) => None, // shouldn't happen on post Err(e) => { - crate::utils::debug_log(&format!("Bash tool post-hook error: {}", e)); + tracing::debug!("Bash tool post-hook error: {}", e); None } } @@ -1035,7 +1037,7 @@ impl AgentCheckpointPreset for GeminiPreset { } Ok(BashCheckpointAction::TakePreSnapshot) => None, Err(e) => { - crate::utils::debug_log(&format!("Bash tool post-hook error: {}", e)); + tracing::debug!("Bash tool post-hook error: {}", e); None } } @@ -1205,7 +1207,7 @@ impl AgentCheckpointPreset for 
ContinueCliPreset { } Ok(BashCheckpointAction::TakePreSnapshot) => None, Err(e) => { - crate::utils::debug_log(&format!("Bash tool post-hook error: {}", e)); + tracing::debug!("Bash tool post-hook error: {}", e); None } } @@ -1453,7 +1455,7 @@ impl AgentCheckpointPreset for CodexPreset { .captured_checkpoint_id(); if pre_hook_captured_id.is_some() { - crate::utils::debug_log( + tracing::debug!( "Codex PreToolUse captured a bash pre-snapshot but will skip emitting a checkpoint", ); } @@ -1484,7 +1486,7 @@ impl AgentCheckpointPreset for CodexPreset { Ok(BashCheckpointAction::Fallback) => None, Ok(BashCheckpointAction::TakePreSnapshot) => None, Err(e) => { - crate::utils::debug_log(&format!("Codex bash post-hook error: {}", e)); + tracing::debug!("Codex bash post-hook error: {}", e); None } }; @@ -3356,7 +3358,7 @@ impl AgentCheckpointPreset for DroidPreset { } Ok(BashCheckpointAction::TakePreSnapshot) => None, Err(e) => { - crate::utils::debug_log(&format!("Bash tool post-hook error: {}", e)); + tracing::debug!("Bash tool post-hook error: {}", e); None } } diff --git a/src/commands/checkpoint_agent/amp_preset.rs b/src/commands/checkpoint_agent/amp_preset.rs index ba674471f..ad81aeced 100644 --- a/src/commands/checkpoint_agent/amp_preset.rs +++ b/src/commands/checkpoint_agent/amp_preset.rs @@ -206,7 +206,7 @@ impl AgentCheckpointPreset for AmpPreset { } Ok(BashCheckpointAction::TakePreSnapshot) => None, Err(e) => { - crate::utils::debug_log(&format!("Bash tool post-hook error: {}", e)); + tracing::debug!("Bash tool post-hook error: {}", e); None } } diff --git a/src/commands/checkpoint_agent/bash_tool.rs b/src/commands/checkpoint_agent/bash_tool.rs index 193a734d8..8d2ad4985 100644 --- a/src/commands/checkpoint_agent/bash_tool.rs +++ b/src/commands/checkpoint_agent/bash_tool.rs @@ -14,7 +14,7 @@ use crate::daemon::control_api::ControlRequest; use crate::daemon::{DaemonConfig, send_control_request_with_timeout}; use crate::error::GitAiError; use 
crate::git::find_repository_in_path; -use crate::utils::{checkpoint_delegation_enabled, debug_log, normalize_to_posix}; +use crate::utils::{checkpoint_delegation_enabled, normalize_to_posix}; use ignore::WalkBuilder; use ignore::gitignore::{Gitignore, GitignoreBuilder}; use serde::{Deserialize, Serialize}; @@ -379,10 +379,12 @@ pub fn checkpoint_context_from_active_bash( latest_context: Some(active_context), .. } => { - debug_log(&format!( + tracing::debug!( "active bash context: found {} session {} tool_use_id {}", - active_context.agent_id.tool, active_context.session_id, active_context.tool_use_id - )); + active_context.agent_id.tool, + active_context.session_id, + active_context.tool_use_id + ); Some(( CheckpointKind::AiAgent, Some(active_context.into_agent_run_result(repo_working_dir.to_string())), @@ -392,7 +394,7 @@ pub fn checkpoint_context_from_active_bash( has_inflight_snapshot: true, latest_context: None, } => { - debug_log("active bash context: falling back to unscoped AI checkpoint"); + tracing::debug!("active bash context: falling back to unscoped AI checkpoint"); Some((CheckpointKind::AiAgent, None)) } ActiveBashSnapshotScan { @@ -582,10 +584,7 @@ pub fn build_gitignore(repo_root: &Path) -> Result { .collect(); for pattern in &shared_patterns { if let Err(e) = builder.add_line(None, pattern) { - debug_log(&format!( - "Warning: failed to add ignore pattern '{}': {}", - pattern, e - )); + tracing::debug!("Warning: failed to add ignore pattern '{}': {}", pattern, e); } } @@ -693,7 +692,7 @@ pub fn snapshot( elapsed_ms, entries.len() ); - debug_log(&msg); + tracing::debug!("{}", msg); crate::observability::log_message( &msg, "warning", @@ -709,7 +708,7 @@ pub fn snapshot( let entry = match result { Ok(e) => e, Err(e) => { - debug_log(&format!("Walker error: {}", e)); + tracing::debug!("Walker error: {}", e); continue; } }; @@ -744,10 +743,10 @@ pub fn snapshot( if !is_wm_covered(mtime_ns, effective_worktree_wm, &per_file_wm, &posix_key) { 
entries.insert(normalized, stat); if entries.len() > MAX_TRACKED_FILES { - debug_log(&format!( + tracing::debug!( "Snapshot: exceeded MAX_TRACKED_FILES ({}), skipping stat-diff", MAX_TRACKED_FILES - )); + ); return Err(GitAiError::Generic(format!( "repo has more than {} recently-modified files; skipping stat-diff", MAX_TRACKED_FILES @@ -756,16 +755,16 @@ pub fn snapshot( } } Err(e) => { - debug_log(&format!("Failed to stat {}: {}", abs_path.display(), e)); + tracing::debug!("Failed to stat {}: {}", abs_path.display(), e); } } } - debug_log(&format!( + tracing::debug!( "Snapshot: {} files scanned in {}ms", entries.len(), start.elapsed().as_millis() - )); + ); Ok(StatSnapshot { entries, @@ -840,11 +839,11 @@ pub fn save_snapshot(snapshot: &StatSnapshot) -> Result<(), GitAiError> { fs::write(&path, data).map_err(GitAiError::IoError)?; - debug_log(&format!( + tracing::debug!( "Saved pre-snapshot: {} ({} entries)", path.display(), snapshot.entries.len() - )); + ); Ok(()) } @@ -872,11 +871,11 @@ pub fn load_and_consume_snapshot( // Consume: remove the file after a successful read. 
let _ = fs::remove_file(&path); - debug_log(&format!( + tracing::debug!( "Loaded pre-snapshot: {} ({} entries)", path.display(), snapshot.entries.len() - )); + ); Ok(Some(snapshot)) } @@ -895,7 +894,7 @@ pub fn cleanup_stale_snapshots(repo_root: &Path) -> Result<(), GitAiError> { && let Ok(age) = now.duration_since(modified) && age.as_secs() > SNAPSHOT_STALE_SECS { - debug_log(&format!("Cleaning stale snapshot: {}", path.display())); + tracing::debug!("Cleaning stale snapshot: {}", path.display()); let _ = fs::remove_file(&path); } } @@ -993,22 +992,22 @@ fn capture_file_contents(repo_root: &Path, file_paths: &[PathBuf]) -> HashMap f, Err(e) => { - debug_log(&format!( + tracing::debug!( "Skipping unreadable file for capture: {}: {}", rel_path.display(), e - )); + ); continue; } }; if let Ok(meta) = file.metadata() && meta.len() > MAX_CAPTURE_FILE_SIZE { - debug_log(&format!( + tracing::debug!( "Skipping large file for capture: {} ({} bytes)", rel_path.display(), meta.len() - )); + ); continue; } let mut content = String::new(); @@ -1018,11 +1017,11 @@ fn capture_file_contents(repo_root: &Path, file_paths: &[PathBuf]) -> HashMap { - debug_log(&format!( + tracing::debug!( "Skipping non-UTF8/unreadable file for capture: {}: {}", rel_path.display(), e - )); + ); } } } @@ -1065,10 +1064,10 @@ fn query_daemon_watermarks(repo_working_dir: &str) -> Option { .ok()?; if !response.ok { - debug_log(&format!( + tracing::debug!( "Daemon watermark query returned error: {}", response.error.as_deref().unwrap_or("unknown") - )); + ); return None; } @@ -1142,7 +1141,9 @@ fn attempt_pre_hook_capture( repo_root: &Path, ) -> Option { if !checkpoint_delegation_enabled() { - debug_log("Pre-hook capture: async checkpoint delegation not enabled, skipping capture"); + tracing::debug!( + "Pre-hook capture: async checkpoint delegation not enabled, skipping capture" + ); return None; } @@ -1153,15 +1154,15 @@ fn attempt_pre_hook_capture( let stale_files = find_stale_files(snap); if 
stale_files.is_empty() { - debug_log("Pre-hook capture: no stale files found, skipping"); + tracing::debug!("Pre-hook capture: no stale files found, skipping"); return None; } if stale_files.len() > MAX_STALE_FILES_FOR_CAPTURE { - debug_log(&format!( + tracing::debug!( "Pre-hook capture: {} stale files exceeds limit of {}, skipping", stale_files.len(), MAX_STALE_FILES_FOR_CAPTURE, - )); + ); return None; } @@ -1170,7 +1171,7 @@ fn attempt_pre_hook_capture( let repo = match find_repository_in_path(&repo_working_dir) { Ok(r) => r, Err(e) => { - debug_log(&format!("Pre-hook capture: failed to open repo: {}", e)); + tracing::debug!("Pre-hook capture: failed to open repo: {}", e); return None; } }; @@ -1205,24 +1206,25 @@ fn attempt_pre_hook_capture( None, // base_commit_override ) { Ok(Some(capture)) => { - debug_log(&format!( + tracing::debug!( "Pre-hook captured checkpoint prepared: {} ({} files)", - capture.capture_id, capture.file_count, - )); + capture.capture_id, + capture.file_count, + ); Some(CapturedCheckpointInfo { capture_id: capture.capture_id, repo_working_dir: capture.repo_working_dir, }) } Ok(None) => { - debug_log("Pre-hook capture: prepare_captured_checkpoint returned None"); + tracing::debug!("Pre-hook capture: prepare_captured_checkpoint returned None"); None } Err(e) => { - debug_log(&format!( + tracing::debug!( "Pre-hook capture: prepare_captured_checkpoint failed: {}", e - )); + ); None } } @@ -1243,7 +1245,9 @@ fn attempt_post_hook_capture( // Only attempt capture when delegation is enabled — captured checkpoint // files are consumed by the daemon; without it they would accumulate. 
if !checkpoint_delegation_enabled() { - debug_log("Post-hook capture: async checkpoint delegation not enabled, skipping capture"); + tracing::debug!( + "Post-hook capture: async checkpoint delegation not enabled, skipping capture" + ); return None; } @@ -1257,7 +1261,7 @@ fn attempt_post_hook_capture( .partition(|p| repo_root.join(p.as_str()).exists()); if existing_paths.is_empty() { - debug_log("Post-hook capture: no existing changed files to capture, skipping"); + tracing::debug!("Post-hook capture: no existing changed files to capture, skipping"); return None; } @@ -1270,7 +1274,7 @@ fn attempt_post_hook_capture( let repo = match find_repository_in_path(&repo_working_dir) { Ok(r) => r, Err(e) => { - debug_log(&format!("Post-hook capture: failed to open repo: {}", e)); + tracing::debug!("Post-hook capture: failed to open repo: {}", e); return None; } }; @@ -1301,24 +1305,25 @@ fn attempt_post_hook_capture( None, // base_commit_override ) { Ok(Some(capture)) => { - debug_log(&format!( + tracing::debug!( "Post-hook captured checkpoint prepared: {} ({} files)", - capture.capture_id, capture.file_count, - )); + capture.capture_id, + capture.file_count, + ); Some(CapturedCheckpointInfo { capture_id: capture.capture_id, repo_working_dir: capture.repo_working_dir, }) } Ok(None) => { - debug_log("Post-hook capture: prepare_captured_checkpoint returned None"); + tracing::debug!("Post-hook capture: prepare_captured_checkpoint returned None"); None } Err(e) => { - debug_log(&format!( + tracing::debug!( "Post-hook capture: prepare_captured_checkpoint failed: {}", e - )); + ); None } } @@ -1399,11 +1404,7 @@ fn resolve_bash_tool_use_id( if let Some(path) = bash_sidecar_path(repo_root, session_id) && let Err(e) = fs::write(&path, &id) { - debug_log(&format!( - "bash sidecar write failed ({}): {}", - path.display(), - e - )); + tracing::debug!("bash sidecar write failed ({}): {}", path.display(), e); } id } @@ -1421,7 +1422,7 @@ fn resolve_bash_tool_use_id( } // No sidecar 
found — caller passed "bash" for both hooks without a // matching pre-hook; fall back to the literal key (best effort). - debug_log("bash sidecar not found for post-hook; using 'bash' as fallback key"); + tracing::debug!("bash sidecar not found for post-hook; using 'bash' as fallback key"); FALLBACK.to_string() } } @@ -1454,7 +1455,7 @@ fn handle_bash_pre_tool_use_internal( "bash_tool: {} exceeded {}ms hook limit ({}ms elapsed); abandoning", $label, hook_timeout.as_millis(), elapsed_ms ); - debug_log(&msg); + tracing::debug!("{}", msg); crate::observability::log_message( &msg, "warning", @@ -1488,12 +1489,12 @@ fn handle_bash_pre_tool_use_internal( Ok(mut snap) => { snap.inflight_agent_context = inflight_agent_context; save_snapshot(&snap)?; - debug_log(&format!( + tracing::debug!( "Pre-snapshot stored for invocation {} ({} entries, effective_wm={:?})", invocation_key, snap.entries.len(), snap.effective_worktree_wm, - )); + ); if hook_start.elapsed() >= hook_timeout { hook_timeout_fallback!("pre-hook after snapshot"); @@ -1509,10 +1510,7 @@ fn handle_bash_pre_tool_use_internal( }) } Err(e) => { - debug_log(&format!( - "Pre-snapshot failed: {}; will use fallback on post", - e - )); + tracing::debug!("Pre-snapshot failed: {}; will use fallback on post", e); // Don't fail the tool call; post-hook will use fallback path Ok(BashToolResult { action: BashCheckpointAction::TakePreSnapshot, @@ -1568,7 +1566,7 @@ pub fn handle_bash_tool( "bash_tool: {} exceeded {}ms hook limit ({}ms elapsed); abandoning", $label, hook_timeout.as_millis(), elapsed_ms ); - debug_log(&msg); + tracing::debug!("{}", msg); crate::observability::log_message( &msg, "warning", @@ -1624,23 +1622,23 @@ pub fn handle_bash_tool( let diff_result = diff(&pre, &post); if diff_result.is_empty() { - debug_log(&format!( + tracing::debug!( "Bash tool {}: no changes detected", invocation_key - )); + ); Ok(BashToolResult { action: BashCheckpointAction::NoChanges, captured_checkpoint: None, }) } else { let paths = 
diff_result.all_changed_paths(); - debug_log(&format!( + tracing::debug!( "Bash tool {}: {} files changed ({} created, {} modified)", invocation_key, paths.len(), diff_result.created.len(), diff_result.modified.len(), - )); + ); if hook_start.elapsed() >= hook_timeout { hook_timeout_fallback!("post-hook after snapshot"); @@ -1657,7 +1655,7 @@ pub fn handle_bash_tool( } } Err(e) => { - debug_log(&format!("Post-snapshot failed: {}; returning fallback", e)); + tracing::debug!("Post-snapshot failed: {}; returning fallback", e); Ok(BashToolResult { action: BashCheckpointAction::Fallback, captured_checkpoint: None, @@ -1669,10 +1667,10 @@ pub fn handle_bash_tool( // Pre-snapshot lost (process restart, etc.) — return fallback. // We do not call git status here: it is extremely slow on large // monorepos and cannot be relied on at this point in the flow. - debug_log(&format!( + tracing::debug!( "Pre-snapshot not found for {}; returning fallback (no git status)", invocation_key - )); + ); Ok(BashToolResult { action: BashCheckpointAction::Fallback, captured_checkpoint: None, diff --git a/src/commands/checkpoint_agent/opencode_preset.rs b/src/commands/checkpoint_agent/opencode_preset.rs index 982c3d88b..5b0906793 100644 --- a/src/commands/checkpoint_agent/opencode_preset.rs +++ b/src/commands/checkpoint_agent/opencode_preset.rs @@ -267,7 +267,7 @@ impl AgentCheckpointPreset for OpenCodePreset { } Ok(BashCheckpointAction::TakePreSnapshot) => None, Err(e) => { - crate::utils::debug_log(&format!("Bash tool post-hook error: {}", e)); + tracing::debug!("Bash tool post-hook error: {}", e); None } } diff --git a/src/commands/checkpoint_agent/pi_preset.rs b/src/commands/checkpoint_agent/pi_preset.rs index c813db4fc..7a87badd5 100644 --- a/src/commands/checkpoint_agent/pi_preset.rs +++ b/src/commands/checkpoint_agent/pi_preset.rs @@ -182,7 +182,7 @@ impl AgentCheckpointPreset for PiPreset { | BashCheckpointAction::TakePreSnapshot => None, }, Err(e) => { - 
crate::utils::debug_log(&format!("Pi bash post-hook error: {}", e)); + tracing::debug!("Pi bash post-hook error: {}", e); None } }; diff --git a/src/commands/ci_handlers.rs b/src/commands/ci_handlers.rs index 3383e0268..fb0537134 100644 --- a/src/commands/ci_handlers.rs +++ b/src/commands/ci_handlers.rs @@ -2,7 +2,6 @@ use crate::ci::ci_context::{CiContext, CiEvent, CiRunOptions, CiRunResult}; use crate::ci::github::{get_github_ci_context, install_github_ci_workflow}; use crate::ci::gitlab::{get_gitlab_ci_context, print_gitlab_ci_yaml}; use crate::git::repository::find_repository_in_path; -use crate::utils::debug_log; /// Print a human-readable message for a CiRunResult fn print_ci_result(result: &CiRunResult, prefix: &str) { @@ -61,10 +60,10 @@ fn handle_ci_github(args: &[String]) { let ci_context = get_github_ci_context(); match ci_context { Ok(Some(ci_context)) => { - debug_log(&format!("GitHub CI context: {:?}", ci_context)); + tracing::debug!("GitHub CI context: {:?}", ci_context); match ci_context.run() { Ok(result) => { - debug_log(&format!("GitHub CI result: {:?}", result)); + tracing::debug!("GitHub CI result: {:?}", result); print_ci_result(&result, "GitHub CI"); } Err(e) => { @@ -77,9 +76,9 @@ fn handle_ci_github(args: &[String]) { eprintln!("Error tearing down GitHub CI context: {}", e); std::process::exit(1); } - debug_log("GitHub CI context teared down"); + tracing::debug!("GitHub CI context teared down"); } else { - debug_log("Skipping teardown (--no-cleanup)"); + tracing::debug!("Skipping teardown (--no-cleanup)"); } std::process::exit(0); } @@ -121,10 +120,10 @@ fn handle_ci_gitlab(args: &[String]) { let ci_context = get_gitlab_ci_context(); match ci_context { Ok(Some(ci_context)) => { - debug_log(&format!("GitLab CI context: {:?}", ci_context)); + tracing::debug!("GitLab CI context: {:?}", ci_context); match ci_context.run() { Ok(result) => { - debug_log(&format!("GitLab CI result: {:?}", result)); + tracing::debug!("GitLab CI result: {:?}", 
result); print_ci_result(&result, "GitLab CI"); } Err(e) => { @@ -137,9 +136,9 @@ fn handle_ci_gitlab(args: &[String]) { eprintln!("Error tearing down GitLab CI context: {}", e); std::process::exit(1); } - debug_log("GitLab CI context teared down"); + tracing::debug!("GitLab CI context teared down"); } else { - debug_log("Skipping teardown (--no-cleanup)"); + tracing::debug!("Skipping teardown (--no-cleanup)"); } std::process::exit(0); } @@ -260,13 +259,13 @@ fn handle_ci_local(args: &[String]) { temp_dir: std::path::PathBuf::from("."), }; - debug_log(&format!("Local CI context: {:?}", ctx)); + tracing::debug!("Local CI context: {:?}", ctx); match ctx.run_with_options(CiRunOptions { skip_fetch_notes, skip_fetch_base, }) { Ok(result) => { - debug_log(&format!("Local CI result: {:?}", result)); + tracing::debug!("Local CI result: {:?}", result); print_ci_result(&result, "Local CI (merge)"); } Err(e) => { diff --git a/src/commands/daemon.rs b/src/commands/daemon.rs index 868824884..57f5ad4a4 100644 --- a/src/commands/daemon.rs +++ b/src/commands/daemon.rs @@ -5,9 +5,7 @@ use crate::daemon::{ }; use crate::utils::LockFile; #[cfg(windows)] -use crate::utils::{ - CREATE_BREAKAWAY_FROM_JOB, CREATE_NEW_PROCESS_GROUP, CREATE_NO_WINDOW, debug_log, -}; +use crate::utils::{CREATE_BREAKAWAY_FROM_JOB, CREATE_NEW_PROCESS_GROUP, CREATE_NO_WINDOW}; use std::io::{BufRead, BufReader, Seek, SeekFrom}; #[cfg(windows)] use std::os::windows::process::CommandExt; @@ -306,10 +304,10 @@ fn spawn_daemon_run_detached(config: &DaemonConfig) -> Result<(), String> { match child.spawn() { Ok(_) => Ok(()), Err(preferred_err) => { - debug_log(&format!( + tracing::debug!( "detached daemon spawn with CREATE_BREAKAWAY_FROM_JOB failed, retrying without it: {}", preferred_err - )); + ); child.creation_flags(CREATE_NO_WINDOW | CREATE_NEW_PROCESS_GROUP); child.spawn().map(|_| ()).map_err(|fallback_err| { format!( @@ -373,10 +371,10 @@ fn spawn_daemon_run_with_piped_stderr( match child.spawn() { Ok(c) => 
Ok(c), Err(preferred_err) => { - debug_log(&format!( + tracing::debug!( "detached daemon spawn with CREATE_BREAKAWAY_FROM_JOB failed, retrying without it: {}", preferred_err - )); + ); child.creation_flags(CREATE_NO_WINDOW | CREATE_NEW_PROCESS_GROUP); child.spawn().map_err(|fallback_err| { format!( diff --git a/src/commands/git_ai_handlers.rs b/src/commands/git_ai_handlers.rs index 2bb04584a..408e62acd 100644 --- a/src/commands/git_ai_handlers.rs +++ b/src/commands/git_ai_handlers.rs @@ -1100,7 +1100,7 @@ fn handle_checkpoint(args: &[String]) { &effective_working_dir, ) { - crate::utils::debug_log("Using active bash context for pre-commit AI checkpoint"); + tracing::debug!("Using active bash context for pre-commit AI checkpoint"); checkpoint_kind = resolved_kind; agent_run_result = resolved_agent_run_result; } @@ -1347,10 +1347,10 @@ fn run_checkpoint_via_daemon_or_local( agent_run_result.as_ref(), ) { - crate::utils::debug_log(&format!( + tracing::debug!( "Failed to update captured checkpoint agent context: {}", e - )); + ); } let request = ControlRequest::CheckpointRun { diff --git a/src/commands/git_handlers.rs b/src/commands/git_handlers.rs index 459a92012..86d9b259e 100644 --- a/src/commands/git_handlers.rs +++ b/src/commands/git_handlers.rs @@ -24,7 +24,6 @@ use std::collections::HashSet; use crate::observability::wrapper_performance_targets::log_performance_target_if_violated; #[cfg(windows)] use crate::utils::CREATE_NO_WINDOW; -use crate::utils::debug_log; #[cfg(windows)] use crate::utils::is_interactive_terminal; #[cfg(unix)] @@ -159,7 +158,7 @@ pub fn handle_git(args: &[String]) { if let crate::daemon::telemetry_handle::DaemonTelemetryInitResult::Failed(e) = crate::daemon::telemetry_handle::init_daemon_telemetry_handle() { - debug_log(&format!("wrapper: daemon telemetry init failed: {}", e)); + tracing::debug!("wrapper: daemon telemetry init failed: {}", e); } let repository = find_repository(&parsed.global_args).ok(); @@ -205,7 +204,7 @@ pub fn 
handle_git(args: &[String]) { let skip_hooks = !config.is_allowed_repository(&repository_option); if skip_hooks { - debug_log( + tracing::debug!( "Skipping git-ai hooks because repository is excluded or not in allow_repositories list", ); } @@ -488,7 +487,7 @@ fn run_pre_command_hooks( "args": parsed_args.to_invocation_vec(), }); - debug_log(&error_message); + tracing::debug!("{}", error_message); observability::log_error(&HookPanicError(error_message.clone()), Some(context)); } } @@ -593,7 +592,7 @@ fn run_post_command_hooks( "args": parsed_args.to_invocation_vec(), }); - debug_log(&error_message); + tracing::debug!("{}", error_message); observability::log_error(&HookPanicError(error_message.clone()), Some(context)); } } @@ -779,10 +778,11 @@ fn send_wrapper_pre_state_to_daemon( &wt_str, head_state_to_repo_context(pre), ) { - debug_log(&format!( + tracing::debug!( "wrapper: failed to send pre-state for {}: {}", - invocation_id, e - )); + invocation_id, + e + ); } } @@ -801,10 +801,11 @@ fn send_wrapper_post_state_to_daemon( &wt_str, head_state_to_repo_context(post), ) { - debug_log(&format!( + tracing::debug!( "wrapper: failed to send post-state for {}: {}", - invocation_id, e - )); + invocation_id, + e + ); } } diff --git a/src/commands/git_hook_handlers.rs b/src/commands/git_hook_handlers.rs index a0d238274..d77b40af8 100644 --- a/src/commands/git_hook_handlers.rs +++ b/src/commands/git_hook_handlers.rs @@ -9,7 +9,6 @@ use crate::error::GitAiError; use crate::git::repository::Repository; -use crate::utils::debug_log; use serde::{Deserialize, Serialize}; use std::fs; use std::path::{Path, PathBuf}; @@ -362,11 +361,11 @@ fn read_repo_hook_state(path: &Path) -> Result, GitAiError match serde_json::from_str::(&content) { Ok(state) => Ok(Some(state)), Err(err) => { - debug_log(&format!( + tracing::debug!( "ignoring invalid repo hook state {}: {}", path.display(), err - )); + ); Ok(None) } } diff --git a/src/commands/hooks/checkout_hooks.rs 
b/src/commands/hooks/checkout_hooks.rs index 0cbee7334..798c2bf33 100644 --- a/src/commands/hooks/checkout_hooks.rs +++ b/src/commands/hooks/checkout_hooks.rs @@ -3,7 +3,6 @@ use crate::commands::git_handlers::CommandHooksContext; use crate::commands::hooks::commit_hooks::get_commit_default_author; use crate::git::cli_parser::ParsedGitInvocation; use crate::git::repository::Repository; -use crate::utils::debug_log; pub fn pre_checkout_hook( parsed_args: &ParsedGitInvocation, @@ -46,12 +45,14 @@ fn capture_va_for_merge( repository: &Repository, command_hooks_context: &mut CommandHooksContext, ) { - debug_log("Detected checkout --merge with uncommitted changes, capturing VirtualAttributions"); + tracing::debug!( + "Detected checkout --merge with uncommitted changes, capturing VirtualAttributions" + ); let head_sha = match repository.head().ok().and_then(|h| h.target().ok()) { Some(sha) => sha, None => { - debug_log("Failed to get HEAD for VA capture"); + tracing::debug!("Failed to get HEAD for VA capture"); return; } }; @@ -64,17 +65,17 @@ fn capture_va_for_merge( ) { Ok(va) => { if !va.attributions.is_empty() { - debug_log(&format!( + tracing::debug!( "Captured VA with {} files for checkout --merge preservation", va.attributions.len() - )); + ); command_hooks_context.stashed_va = Some(va); } else { - debug_log("No attributions in working log to preserve"); + tracing::debug!("No attributions in working log to preserve"); } } Err(e) => { - debug_log(&format!("Failed to build VirtualAttributions: {}", e)); + tracing::debug!("Failed to build VirtualAttributions: {}", e); } } } @@ -95,7 +96,7 @@ pub fn post_checkout_hook( // old_head) is indistinguishable from a conflict exit by exit code alone. The // HEAD-unchanged guard in Case 2 below handles that case for merge failures. 
if !exit_status.success() && !is_merge { - debug_log("Checkout failed, skipping working log handling"); + tracing::debug!("Checkout failed, skipping working log handling"); return; } @@ -114,10 +115,10 @@ pub fn post_checkout_hook( // Case 1: Pathspec checkout (git checkout branch -- file.txt) // HEAD unchanged, specific files reverted - remove their attributions if !pathspecs.is_empty() { - debug_log(&format!( + tracing::debug!( "Pathspec checkout detected, removing attributions for: {:?}", pathspecs - )); + ); remove_attributions_for_pathspecs(repository, &old_head, &pathspecs); return; } @@ -127,16 +128,16 @@ pub fn post_checkout_hook( // due to a bad branch name (HEAD stays, no VA to restore), and --merge checkouts // that succeed but stay on the same branch (no branch switch occurred). if old_head == new_head { - debug_log("HEAD unchanged after checkout, no working log handling needed"); + tracing::debug!("HEAD unchanged after checkout, no working log handling needed"); return; } // Case 3: Force checkout - delete working log (changes discarded) if is_force_checkout(parsed_args) { - debug_log(&format!( + tracing::debug!( "Force checkout detected, deleting working log for {}", &old_head - )); + ); let _ = repository .storage .delete_working_log_for_base_commit(&old_head); @@ -172,22 +173,21 @@ pub fn post_checkout_hook( }); if let Some(stashed_va) = stashed_va { - debug_log("Restoring VA after checkout --merge"); + tracing::debug!("Restoring VA after checkout --merge"); let _ = repository .storage .delete_working_log_for_base_commit(&old_head); restore_stashed_va(repository, &old_head, &new_head, stashed_va); return; } - debug_log("checkout --merge: no VA to restore, falling through to working log migration"); + tracing::debug!( + "checkout --merge: no VA to restore, falling through to working log migration" + ); // Fall through to Case 5 so the working log is renamed to the new HEAD. 
} // Case 5: Normal branch checkout - migrate working log - debug_log(&format!( - "Checkout changed HEAD: {} -> {}", - &old_head, &new_head - )); + tracing::debug!("Checkout changed HEAD: {} -> {}", &old_head, &new_head); let _ = repository.storage.rename_working_log(&old_head, &new_head); } @@ -196,7 +196,7 @@ fn remove_attributions_for_pathspecs(repository: &Repository, head: &str, pathsp let working_log = match repository.storage.working_log_for_base_commit(head) { Ok(wl) => wl, Err(e) => { - debug_log(&format!("Failed to get working log for {}: {}", head, e)); + tracing::debug!("Failed to get working log for {}: {}", head, e); return; } }; diff --git a/src/commands/hooks/cherry_pick_hooks.rs b/src/commands/hooks/cherry_pick_hooks.rs index 4a799f1fa..9cc5959a5 100644 --- a/src/commands/hooks/cherry_pick_hooks.rs +++ b/src/commands/hooks/cherry_pick_hooks.rs @@ -4,55 +4,56 @@ use crate::commands::hooks::commit_hooks::get_commit_default_author; use crate::git::cli_parser::{ParsedGitInvocation, is_dry_run}; use crate::git::repository::Repository; use crate::git::rewrite_log::RewriteLogEvent; -use crate::utils::debug_log; pub fn pre_cherry_pick_hook( parsed_args: &ParsedGitInvocation, repository: &mut Repository, _command_hooks_context: &mut CommandHooksContext, ) { - debug_log("=== CHERRY-PICK PRE-COMMAND HOOK ==="); + tracing::debug!("=== CHERRY-PICK PRE-COMMAND HOOK ==="); // Check if we're continuing an existing cherry-pick or starting a new one let cherry_pick_head = repository.path().join("CHERRY_PICK_HEAD"); let sequencer_dir = repository.path().join("sequencer"); let cherry_pick_in_progress = cherry_pick_head.exists() || sequencer_dir.exists(); - debug_log(&format!( + tracing::debug!( "Cherry-pick state check: CHERRY_PICK_HEAD={}, sequencer={}", cherry_pick_head.exists(), sequencer_dir.exists() - )); + ); // Check if there's an active Start event in the log that matches let has_active_start = has_active_cherry_pick_start_event(repository); let is_continuing 
= cherry_pick_in_progress && has_active_start; - debug_log(&format!( + tracing::debug!( "Cherry-pick state: in_progress={}, has_active_start={}, is_continuing={}", - cherry_pick_in_progress, has_active_start, is_continuing - )); + cherry_pick_in_progress, + has_active_start, + is_continuing + ); if !is_continuing { // Starting a new cherry-pick - capture original HEAD and log Start event if let Ok(head) = repository.head() { if let Ok(target) = head.target() { - debug_log(&format!("Starting new cherry-pick from HEAD: {}", target)); + tracing::debug!("Starting new cherry-pick from HEAD: {}", target); // Parse source commits from args let source_commits = parse_cherry_pick_commits(repository, &parsed_args.command_args); - debug_log(&format!( + tracing::debug!( "Cherry-picking {} commits: {:?}", source_commits.len(), source_commits - )); + ); // Fix #952: If source_commits is empty (e.g. bad args), skip writing // the Start event to prevent state corruption for subsequent operations. if source_commits.is_empty() { - debug_log( + tracing::debug!( "No valid source commits parsed, skipping CherryPickStart event (prevents state corruption from bad args)", ); return; @@ -68,15 +69,15 @@ pub fn pre_cherry_pick_hook( // Write to rewrite log match repository.storage.append_rewrite_event(start_event) { - Ok(_) => debug_log("✓ Logged CherryPickStart event"), - Err(e) => debug_log(&format!("✗ Failed to log CherryPickStart event: {}", e)), + Ok(_) => tracing::debug!("✓ Logged CherryPickStart event"), + Err(e) => tracing::debug!("✗ Failed to log CherryPickStart event: {}", e), } } } else { - debug_log("Could not read HEAD for new cherry-pick"); + tracing::debug!("Could not read HEAD for new cherry-pick"); } } else { - debug_log( + tracing::debug!( "Continuing existing cherry-pick (will read original head from log in post-hook)", ); // Fix #951: If --skip is being used, update source_commits to remove @@ -93,30 +94,30 @@ pub fn post_cherry_pick_hook( exit_status: 
std::process::ExitStatus, repository: &mut Repository, ) { - debug_log("=== CHERRY-PICK POST-COMMAND HOOK ==="); - debug_log(&format!("Exit status: {}", exit_status)); + tracing::debug!("=== CHERRY-PICK POST-COMMAND HOOK ==="); + tracing::debug!("Exit status: {}", exit_status); // Check if cherry-pick is still in progress let cherry_pick_head = repository.path().join("CHERRY_PICK_HEAD"); let sequencer_dir = repository.path().join("sequencer"); let is_in_progress = cherry_pick_head.exists() || sequencer_dir.exists(); - debug_log(&format!( + tracing::debug!( "Cherry-pick state check: CHERRY_PICK_HEAD={}, sequencer={}", cherry_pick_head.exists(), sequencer_dir.exists() - )); + ); if is_in_progress { // Cherry-pick still in progress (conflict or not finished) - debug_log( + tracing::debug!( "⏸ Cherry-pick still in progress, waiting for completion (conflict or multi-step)", ); return; } if is_dry_run(&parsed_args.command_args) { - debug_log("Skipping cherry-pick post-hook for dry-run"); + tracing::debug!("Skipping cherry-pick post-hook for dry-run"); return; } @@ -124,35 +125,32 @@ pub fn post_cherry_pick_hook( // Try to find the original head from the rewrite log let original_head = find_cherry_pick_start_event_original_head(repository); - debug_log(&format!("Original head from log: {:?}", original_head)); + tracing::debug!("Original head from log: {:?}", original_head); if !exit_status.success() { // Cherry-pick was aborted or failed - log Abort event if let Some(orig_head) = original_head { - debug_log(&format!("✗ Cherry-pick aborted/failed from {}", orig_head)); + tracing::debug!("✗ Cherry-pick aborted/failed from {}", orig_head); let abort_event = RewriteLogEvent::cherry_pick_abort( crate::git::rewrite_log::CherryPickAbortEvent::new(orig_head), ); match repository.storage.append_rewrite_event(abort_event) { - Ok(_) => debug_log("✓ Logged CherryPickAbort event"), - Err(e) => debug_log(&format!("✗ Failed to log CherryPickAbort event: {}", e)), + Ok(_) => 
tracing::debug!("✓ Logged CherryPickAbort event"), + Err(e) => tracing::debug!("✗ Failed to log CherryPickAbort event: {}", e), } } else { - debug_log("✗ Cherry-pick failed but couldn't determine original head"); + tracing::debug!("✗ Cherry-pick failed but couldn't determine original head"); } return; } // Cherry-pick completed successfully! - debug_log("✓ Cherry-pick completed successfully"); + tracing::debug!("✓ Cherry-pick completed successfully"); if let Some(original_head) = original_head { - debug_log(&format!( - "Processing completed cherry-pick from {}", - original_head - )); + tracing::debug!("Processing completed cherry-pick from {}", original_head); process_completed_cherry_pick(repository, &original_head, parsed_args); } else { - debug_log("⚠ Cherry-pick completed but couldn't determine original head"); + tracing::debug!("⚠ Cherry-pick completed but couldn't determine original head"); } } @@ -324,15 +322,15 @@ fn handle_cherry_pick_skip(repository: &mut Repository) { let skipped_sha = match std::fs::read_to_string(&cherry_pick_head) { Ok(content) => content.trim().to_string(), Err(_) => { - debug_log("Could not read CHERRY_PICK_HEAD for skip handling"); + tracing::debug!("Could not read CHERRY_PICK_HEAD for skip handling"); return; } }; - debug_log(&format!( + tracing::debug!( "Skipping commit {}, updating CherryPickStart source_commits", skipped_sha - )); + ); // Find the most recent CherryPickStart event (events are newest-first). // Stop if we hit a Complete or Abort (no active cherry-pick). 
@@ -359,7 +357,7 @@ fn handle_cherry_pick_skip(repository: &mut Repository) { let start = match current_start { Some(s) => s, None => { - debug_log("No active CherryPickStart event found for skip handling"); + tracing::debug!("No active CherryPickStart event found for skip handling"); return; } }; @@ -373,19 +371,19 @@ fn handle_cherry_pick_skip(repository: &mut Repository) { .collect(); if updated_source_commits.len() == start.source_commits.len() { - debug_log(&format!( + tracing::debug!( "Skipped commit {} not found in source_commits, no update needed", skipped_sha - )); + ); return; } - debug_log(&format!( + tracing::debug!( "Updated source_commits: {} -> {} (removed {})", start.source_commits.len(), updated_source_commits.len(), skipped_sha - )); + ); // Write a new CherryPickStart event with updated source_commits. // Since find_cherry_pick_start_event_source_commits returns the most recent @@ -397,8 +395,8 @@ fn handle_cherry_pick_skip(repository: &mut Repository) { )); match repository.storage.append_rewrite_event(new_start_event) { - Ok(_) => debug_log("Updated CherryPickStart event for --skip"), - Err(e) => debug_log(&format!("Failed to update CherryPickStart event: {}", e)), + Ok(_) => tracing::debug!("Updated CherryPickStart event for --skip"), + Err(e) => tracing::debug!("Failed to update CherryPickStart event: {}", e), } } @@ -407,75 +405,76 @@ fn process_completed_cherry_pick( original_head: &str, parsed_args: &ParsedGitInvocation, ) { - debug_log(&format!( + tracing::debug!( "--- Processing completed cherry-pick from {} ---", original_head - )); + ); // Get the new HEAD let new_head = match repository.head() { Ok(head) => match head.target() { Ok(target) => { - debug_log(&format!("New HEAD: {}", target)); + tracing::debug!("New HEAD: {}", target); target } Err(e) => { - debug_log(&format!("✗ Failed to get HEAD target: {}", e)); + tracing::debug!("✗ Failed to get HEAD target: {}", e); return; } }, Err(e) => { - debug_log(&format!("✗ Failed to get 
HEAD: {}", e)); + tracing::debug!("✗ Failed to get HEAD: {}", e); return; } }; // If HEAD didn't change, nothing to do if original_head == new_head { - debug_log("Cherry-pick resulted in no changes"); + tracing::debug!("Cherry-pick resulted in no changes"); return; } // Get source commits from the Start event let source_commits = match find_cherry_pick_start_event_source_commits(repository) { Some(commits) => { - debug_log(&format!("Source commits from log: {:?}", commits)); + tracing::debug!("Source commits from log: {:?}", commits); commits } None => { - debug_log("✗ Could not find source commits from CherryPickStart event"); + tracing::debug!("✗ Could not find source commits from CherryPickStart event"); return; } }; // Build commit mappings - debug_log(&format!( + tracing::debug!( "Building commit mappings: {} -> {}", - original_head, new_head - )); + original_head, + new_head + ); let new_commits = match build_cherry_pick_commit_mappings(repository, original_head, &new_head) { Ok(commits) => { - debug_log(&format!( + tracing::debug!( "✓ Built mappings: {} source commits -> {} new commits", source_commits.len(), commits.len() - )); + ); commits } Err(e) => { - debug_log(&format!("✗ Failed to build cherry-pick mappings: {}", e)); + tracing::debug!("✗ Failed to build cherry-pick mappings: {}", e); return; } }; if new_commits.is_empty() { - debug_log("No commits to rewrite authorship for"); + tracing::debug!("No commits to rewrite authorship for"); return; } - debug_log(&format!("Source commits: {:?}", source_commits)); - debug_log(&format!("New commits: {:?}", new_commits)); + tracing::debug!("Source commits: {:?}", source_commits); + tracing::debug!("New commits: {:?}", new_commits); let cherry_pick_event = RewriteLogEvent::cherry_pick_complete( crate::git::rewrite_log::CherryPickCompleteEvent::new( @@ -486,7 +485,7 @@ fn process_completed_cherry_pick( ), ); - debug_log("Creating CherryPickComplete event and rewriting authorship..."); + tracing::debug!("Creating 
CherryPickComplete event and rewriting authorship..."); let commit_author = get_commit_default_author(repository, &parsed_args.command_args); repository.handle_rewrite_log_event( @@ -496,7 +495,7 @@ fn process_completed_cherry_pick( true, // save to log ); - debug_log("✓ Cherry-pick authorship rewrite complete"); + tracing::debug!("✓ Cherry-pick authorship rewrite complete"); } fn build_cherry_pick_commit_mappings( @@ -511,10 +510,7 @@ fn build_cherry_pick_commit_mappings( let mut new_commits = new_commits; new_commits.reverse(); - debug_log(&format!( - "Cherry-pick created {} new commits", - new_commits.len() - )); + tracing::debug!("Cherry-pick created {} new commits", new_commits.len()); Ok(new_commits) } diff --git a/src/commands/hooks/clone_hooks.rs b/src/commands/hooks/clone_hooks.rs index 84aa68803..49810c9ad 100644 --- a/src/commands/hooks/clone_hooks.rs +++ b/src/commands/hooks/clone_hooks.rs @@ -2,7 +2,6 @@ use crate::config; use crate::git::cli_parser::{ParsedGitInvocation, extract_clone_target_directory}; use crate::git::repository::find_repository_in_path; use crate::git::sync_authorship::fetch_authorship_notes; -use crate::utils::debug_log; pub fn post_clone_hook(parsed_args: &ParsedGitInvocation, exit_status: std::process::ExitStatus) { // Only run if clone succeeded @@ -14,26 +13,27 @@ pub fn post_clone_hook(parsed_args: &ParsedGitInvocation, exit_status: std::proc let target_dir = match extract_clone_target_directory(&parsed_args.command_args) { Some(dir) => dir, None => { - debug_log( + tracing::debug!( "failed to extract target directory from clone command; skipping authorship fetch", ); return; } }; - debug_log(&format!( + tracing::debug!( "post-clone: attempting to fetch authorship notes for cloned repository at: {}", target_dir - )); + ); // Open the newly cloned repository let repository = match find_repository_in_path(&target_dir) { Ok(repo) => repo, Err(e) => { - debug_log(&format!( + tracing::debug!( "failed to open cloned repository at 
{}: {}; skipping authorship fetch", - target_dir, e - )); + target_dir, + e + ); return; } }; @@ -41,7 +41,7 @@ pub fn post_clone_hook(parsed_args: &ParsedGitInvocation, exit_status: std::proc // Check if the newly cloned repository is allowed by allow_repositories config let config = config::Config::get(); if !config.is_allowed_repository(&Some(repository.clone())) { - debug_log( + tracing::debug!( "Skipping authorship fetch for cloned repository: not in allow_repositories list", ); return; @@ -58,12 +58,12 @@ pub fn post_clone_hook(parsed_args: &ParsedGitInvocation, exit_status: std::proc // Fetch authorship notes from origin if let Err(e) = fetch_authorship_notes(&repository, "origin") { - debug_log(&format!("authorship fetch from origin failed: {}", e)); + tracing::debug!("authorship fetch from origin failed: {}", e); if !suppress_output { eprintln!(", failed."); } } else { - debug_log("successfully fetched authorship notes from origin"); + tracing::debug!("successfully fetched authorship notes from origin"); if !suppress_output { eprintln!(", done."); } diff --git a/src/commands/hooks/commit_hooks.rs b/src/commands/hooks/commit_hooks.rs index 8edd65026..7e8aff18e 100644 --- a/src/commands/hooks/commit_hooks.rs +++ b/src/commands/hooks/commit_hooks.rs @@ -3,7 +3,6 @@ use crate::commands::git_handlers::CommandHooksContext; use crate::git::cli_parser::{ParsedGitInvocation, is_dry_run}; use crate::git::repository::Repository; use crate::git::rewrite_log::RewriteLogEvent; -use crate::utils::debug_log; pub fn commit_pre_command_hook( parsed_args: &ParsedGitInvocation, @@ -51,7 +50,7 @@ pub fn commit_post_command_hook( if let Some(pre_commit_hook_result) = command_hooks_context.pre_commit_hook_result && !pre_commit_hook_result { - debug_log("Skipping git-ai post-commit hook because pre-commit hook failed"); + tracing::debug!("Skipping git-ai post-commit hook because pre-commit hook failed"); return; } diff --git a/src/commands/hooks/fetch_hooks.rs 
b/src/commands/hooks/fetch_hooks.rs index f19a2a488..48014aaae 100644 --- a/src/commands/hooks/fetch_hooks.rs +++ b/src/commands/hooks/fetch_hooks.rs @@ -7,7 +7,6 @@ use crate::git::cli_parser::{ParsedGitInvocation, is_dry_run}; use crate::git::repository::{Repository, exec_git, find_repository}; use crate::git::rewrite_log::RewriteLogEvent; use crate::git::sync_authorship::{fetch_authorship_notes, fetch_remote_from_args}; -use crate::utils::debug_log; pub fn fetch_pull_pre_command_hook( parsed_args: &ParsedGitInvocation, @@ -24,7 +23,7 @@ pub fn fetch_pull_pre_command_hook( let remote = match fetch_remote_from_args(repository, parsed_args) { Ok(remote) => remote, Err(_) => { - debug_log("failed to extract remote for authorship fetch; skipping"); + tracing::debug!("failed to extract remote for authorship fetch; skipping"); return None; } }; @@ -34,17 +33,14 @@ pub fn fetch_pull_pre_command_hook( // Spawn background thread to fetch authorship notes in parallel with main fetch Some(std::thread::spawn(move || { - debug_log(&format!( - "started fetching authorship notes from remote: {}", - remote - )); + tracing::debug!("started fetching authorship notes from remote: {}", remote); // Recreate repository in the background thread if let Ok(repo) = find_repository(&global_args) { if let Err(e) = fetch_authorship_notes(&repo, &remote) { - debug_log(&format!("authorship fetch failed: {}", e)); + tracing::debug!("authorship fetch failed: {}", e); } } else { - debug_log("failed to open repository for authorship fetch"); + tracing::debug!("failed to open repository for authorship fetch"); } })) } @@ -68,10 +64,12 @@ pub fn pull_pre_command_hook( let config = get_pull_rebase_autostash_config(parsed_args, repository); let has_changes = has_uncommitted_changes(repository); - debug_log(&format!( + tracing::debug!( "pull pre-hook: rebase={}, autostash={}, has_changes={}", - config.is_rebase, config.is_autostash, has_changes - )); + config.is_rebase, + config.is_autostash, + 
has_changes + ); // Write RebaseStart so that `git rebase --continue` (after conflict) // can recover the correct original_head from the rewrite log. @@ -85,16 +83,13 @@ pub fn pull_pre_command_hook( crate::git::rewrite_log::RebaseStartEvent::new_with_onto(head_sha, false, None), ); if let Err(e) = repository.storage.append_rewrite_event(start_event) { - debug_log(&format!( - "pull pre-hook: failed to write RebaseStart: {}", - e - )); + tracing::debug!("pull pre-hook: failed to write RebaseStart: {}", e); } } // Only capture VA if we're in rebase+autostash mode AND have uncommitted changes if config.is_rebase && config.is_autostash && has_changes { - debug_log( + tracing::debug!( "Detected pull --rebase --autostash with uncommitted changes, capturing VirtualAttributions", ); @@ -102,7 +97,7 @@ pub fn pull_pre_command_hook( let head_sha = match repository.head().ok().and_then(|h| h.target().ok()) { Some(sha) => sha, None => { - debug_log("Failed to get HEAD for VA capture"); + tracing::debug!("Failed to get HEAD for VA capture"); return; } }; @@ -116,17 +111,17 @@ pub fn pull_pre_command_hook( ) { Ok(va) => { if !va.attributions.is_empty() { - debug_log(&format!( + tracing::debug!( "Captured VA with {} files for autostash preservation", va.attributions.len() - )); + ); command_hooks_context.stashed_va = Some(va); } else { - debug_log("No attributions in working log to preserve"); + tracing::debug!("No attributions in working log to preserve"); } } Err(e) => { - debug_log(&format!("Failed to build VirtualAttributions: {}", e)); + tracing::debug!("Failed to build VirtualAttributions: {}", e); } } } @@ -180,10 +175,10 @@ pub fn pull_post_command_hook( // consecutive RebaseStart events (find_rebase_start_event would // return the stale one with onto_head=None). 
cancel_speculative_rebase_start(repository); - debug_log(&format!( + tracing::debug!( "Pull --rebase paused (conflict); logging RebaseStart with original_head={}", original_head - )); + ); let onto_head = resolve_pull_rebase_onto_head(repository); let start_event = RewriteLogEvent::rebase_start( crate::git::rewrite_log::RebaseStartEvent::new_with_onto( @@ -224,7 +219,7 @@ pub fn pull_post_command_hook( }; if old_head == new_head { - debug_log("HEAD unchanged, skipping post-pull authorship handling"); + tracing::debug!("HEAD unchanged, skipping post-pull authorship handling"); if config.is_rebase { cancel_speculative_rebase_start(repository); } @@ -245,10 +240,7 @@ pub fn pull_post_command_hook( // Check for fast-forward pull and rename working log if applicable if was_fast_forward_pull(repository, &new_head) { - debug_log(&format!( - "Fast-forward detected: {} -> {}", - old_head, new_head - )); + tracing::debug!("Fast-forward detected: {} -> {}", old_head, new_head); let _ = repository.storage.rename_working_log(&old_head, &new_head); return; } @@ -286,10 +278,11 @@ fn was_fast_forward_pull(repository: &Repository, expected_new_head: &str) -> bo // Verify the SHA matches our expected new HEAD if sha != expected_new_head { - debug_log(&format!( + tracing::debug!( "Reflog SHA {} doesn't match expected HEAD {}", - sha, expected_new_head - )); + sha, + expected_new_head + ); return false; } @@ -380,10 +373,11 @@ fn has_uncommitted_changes(repository: &Repository) -> bool { /// Rewrite authorship for committed local changes that were rebased by `git pull --rebase`. /// Uses the same commit-mapping and rewrite logic as `rebase_hooks::process_completed_rebase`. 
fn process_completed_pull_rebase(repository: &mut Repository, original_head: &str, new_head: &str) { - debug_log(&format!( + tracing::debug!( "Processing pull --rebase authorship: {} -> {}", - original_head, new_head - )); + original_head, + new_head + ); let onto_head = resolve_pull_rebase_onto_head(repository); let (original_commits, new_commits) = match build_rebase_commit_mappings( @@ -393,25 +387,25 @@ fn process_completed_pull_rebase(repository: &mut Repository, original_head: &st onto_head.as_deref(), ) { Ok(mappings) => { - debug_log(&format!( + tracing::debug!( "Pull rebase mappings: {} original -> {} new commits", mappings.0.len(), mappings.1.len() - )); + ); mappings } Err(e) => { - debug_log(&format!("Failed to build pull rebase mappings: {}", e)); + tracing::debug!("Failed to build pull rebase mappings: {}", e); return; } }; if original_commits.is_empty() { - debug_log("No committed changes to rewrite authorship for after pull --rebase"); + tracing::debug!("No committed changes to rewrite authorship for after pull --rebase"); return; } if new_commits.is_empty() { - debug_log("No newly rebased commits to rewrite authorship for after pull --rebase"); + tracing::debug!("No newly rebased commits to rewrite authorship for after pull --rebase"); return; } @@ -432,7 +426,7 @@ fn process_completed_pull_rebase(repository: &mut Repository, original_head: &st true, // save to log ); - debug_log("Pull --rebase authorship rewrite complete"); + tracing::debug!("Pull --rebase authorship rewrite complete"); } /// Cancel the speculative `RebaseStart` written by the pre-hook by appending @@ -440,7 +434,7 @@ fn process_completed_pull_rebase(repository: &mut Repository, original_head: &st /// next standalone `git rebase` operation. 
fn cancel_speculative_rebase_start(repository: &Repository) { if let Some(original_head) = &repository.pre_command_base_commit { - debug_log("pull post-hook: cancelling speculative RebaseStart (no conflict)"); + tracing::debug!("pull post-hook: cancelling speculative RebaseStart (no conflict)"); let abort_event = RewriteLogEvent::rebase_abort( crate::git::rewrite_log::RebaseAbortEvent::new(original_head.clone()), ); diff --git a/src/commands/hooks/merge_hooks.rs b/src/commands/hooks/merge_hooks.rs index 00683584a..cc3c8112c 100644 --- a/src/commands/hooks/merge_hooks.rs +++ b/src/commands/hooks/merge_hooks.rs @@ -5,7 +5,6 @@ use crate::{ repository::Repository, rewrite_log::{MergeSquashEvent, RewriteLogEvent}, }, - utils::debug_log, }; pub fn post_merge_hook( @@ -37,10 +36,7 @@ pub fn post_merge_hook( let staged_file_blobs = match repository.get_all_staged_file_blob_oids() { Ok(staged_file_blobs) => staged_file_blobs, Err(error) => { - debug_log(&format!( - "Failed to snapshot merge --squash staged blobs: {}", - error - )); + tracing::debug!("Failed to snapshot merge --squash staged blobs: {}", error); return; } }; diff --git a/src/commands/hooks/plumbing_rewrite_hooks.rs b/src/commands/hooks/plumbing_rewrite_hooks.rs index 704b87c48..811a1736d 100644 --- a/src/commands/hooks/plumbing_rewrite_hooks.rs +++ b/src/commands/hooks/plumbing_rewrite_hooks.rs @@ -1,7 +1,6 @@ use crate::commands::hooks::rebase_hooks::build_rebase_commit_mappings; use crate::git::repository::Repository; use crate::git::rewrite_log::{RebaseCompleteEvent, RewriteLogEvent}; -use crate::utils::debug_log; pub(crate) fn apply_wrapper_plumbing_rewrite_if_possible( repository: &mut Repository, @@ -24,11 +23,11 @@ pub(crate) fn apply_wrapper_plumbing_rewrite_if_possible( return false; } - debug_log(&format!( + tracing::debug!( "Applying wrapper plumbing rewrite handling: {} original commits -> {} new commits", original_commits.len(), new_commits.len() - )); + ); repository.handle_rewrite_log_event( 
RewriteLogEvent::rebase_complete(RebaseCompleteEvent::new( diff --git a/src/commands/hooks/push_hooks.rs b/src/commands/hooks/push_hooks.rs index 7d3de45f8..42909ecd6 100644 --- a/src/commands/hooks/push_hooks.rs +++ b/src/commands/hooks/push_hooks.rs @@ -3,7 +3,6 @@ use crate::commands::upgrade; use crate::git::cli_parser::{ParsedGitInvocation, is_dry_run}; use crate::git::repository::{Repository, find_repository}; use crate::git::sync_authorship::push_authorship_notes; -use crate::utils::debug_log; pub fn push_pre_command_hook( parsed_args: &ParsedGitInvocation, @@ -18,10 +17,7 @@ pub fn push_pre_command_hook( let remote = resolve_push_remote(parsed_args, repository); if let Some(remote) = remote { - debug_log(&format!( - "started pushing authorship notes to remote: {}", - remote - )); + tracing::debug!("started pushing authorship notes to remote: {}", remote); // Clone what we need for the background thread let global_args = repository.global_args_for_exec(); @@ -30,15 +26,15 @@ pub fn push_pre_command_hook( // Recreate repository in the background thread if let Ok(repo) = find_repository(&global_args) { if let Err(e) = push_authorship_notes(&repo, &remote) { - debug_log(&format!("authorship push failed: {}", e)); + tracing::debug!("authorship push failed: {}", e); } } else { - debug_log("failed to open repository for authorship push"); + tracing::debug!("failed to open repository for authorship push"); } })) } else { // No remotes configured; skip silently - debug_log("no remotes found for authorship push; skipping"); + tracing::debug!("no remotes found for authorship push; skipping"); None } } @@ -51,17 +47,14 @@ pub fn run_pre_push_hook_managed(parsed_args: &ParsedGitInvocation, repository: } let Some(remote) = resolve_push_remote(parsed_args, repository) else { - debug_log("no remotes found for authorship push; skipping"); + tracing::debug!("no remotes found for authorship push; skipping"); return; }; - debug_log(&format!( - "started pushing authorship notes 
to remote: {}", - remote - )); + tracing::debug!("started pushing authorship notes to remote: {}", remote); if let Err(e) = push_authorship_notes(repository, &remote) { - debug_log(&format!("authorship push failed: {}", e)); + tracing::debug!("authorship push failed: {}", e); } } diff --git a/src/commands/hooks/rebase_hooks.rs b/src/commands/hooks/rebase_hooks.rs index 9ddbfcc40..576ae5289 100644 --- a/src/commands/hooks/rebase_hooks.rs +++ b/src/commands/hooks/rebase_hooks.rs @@ -4,34 +4,35 @@ use crate::commands::hooks::commit_hooks::get_commit_default_author; use crate::git::cli_parser::{ParsedGitInvocation, RebaseArgsSummary, is_dry_run}; use crate::git::repository::Repository; use crate::git::rewrite_log::RewriteLogEvent; -use crate::utils::debug_log; pub fn pre_rebase_hook( parsed_args: &ParsedGitInvocation, repository: &mut Repository, command_hooks_context: &mut CommandHooksContext, ) { - debug_log("=== REBASE PRE-COMMAND HOOK ==="); + tracing::debug!("=== REBASE PRE-COMMAND HOOK ==="); // Check if we're continuing an existing rebase or starting a new one let rebase_dir = repository.path().join("rebase-merge"); let rebase_apply_dir = repository.path().join("rebase-apply"); let rebase_in_progress = rebase_dir.exists() || rebase_apply_dir.exists(); - debug_log(&format!( + tracing::debug!( "Rebase directories check: rebase-merge={}, rebase-apply={}", rebase_dir.exists(), rebase_apply_dir.exists() - )); + ); // Check if there's an active Start event in the log that matches let has_active_start = has_active_rebase_start_event(repository); let is_continuing = rebase_in_progress && has_active_start; - debug_log(&format!( + tracing::debug!( "Rebase state: in_progress={}, has_active_start={}, is_continuing={}", - rebase_in_progress, has_active_start, is_continuing - )); + rebase_in_progress, + has_active_start, + is_continuing + ); if !is_continuing { // Starting a new rebase - capture original HEAD and log Start event @@ -40,10 +41,12 @@ pub fn pre_rebase_hook( let 
original_head = resolve_rebase_original_head(parsed_args, repository) .unwrap_or_else(|| target.clone()); let onto_head = resolve_rebase_onto_head(parsed_args, repository); - debug_log(&format!( + tracing::debug!( "Starting new rebase from HEAD: {} (resolved original_head: {}, onto: {:?})", - target, original_head, onto_head - )); + target, + original_head, + onto_head + ); command_hooks_context.rebase_original_head = Some(original_head.clone()); command_hooks_context.rebase_onto = onto_head.clone(); @@ -51,7 +54,7 @@ pub fn pre_rebase_hook( let is_interactive = parsed_args.has_command_flag("-i") || parsed_args.has_command_flag("--interactive"); - debug_log(&format!("Interactive rebase: {}", is_interactive)); + tracing::debug!("Interactive rebase: {}", is_interactive); // Log the rebase start event let start_event = RewriteLogEvent::rebase_start( @@ -64,15 +67,17 @@ pub fn pre_rebase_hook( // Write to rewrite log match repository.storage.append_rewrite_event(start_event) { - Ok(_) => debug_log("✓ Logged RebaseStart event"), - Err(e) => debug_log(&format!("✗ Failed to log RebaseStart event: {}", e)), + Ok(_) => tracing::debug!("✓ Logged RebaseStart event"), + Err(e) => tracing::debug!("✗ Failed to log RebaseStart event: {}", e), } } } else { - debug_log("Could not read HEAD for new rebase"); + tracing::debug!("Could not read HEAD for new rebase"); } } else { - debug_log("Continuing existing rebase (will read original head from log in post-hook)"); + tracing::debug!( + "Continuing existing rebase (will read original head from log in post-hook)" + ); } } @@ -82,28 +87,30 @@ pub fn handle_rebase_post_command( exit_status: std::process::ExitStatus, repository: &mut Repository, ) { - debug_log("=== REBASE POST-COMMAND HOOK ==="); - debug_log(&format!("Exit status: {}", exit_status)); + tracing::debug!("=== REBASE POST-COMMAND HOOK ==="); + tracing::debug!("Exit status: {}", exit_status); // Check if rebase is still in progress let rebase_dir = 
repository.path().join("rebase-merge"); let rebase_apply_dir = repository.path().join("rebase-apply"); let is_in_progress = rebase_dir.exists() || rebase_apply_dir.exists(); - debug_log(&format!( + tracing::debug!( "Rebase directories check: rebase-merge={}, rebase-apply={}", rebase_dir.exists(), rebase_apply_dir.exists() - )); + ); if is_in_progress { // Rebase still in progress (conflict or not finished) - debug_log("⏸ Rebase still in progress, waiting for completion (conflict or multi-step)"); + tracing::debug!( + "⏸ Rebase still in progress, waiting for completion (conflict or multi-step)" + ); return; } if is_dry_run(&parsed_args.command_args) { - debug_log("Skipping rebase post-hook for dry-run"); + tracing::debug!("Skipping rebase post-hook for dry-run"); return; } @@ -119,13 +126,13 @@ pub fn handle_rebase_post_command( .as_ref() .and_then(|event| event.onto_head.clone()); - debug_log(&format!( + tracing::debug!( "Original head: context={:?}, log={:?}; onto: context={:?}, log={:?}", original_head_from_context, original_head_from_log, onto_head_from_context, onto_head_from_log - )); + ); let original_head = original_head_from_context.or(original_head_from_log); let onto_head = onto_head_from_context.or(onto_head_from_log); @@ -133,27 +140,24 @@ pub fn handle_rebase_post_command( if !exit_status.success() { // Rebase was aborted or failed - log Abort event if let Some(orig_head) = original_head { - debug_log(&format!("✗ Rebase aborted/failed from {}", orig_head)); + tracing::debug!("✗ Rebase aborted/failed from {}", orig_head); let abort_event = RewriteLogEvent::rebase_abort( crate::git::rewrite_log::RebaseAbortEvent::new(orig_head), ); match repository.storage.append_rewrite_event(abort_event) { - Ok(_) => debug_log("✓ Logged RebaseAbort event"), - Err(e) => debug_log(&format!("✗ Failed to log RebaseAbort event: {}", e)), + Ok(_) => tracing::debug!("✓ Logged RebaseAbort event"), + Err(e) => tracing::debug!("✗ Failed to log RebaseAbort event: {}", e), } } 
else { - debug_log("✗ Rebase failed but couldn't determine original head"); + tracing::debug!("✗ Rebase failed but couldn't determine original head"); } return; } // Rebase completed successfully! - debug_log("✓ Rebase completed successfully"); + tracing::debug!("✓ Rebase completed successfully"); if let Some(original_head) = original_head { - debug_log(&format!( - "Processing completed rebase from {}", - original_head - )); + tracing::debug!("Processing completed rebase from {}", original_head); process_completed_rebase( repository, &original_head, @@ -161,7 +165,7 @@ pub fn handle_rebase_post_command( parsed_args, ); } else { - debug_log("⚠ Rebase completed but couldn't determine original head"); + tracing::debug!("⚠ Rebase completed but couldn't determine original head"); } } @@ -215,79 +219,79 @@ fn process_completed_rebase( onto_head: Option<&str>, parsed_args: &ParsedGitInvocation, ) { - debug_log(&format!( - "--- Processing completed rebase from {} ---", - original_head - )); + tracing::debug!("--- Processing completed rebase from {} ---", original_head); // Get the new HEAD let new_head = match repository.head() { Ok(head) => match head.target() { Ok(target) => { - debug_log(&format!("New HEAD: {}", target)); + tracing::debug!("New HEAD: {}", target); target } Err(e) => { - debug_log(&format!("✗ Failed to get HEAD target: {}", e)); + tracing::debug!("✗ Failed to get HEAD target: {}", e); return; } }, Err(e) => { - debug_log(&format!("✗ Failed to get HEAD: {}", e)); + tracing::debug!("✗ Failed to get HEAD: {}", e); return; } }; // If HEAD didn't change, nothing to do if original_head == new_head { - debug_log("Rebase resulted in no changes (fast-forward or empty)"); + tracing::debug!("Rebase resulted in no changes (fast-forward or empty)"); return; } // Build commit mappings - debug_log(&format!( + tracing::debug!( "Building commit mappings: {} -> {}", - original_head, new_head - )); + original_head, + new_head + ); let (original_commits, new_commits) = 
match build_rebase_commit_mappings(repository, original_head, &new_head, onto_head) { Ok(mappings) => { - debug_log(&format!( + tracing::debug!( "✓ Built mappings: {} original commits -> {} new commits", mappings.0.len(), mappings.1.len() - )); + ); mappings } Err(e) => { - debug_log(&format!("✗ Failed to build rebase mappings: {}", e)); + tracing::debug!("✗ Failed to build rebase mappings: {}", e); return; } }; if original_commits.is_empty() { - debug_log("No commits to rewrite authorship for"); + tracing::debug!("No commits to rewrite authorship for"); return; } if new_commits.is_empty() { - debug_log("No new rebased commits detected (all commits were skipped/already upstream)"); + tracing::debug!( + "No new rebased commits detected (all commits were skipped/already upstream)" + ); return; } - debug_log(&format!("Original commits: {:?}", original_commits)); - debug_log(&format!("New commits: {:?}", new_commits)); + tracing::debug!("Original commits: {:?}", original_commits); + tracing::debug!("New commits: {:?}", new_commits); // Determine rebase type let is_interactive = parsed_args.has_command_flag("-i") || parsed_args.has_command_flag("--interactive"); - debug_log(&format!( + tracing::debug!( "Rebase type: {}", if is_interactive { "interactive" } else { "normal" } - )); + ); let rebase_event = RewriteLogEvent::rebase_complete(crate::git::rewrite_log::RebaseCompleteEvent::new( @@ -298,7 +302,7 @@ fn process_completed_rebase( new_commits.clone(), )); - debug_log("Creating RebaseComplete event and rewriting authorship..."); + tracing::debug!("Creating RebaseComplete event and rewriting authorship..."); let commit_author = get_commit_default_author(repository, &parsed_args.command_args); repository.handle_rewrite_log_event( @@ -308,7 +312,7 @@ fn process_completed_rebase( true, // save to log ); - debug_log("✓ Rebase authorship rewrite complete"); + tracing::debug!("✓ Rebase authorship rewrite complete"); } fn original_equivalent_for_rewritten_commit( @@ -383,10 
+387,11 @@ pub(crate) fn build_rebase_commit_mappings( // If there were no original commits, there is nothing to rewrite. // Avoid walking potentially large parts of new history. if original_commits.is_empty() { - debug_log(&format!( + tracing::debug!( "Commit mapping: 0 original -> 0 new (merge_base: {}, original_base: {})", - merge_base, original_base - )); + merge_base, + original_base + ); return Ok((original_commits, Vec::new())); } @@ -402,14 +407,14 @@ pub(crate) fn build_rebase_commit_mappings( // Reverse so they're in chronological order (oldest first) new_commits.reverse(); - debug_log(&format!( + tracing::debug!( "Commit mapping: {} original -> {} new (merge_base: {}, original_base: {}, new_base: {})", original_commits.len(), new_commits.len(), merge_base, original_base, new_commits_base - )); + ); // Always pass all commits through - let the authorship rewriting logic // handle many-to-one, one-to-one, and other mapping scenarios properly diff --git a/src/commands/hooks/reset_hooks.rs b/src/commands/hooks/reset_hooks.rs index 626865724..35c910305 100644 --- a/src/commands/hooks/reset_hooks.rs +++ b/src/commands/hooks/reset_hooks.rs @@ -3,7 +3,6 @@ use crate::{ commands::hooks::commit_hooks, commands::hooks::plumbing_rewrite_hooks::apply_wrapper_plumbing_rewrite_if_possible, git::{cli_parser::ParsedGitInvocation, repository::Repository, rewrite_log::ResetKind}, - utils::debug_log, }; pub fn pre_reset_hook(parsed_args: &ParsedGitInvocation, repository: &mut Repository) { @@ -39,7 +38,7 @@ pub fn post_reset_hook( exit_status: std::process::ExitStatus, ) { if !exit_status.success() { - debug_log("Reset failed, skipping authorship handling"); + tracing::debug!("Reset failed, skipping authorship handling"); return; } @@ -48,20 +47,17 @@ pub fn post_reset_hook( // Extract pathspecs let pathspecs = extract_pathspecs(parsed_args).unwrap_or_else(|e| { - debug_log(&format!("Failed to extract pathspecs: {}", e)); + tracing::debug!("Failed to extract pathspecs: {}", 
e); Vec::new() }); - debug_log(&format!( - "Reset: tree-ish='{}', pathspecs={:?}", - tree_ish, pathspecs - )); + tracing::debug!("Reset: tree-ish='{}', pathspecs={:?}", tree_ish, pathspecs); // Get old HEAD (before reset) from pre-command hook let old_head_sha = match &repository.pre_command_base_commit { Some(sha) => sha.clone(), None => { - debug_log("No pre-command head captured, skipping authorship handling"); + tracing::debug!("No pre-command head captured, skipping authorship handling"); return; } }; @@ -70,7 +66,7 @@ pub fn post_reset_hook( let new_head_sha = match repository.head().ok().and_then(|h| h.target().ok()) { Some(sha) => sha, None => { - debug_log("No HEAD after reset, skipping authorship handling"); + tracing::debug!("No HEAD after reset, skipping authorship handling"); return; } }; @@ -82,14 +78,14 @@ pub fn post_reset_hook( None => { // Fallback to resolving tree-ish post-reset (for backwards compatibility) // This will be incorrect for relative refs but better than failing - debug_log(&format!( + tracing::debug!( "Warning: No pre-resolved target commit, attempting post-reset resolution of '{}'", tree_ish - )); + ); match resolve_tree_ish_to_commit(repository, &tree_ish) { Ok(sha) => sha, Err(e) => { - debug_log(&format!("Failed to resolve tree-ish '{}': {}", tree_ish, e)); + tracing::debug!("Failed to resolve tree-ish '{}': {}", tree_ish, e); return; } } @@ -166,10 +162,7 @@ fn handle_reset_hard(repository: &Repository, old_head_sha: &str, _target_commit .storage .delete_working_log_for_base_commit(old_head_sha); - debug_log(&format!( - "Reset --hard: deleted working log for {}", - old_head_sha - )); + tracing::debug!("Reset --hard: deleted working log for {}", old_head_sha); } /// Handle --soft, --mixed, --merge: preserve working directory and reconstruct working log @@ -182,15 +175,16 @@ fn handle_reset_preserve_working_dir( ) { // Sanity check: new HEAD should equal target after reset if new_head_sha != target_commit_sha { - 
debug_log(&format!( + tracing::debug!( "Warning: new HEAD ({}) != target commit ({})", - new_head_sha, target_commit_sha - )); + new_head_sha, + target_commit_sha + ); } // No-op if resetting to same commit if old_head_sha == target_commit_sha { - debug_log("Reset to same commit, no authorship changes needed"); + tracing::debug!("Reset to same commit, no authorship changes needed"); return; } @@ -207,13 +201,13 @@ fn handle_reset_preserve_working_dir( human_author, true, ) { - debug_log("Reset to non-ancestor commit, handled as wrapper plumbing rewrite"); + tracing::debug!("Reset to non-ancestor commit, handled as wrapper plumbing rewrite"); return; } // Fall back to re-keying the working log so uncommitted state is preserved even when // we cannot derive a safe commit mapping. - debug_log("Reset to non-ancestor commit, migrating working log"); + tracing::debug!("Reset to non-ancestor commit, migrating working log"); let _ = repository .storage .rename_working_log(old_head_sha, target_commit_sha); @@ -230,16 +224,13 @@ fn handle_reset_preserve_working_dir( None, ) { Ok(_) => { - debug_log(&format!( + tracing::debug!( "✓ Successfully reconstructed working log after reset to {}", target_commit_sha - )); + ); } Err(e) => { - debug_log(&format!( - "Failed to reconstruct working log after reset: {}", - e - )); + tracing::debug!("Failed to reconstruct working log after reset: {}", e); } } } @@ -254,17 +245,20 @@ fn handle_reset_pathspec_preserve_working_dir( human_author: &str, pathspecs: &[String], ) { - debug_log(&format!( + tracing::debug!( "Handling pathspec reset: old_head={}, target={}, pathspecs={:?}", - old_head_sha, target_commit_sha, pathspecs - )); + old_head_sha, + target_commit_sha, + pathspecs + ); // For pathspec resets, HEAD doesn't move if old_head_sha != new_head_sha { - debug_log(&format!( + tracing::debug!( "Warning: pathspec reset but HEAD moved from {} to {}", - old_head_sha, new_head_sha - )); + old_head_sha, + new_head_sha + ); } // For pathspec 
resets, HEAD doesn't move, so we're reconstructing for the current HEAD @@ -274,7 +268,7 @@ fn handle_reset_pathspec_preserve_working_dir( let is_backward = is_ancestor(repository, target_commit_sha, old_head_sha); if !is_backward { - debug_log("Pathspec reset forward or to unrelated commit, no reconstruction needed"); + tracing::debug!("Pathspec reset forward or to unrelated commit, no reconstruction needed"); return; } @@ -282,10 +276,7 @@ fn handle_reset_pathspec_preserve_working_dir( let working_log = match repository.storage.working_log_for_base_commit(old_head_sha) { Ok(wl) => wl, Err(e) => { - debug_log(&format!( - "Failed to get working log for {}: {}", - old_head_sha, e - )); + tracing::debug!("Failed to get working log for {}: {}", old_head_sha, e); return; } }; @@ -317,16 +308,16 @@ fn handle_reset_pathspec_preserve_working_dir( None, ) { Ok(_) => { - debug_log(&format!( + tracing::debug!( "✓ Reconstructed working log for pathspec reset: {:?}", pathspecs - )); + ); } Err(e) => { - debug_log(&format!( + tracing::debug!( "Failed to reconstruct working log for pathspec reset: {}", e - )); + ); return; } } @@ -338,10 +329,7 @@ fn handle_reset_pathspec_preserve_working_dir( { Ok(wl) => wl, Err(e) => { - debug_log(&format!( - "Failed to get working log for {}: {}", - target_commit_sha, e - )); + tracing::debug!("Failed to get working log for {}: {}", target_commit_sha, e); return; } }; @@ -359,10 +347,7 @@ fn handle_reset_pathspec_preserve_working_dir( let head_working_log = match repository.storage.working_log_for_base_commit(new_head_sha) { Ok(wl) => wl, Err(e) => { - debug_log(&format!( - "Failed to get working log for {}: {}", - new_head_sha, e - )); + tracing::debug!("Failed to get working log for {}: {}", new_head_sha, e); return; } }; @@ -378,10 +363,11 @@ fn handle_reset_pathspec_preserve_working_dir( .delete_working_log_for_base_commit(target_commit_sha); } - debug_log(&format!( + tracing::debug!( "✓ Updated working log for pathspec reset: {} pathspec 
checkpoints, {} non-pathspec checkpoints preserved", - pathspec_count, non_pathspec_count - )); + pathspec_count, + non_pathspec_count + ); } /// Resolve tree-ish to commit SHA diff --git a/src/commands/hooks/stash_hooks.rs b/src/commands/hooks/stash_hooks.rs index 557d46534..6d1812514 100644 --- a/src/commands/hooks/stash_hooks.rs +++ b/src/commands/hooks/stash_hooks.rs @@ -5,7 +5,6 @@ use crate::commands::hooks::commit_hooks::get_commit_default_author; use crate::error::GitAiError; use crate::git::cli_parser::ParsedGitInvocation; use crate::git::repository::{Repository, exec_git, exec_git_stdin}; -use crate::utils::debug_log; pub fn pre_stash_hook( parsed_args: &ParsedGitInvocation, @@ -34,7 +33,7 @@ pub fn pre_stash_hook( if let Ok(stash_sha) = resolve_stash_to_sha(repository, &stash_ref) { command_hooks_context.stash_sha = Some(stash_sha); - debug_log(&format!("Pre-stash: captured stash SHA for {}", subcommand)); + tracing::debug!("Pre-stash: captured stash SHA for {}", subcommand); } } else { let _ = match crate::commands::checkpoint::run( @@ -47,7 +46,7 @@ pub fn pre_stash_hook( ) { Ok(result) => result, Err(e) => { - debug_log(&format!("Failed to run checkpoint: {}", e)); + tracing::debug!("Failed to run checkpoint: {}", e); return; } }; @@ -75,20 +74,20 @@ pub fn post_stash_hook( let is_restore_subcommand = subcommand == "pop" || subcommand == "apply" || subcommand == "branch"; if is_restore_subcommand && has_stash_conflict(repository) { - debug_log(&format!( + tracing::debug!( "Stash {} had conflicts, but will still restore attributions", subcommand - )); + ); } else { - debug_log(&format!( + tracing::debug!( "Stash {} failed (non-conflict), skipping post-stash hook", subcommand - )); + ); return; } } - debug_log(&format!("Post-stash: processing stash {}", subcommand)); + tracing::debug!("Post-stash: processing stash {}", subcommand); // Handle different subcommands if subcommand == "push" || subcommand == "save" { @@ -97,24 +96,21 @@ pub fn 
post_stash_hook( let head_sha = match repository.head().and_then(|head| head.target()) { Ok(head_sha) => head_sha.to_string(), Err(e) => { - debug_log(&format!( - "Failed to resolve HEAD after stash {}: {}", - subcommand, e - )); + tracing::debug!("Failed to resolve HEAD after stash {}: {}", subcommand, e); return; } }; let stash_sha = match resolve_stash_to_sha(repository, "stash@{0}") { Ok(stash_sha) => stash_sha, Err(e) => { - debug_log(&format!("Failed to resolve created stash SHA: {}", e)); + tracing::debug!("Failed to resolve created stash SHA: {}", e); return; } }; // Stash was created - save authorship log as git note if let Err(e) = save_stash_authorship_log(repository, &head_sha, &stash_sha, &pathspecs) { - debug_log(&format!("Failed to save stash authorship log: {}", e)); + tracing::debug!("Failed to save stash authorship log: {}", e); } } else if subcommand == "pop" || subcommand == "apply" || subcommand == "branch" { // Stash was applied - restore attributions from git note @@ -122,28 +118,22 @@ pub fn post_stash_hook( let stash_sha = match &command_hooks_context.stash_sha { Some(sha) => sha.clone(), None => { - debug_log("No stash SHA captured in pre-hook, cannot restore attributions"); + tracing::debug!("No stash SHA captured in pre-hook, cannot restore attributions"); return; } }; - debug_log(&format!( - "Restoring attributions from stash SHA: {}", - stash_sha - )); + tracing::debug!("Restoring attributions from stash SHA: {}", stash_sha); let head_sha = match repository.head().and_then(|head| head.target()) { Ok(head_sha) => head_sha.to_string(), Err(e) => { - debug_log(&format!( - "Failed to resolve HEAD after stash {}: {}", - subcommand, e - )); + tracing::debug!("Failed to resolve HEAD after stash {}: {}", subcommand, e); return; } }; if let Err(e) = restore_stash_attributions(repository, &head_sha, &stash_sha) { - debug_log(&format!("Failed to restore stash attributions: {}", e)); + tracing::debug!("Failed to restore stash attributions: {}", 
e); } } } @@ -175,7 +165,7 @@ pub(crate) fn save_stash_authorship_log( stash_sha: &str, pathspecs: &[String], ) -> Result<(), GitAiError> { - debug_log(&format!("Stash created with SHA: {}", stash_sha)); + tracing::debug!("Stash created with SHA: {}", stash_sha); // Build VirtualAttributions from the working log before it was cleared let working_log_va = @@ -200,16 +190,16 @@ pub(crate) fn save_stash_authorship_log( // If there are no attributions, just clean up working log for filtered files if filtered_files.is_empty() { - debug_log("No attributions to save for stash"); + tracing::debug!("No attributions to save for stash"); delete_working_log_for_files(repo, head_sha, &filtered_files)?; return Ok(()); } - debug_log(&format!( + tracing::debug!( "Saving attributions for {} files (pathspecs: {:?})", filtered_files.len(), pathspecs - )); + ); // Convert to authorship log, filtering to only include matched files let mut authorship_log = working_log_va.to_authorship_log()?; @@ -223,17 +213,17 @@ pub(crate) fn save_stash_authorship_log( .map_err(|e| GitAiError::Generic(format!("Failed to serialize authorship log: {}", e)))?; save_stash_note(repo, stash_sha, &json)?; - debug_log(&format!( + tracing::debug!( "Saved authorship log to refs/notes/ai-stash for stash {}", stash_sha - )); + ); // Delete the working log entries for files that were stashed delete_working_log_for_files(repo, head_sha, &filtered_files)?; - debug_log(&format!( + tracing::debug!( "Deleted working log entries for {} files", filtered_files.len() - )); + ); Ok(()) } @@ -244,16 +234,13 @@ pub(crate) fn restore_stash_attributions( head_sha: &str, stash_sha: &str, ) -> Result<(), GitAiError> { - debug_log(&format!( - "Restoring stash attributions from SHA: {}", - stash_sha - )); + tracing::debug!("Restoring stash attributions from SHA: {}", stash_sha); // Try to read authorship log from git note (refs/notes/ai-stash) let note_content = match read_stash_note(repo, stash_sha) { Ok(content) => content, 
Err(_) => { - debug_log("No authorship log found in refs/notes/ai-stash for this stash"); + tracing::debug!("No authorship log found in refs/notes/ai-stash for this stash"); return Ok(()); } }; @@ -262,16 +249,16 @@ pub(crate) fn restore_stash_attributions( let authorship_log = match crate::authorship::authorship_log_serialization::AuthorshipLog::deserialize_from_string(¬e_content) { Ok(log) => log, Err(e) => { - debug_log(&format!("Failed to parse stash authorship log: {}", e)); + tracing::debug!("Failed to parse stash authorship log: {}", e); return Ok(()); } }; - debug_log(&format!( + tracing::debug!( "Loaded authorship log from stash: {} files, {} prompts", authorship_log.attestations.len(), authorship_log.metadata.prompts.len() - )); + ); // Convert authorship log to INITIAL attributions let mut initial_files = std::collections::HashMap::new(); @@ -319,10 +306,10 @@ pub(crate) fn restore_stash_attributions( initial_file_contents, )?; - debug_log(&format!( + tracing::debug!( "✓ Wrote INITIAL attributions to working log for {}", head_sha - )); + ); } Ok(()) @@ -464,7 +451,7 @@ pub(crate) fn extract_stash_pathspecs(parsed_args: &ParsedGitInvocation) -> Vec< pathspecs.push(arg.clone()); } - debug_log(&format!("Extracted pathspecs: {:?}", pathspecs)); + tracing::debug!("Extracted pathspecs: {:?}", pathspecs); pathspecs } diff --git a/src/commands/hooks/switch_hooks.rs b/src/commands/hooks/switch_hooks.rs index 89cba8db8..f9496f797 100644 --- a/src/commands/hooks/switch_hooks.rs +++ b/src/commands/hooks/switch_hooks.rs @@ -3,7 +3,6 @@ use crate::commands::git_handlers::CommandHooksContext; use crate::commands::hooks::commit_hooks::get_commit_default_author; use crate::git::cli_parser::ParsedGitInvocation; use crate::git::repository::Repository; -use crate::utils::debug_log; pub fn pre_switch_hook( parsed_args: &ParsedGitInvocation, @@ -25,12 +24,14 @@ fn capture_va_for_merge( repository: &Repository, command_hooks_context: &mut CommandHooksContext, ) { - 
debug_log("Detected switch --merge with uncommitted changes, capturing VirtualAttributions"); + tracing::debug!( + "Detected switch --merge with uncommitted changes, capturing VirtualAttributions" + ); let head_sha = match repository.head().ok().and_then(|h| h.target().ok()) { Some(sha) => sha, None => { - debug_log("Failed to get HEAD for VA capture"); + tracing::debug!("Failed to get HEAD for VA capture"); return; } }; @@ -43,17 +44,17 @@ fn capture_va_for_merge( ) { Ok(va) => { if !va.attributions.is_empty() { - debug_log(&format!( + tracing::debug!( "Captured VA with {} files for switch --merge preservation", va.attributions.len() - )); + ); command_hooks_context.stashed_va = Some(va); } else { - debug_log("No attributions in working log to preserve"); + tracing::debug!("No attributions in working log to preserve"); } } Err(e) => { - debug_log(&format!("Failed to build VirtualAttributions: {}", e)); + tracing::debug!("Failed to build VirtualAttributions: {}", e); } } } @@ -70,7 +71,7 @@ pub fn post_switch_hook( // markers, but we must still restore the stashed VA so attribution is not lost. // All other failed switches are skipped as before. if !exit_status.success() && !is_merge { - debug_log("Switch failed, skipping working log handling"); + tracing::debug!("Switch failed, skipping working log handling"); return; } @@ -86,16 +87,16 @@ pub fn post_switch_hook( // HEAD unchanged is always a no-op: no branch switch occurred. 
if old_head == new_head { - debug_log("HEAD unchanged after switch, no working log handling needed"); + tracing::debug!("HEAD unchanged after switch, no working log handling needed"); return; } // Force switch - delete working log (changes discarded) if is_force_switch(parsed_args) { - debug_log(&format!( + tracing::debug!( "Force switch detected, deleting working log for {}", &old_head - )); + ); let _ = repository .storage .delete_working_log_for_base_commit(&old_head); @@ -126,22 +127,21 @@ pub fn post_switch_hook( }); if let Some(stashed_va) = stashed_va { - debug_log("Restoring VA after switch --merge"); + tracing::debug!("Restoring VA after switch --merge"); let _ = repository .storage .delete_working_log_for_base_commit(&old_head); restore_stashed_va(repository, &old_head, &new_head, stashed_va); return; } - debug_log("switch --merge: no VA to restore, falling through to working log migration"); + tracing::debug!( + "switch --merge: no VA to restore, falling through to working log migration" + ); // Fall through to rename } // Normal branch switch - migrate working log - debug_log(&format!( - "Switch changed HEAD: {} -> {}", - &old_head, &new_head - )); + tracing::debug!("Switch changed HEAD: {} -> {}", &old_head, &new_head); let _ = repository.storage.rename_working_log(&old_head, &new_head); } diff --git a/src/commands/hooks/update_ref_hooks.rs b/src/commands/hooks/update_ref_hooks.rs index cba41714e..d0260b871 100644 --- a/src/commands/hooks/update_ref_hooks.rs +++ b/src/commands/hooks/update_ref_hooks.rs @@ -3,7 +3,6 @@ use crate::commands::hooks::commit_hooks::get_commit_default_author; use crate::commands::hooks::plumbing_rewrite_hooks::apply_wrapper_plumbing_rewrite_if_possible; use crate::git::cli_parser::ParsedGitInvocation; use crate::git::repository::Repository; -use crate::utils::debug_log; #[derive(Debug, Clone, PartialEq, Eq)] struct ParsedUpdateRefCommand { @@ -79,10 +78,12 @@ pub fn post_update_ref_hook( } if is_ancestor(repository, 
&new_target, &old_target) { - debug_log(&format!( + tracing::debug!( "Skipping wrapper update-ref rewind handling for {}: {} -> {}", - ref_name, old_target, new_target - )); + ref_name, + old_target, + new_target + ); return; } @@ -94,10 +95,10 @@ pub fn post_update_ref_hook( &commit_author, true, ) { - debug_log(&format!( + tracing::debug!( "Skipping wrapper update-ref rewrite handling for {}: could not derive safe mappings", ref_name - )); + ); } } diff --git a/src/commands/prompts_db.rs b/src/commands/prompts_db.rs index f38932da5..da15de43d 100644 --- a/src/commands/prompts_db.rs +++ b/src/commands/prompts_db.rs @@ -827,10 +827,10 @@ fn fetch_from_git_notes( } eprintln!(" found {} notes in history", live_notes.len()); if orphan_count > 0 { - crate::utils::debug_log(&format!( + tracing::debug!( "{} orphaned notes (skipped, commit no longer reachable)", orphan_count - )); + ); } if live_notes.is_empty() { return Ok((0, deferred)); @@ -987,12 +987,10 @@ fn fetch_from_git_notes( /// SQLite path: `InternalDatabase::get_prompt(id)` → upsert if a body exists. /// /// Per-prompt accounting (NOT per-hash) is reported in the summary line. -/// Skip reasons are aggregated and emitted via `debug_log` (one line per category) -/// only when `GIT_AI_DEBUG=1` is set. +/// Skip reasons are aggregated and emitted via `tracing::debug!` (one line per category). 
fn resolve_cas_messages(conn: &Connection, deferred: &[DeferredPrompt]) { use crate::api::client::{ApiClient, ApiContext}; use crate::api::types::CasMessagesObject; - use crate::utils::debug_log; // Determine the configured instance prefix once (e.g., "https://api.git-ai.com/cas/") let instance_prefix = { @@ -1009,10 +1007,12 @@ fn resolve_cas_messages(conn: &Connection, deferred: &[DeferredPrompt]) { if dp.messages_url.starts_with(&instance_prefix) { cas_indices.push(i); } else { - debug_log(&format!( + tracing::debug!( "prompts: hostname mismatch id={} url={} expected_prefix={}", - dp.id, dp.messages_url, instance_prefix - )); + dp.id, + dp.messages_url, + instance_prefix + ); foreign_indices.push(i); } } @@ -1053,10 +1053,11 @@ fn resolve_cas_messages(conn: &Connection, deferred: &[DeferredPrompt]) { for &idx in &foreign_indices { if !foreign_resolved.contains(&idx) { let dp = &deferred[idx]; - debug_log(&format!( + tracing::debug!( "prompts: unresolved (foreign url + no local body) id={} url={}", - dp.id, dp.messages_url - )); + dp.id, + dp.messages_url + ); } } } @@ -1098,10 +1099,10 @@ fn resolve_cas_messages(conn: &Connection, deferred: &[DeferredPrompt]) { if !hashes_needing_fetch.is_empty() { let context = ApiContext::new(None); if context.auth_token.is_none() { - debug_log(&format!( + tracing::debug!( "prompts: no auth token, skipping CAS API fetch for {} hashes", hashes_needing_fetch.len() - )); + ); // All not-cached, CAS-eligible prompts are tentatively "not logged in". // Log each affected prompt so debug output shows exactly which ones // would have been fetched if the user were signed in. 
@@ -1109,10 +1110,12 @@ fn resolve_cas_messages(conn: &Connection, deferred: &[DeferredPrompt]) { if let Some(indices) = hash_to_indices.get(hash) { for &idx in indices { let dp = &deferred[idx]; - debug_log(&format!( + tracing::debug!( "prompts: auth error (not logged in) id={} hash={} url={}", - dp.id, hash, dp.messages_url - )); + dp.id, + hash, + dp.messages_url + ); cas_initial_failures.insert(idx, "not logged in"); } } @@ -1162,10 +1165,12 @@ fn resolve_cas_messages(conn: &Connection, deferred: &[DeferredPrompt]) { { for &idx in indices { let dp = &deferred[idx]; - debug_log(&format!( + tracing::debug!( "prompts: CAS decode error id={} hash={} url={}", - dp.id, result.hash, dp.messages_url - )); + dp.id, + result.hash, + dp.messages_url + ); } } } @@ -1174,10 +1179,13 @@ fn resolve_cas_messages(conn: &Connection, deferred: &[DeferredPrompt]) { if let Some(indices) = hash_to_indices.get(&result.hash) { for &idx in indices { let dp = &deferred[idx]; - debug_log(&format!( + tracing::debug!( "prompts: CAS not-found id={} hash={} url={} reason=\"{}\"", - dp.id, result.hash, dp.messages_url, reason - )); + dp.id, + result.hash, + dp.messages_url, + reason + ); } } } @@ -1194,10 +1202,13 @@ fn resolve_cas_messages(conn: &Connection, deferred: &[DeferredPrompt]) { if let Some(indices) = hash_to_indices.get(hash) { for &idx in indices { let dp = &deferred[idx]; - debug_log(&format!( + tracing::debug!( "prompts: CAS network error id={} hash={} url={} reason=\"{}\"", - dp.id, hash, dp.messages_url, err - )); + dp.id, + hash, + dp.messages_url, + err + ); } } } @@ -1287,10 +1298,12 @@ fn resolve_cas_messages(conn: &Connection, deferred: &[DeferredPrompt]) { .unwrap_or("no body in remote prompt store or local sqlite"); *skip_reasons.entry(reason).or_insert(0) += 1; let dp = &deferred[idx]; - debug_log(&format!( + tracing::debug!( "prompts: unresolved id={} url={} reason=\"{}\"", - dp.id, dp.messages_url, reason - )); + dp.id, + dp.messages_url, + reason + ); } } @@ 
-1313,7 +1326,7 @@ fn resolve_cas_messages(conn: &Connection, deferred: &[DeferredPrompt]) { // Debug-only: one line per skip reason, with counts. for (reason, count) in &skip_reasons { - debug_log(&format!(" {} skipped: {}", count, reason)); + tracing::debug!(" {} skipped: {}", count, reason); } } diff --git a/src/commands/show_prompt.rs b/src/commands/show_prompt.rs index 206aafaec..371708b62 100644 --- a/src/commands/show_prompt.rs +++ b/src/commands/show_prompt.rs @@ -3,7 +3,6 @@ use crate::api::types::CasMessagesObject; use crate::authorship::internal_db::InternalDatabase; use crate::authorship::prompt_utils::find_prompt; use crate::git::find_repository; -use crate::utils::debug_log; /// Handle the `show-prompt` command /// @@ -48,17 +47,17 @@ pub fn handle_show_prompt(args: &[String]) { && let Ok(cas_obj) = serde_json::from_str::(&cached_json) { prompt_record.messages = cas_obj.messages; - debug_log("show-prompt: resolved from cas_cache"); + tracing::debug!("show-prompt: resolved from cas_cache"); } // 2. 
If cache miss, fetch from CAS API (network) if prompt_record.messages.is_empty() { let context = ApiContext::new(None); if context.auth_token.is_some() { - debug_log(&format!( + tracing::debug!( "show-prompt: trying CAS API for hash {}", &hash[..8.min(hash.len())] - )); + ); let client = ApiClient::new(context); match client.read_ca_prompt_store(&[hash]) { Ok(response) => { @@ -74,10 +73,10 @@ pub fn handle_show_prompt(args: &[String]) { ) { prompt_record.messages = cas_obj.messages; - debug_log(&format!( + tracing::debug!( "show-prompt: resolved {} messages from CAS API", prompt_record.messages.len() - )); + ); // Cache for next time if let Ok(db_mutex) = InternalDatabase::global() && let Ok(mut db_guard) = db_mutex.lock() @@ -89,11 +88,11 @@ pub fn handle_show_prompt(args: &[String]) { } } Err(e) => { - debug_log(&format!("show-prompt: CAS API error: {}", e)); + tracing::debug!("show-prompt: CAS API error: {}", e); } } } else { - debug_log("show-prompt: no auth token, skipping CAS API"); + tracing::debug!("show-prompt: no auth token, skipping CAS API"); } } } @@ -106,10 +105,10 @@ pub fn handle_show_prompt(args: &[String]) { && !db_record.messages.messages.is_empty() { prompt_record.messages = db_record.messages.messages; - debug_log(&format!( + tracing::debug!( "show-prompt: resolved {} messages from local SQLite", prompt_record.messages.len() - )); + ); } } diff --git a/src/config.rs b/src/config.rs index 8629ee10d..8ab0a32d7 100644 --- a/src/config.rs +++ b/src/config.rs @@ -752,7 +752,7 @@ fn build_custom_attributes(file_cfg: &Option) -> HashMap remote, Err(error) => { - debug_log(&format!( - "daemon notes sync: failed to determine remote for {}: {}", - parsed.command.as_deref().unwrap_or("pull"), - error - )); + tracing::debug!( + %error, + command = parsed.command.as_deref().unwrap_or("pull"), + "notes sync: failed to determine remote" + ); return Ok(()); } }; if let Err(error) = fetch_authorship_notes(&repo, &remote) { - debug_log(&format!( - "daemon notes 
sync: failed to fetch authorship notes from {}: {}", - remote, error - )); + tracing::debug!( + %error, + %remote, + "notes sync: failed to fetch authorship notes" + ); } Ok(()) } @@ -1499,10 +1500,10 @@ fn apply_pull_notes_sync_side_effect( fn apply_clone_notes_sync_side_effect(worktree: &str) -> Result<(), GitAiError> { let repo = find_repository_in_path(worktree)?; if let Err(error) = fetch_authorship_notes(&repo, "origin") { - debug_log(&format!( - "daemon notes sync: failed to fetch clone authorship notes from origin: {}", - error - )); + tracing::debug!( + %error, + "notes sync: failed to fetch clone authorship notes from origin" + ); } Ok(()) } @@ -1616,10 +1617,10 @@ fn apply_checkout_switch_working_log_side_effect( // handled the migration). Fall through to the rename path so the // working log is migrated rather than lost. Attribution may be // slightly misaligned but is preserved. - debug_log(&format!( - "{} --merge missing carryover snapshot, falling back to rename", - cmd.primary_command.as_deref().unwrap_or("checkout") - )); + tracing::warn!( + command = cmd.primary_command.as_deref().unwrap_or("checkout"), + "--merge missing carryover snapshot, falling back to rename" + ); } else { if let Some(snapshot) = carryover_snapshot { // Fix #957: When --merge produced conflict markers (exit_code != 0), @@ -1783,10 +1784,10 @@ fn filter_commit_replay_files( selected_dirty_files.insert(file_path.clone(), target_content); selected_files.push(file_path); } else { - debug_log(&format!( - "Skipping synthetic pre-commit replay for {} because working log already matches committed content", - file_path - )); + tracing::debug!( + %file_path, + "skipping synthetic pre-commit replay because working log already matches committed content" + ); } } @@ -2230,10 +2231,12 @@ fn recover_reset_working_log_for_commit_replay( if let Err(error) = attempt_materialize_commit_chain_authorship(repo, Some(base_commit), &old_head, author) { - debug_log(&format!( - "Failed to backfill 
reset prerequisite notes between {} and {}: {}", - base_commit, old_head, error - )); + tracing::debug!( + %error, + %base_commit, + %old_head, + "failed to backfill reset prerequisite notes" + ); } reconstruct_working_log_after_reset( repo, @@ -2272,21 +2275,22 @@ fn seed_merge_squash_working_log_for_commit_replay( &merge_squash.source_head, author, ) { - debug_log(&format!( - "Failed to backfill squash prerequisite notes for {}: {}", - merge_squash.source_head, error - )); + tracing::debug!( + %error, + source_head = %merge_squash.source_head, + "failed to backfill squash prerequisite notes" + ); } - debug_log(&format!( - "Seeding merge --squash working log before daemon commit replay for base {}", - base_commit - )); + tracing::debug!( + %base_commit, + "seeding merge --squash working log before commit replay" + ); let Some(final_state) = exact_final_state else { - debug_log(&format!( - "Skipping merge --squash commit replay seed for {} because no committed final state was available", - base_commit - )); + tracing::debug!( + %base_commit, + "skipping merge --squash commit replay seed because no committed final state was available" + ); return Ok(()); }; prepare_working_log_after_squash_from_final_state( @@ -2339,10 +2343,12 @@ fn recover_recent_replay_prerequisites_for_commit_replay( &old_head, author, ) { - debug_log(&format!( - "Failed to backfill recent reset prerequisite notes between {} and {}: {}", - base_commit, old_head, error - )); + tracing::debug!( + %error, + %base_commit, + %old_head, + "failed to backfill recent reset prerequisite notes" + ); } reconstruct_working_log_after_reset( repo, @@ -2444,10 +2450,11 @@ fn ensure_rewrite_prerequisites( ) .map(|_| ()); if let Err(error) = materialize_result { - debug_log(&format!( - "Failed to backfill base commit note for {}: {}", - base_commit, error - )); + tracing::debug!( + %error, + %base_commit, + "failed to backfill base commit note" + ); } } @@ -2816,7 +2823,7 @@ fn apply_stash_rewrite_side_effect( 
)); }; let Some(stash_sha) = stash_event.stash_sha.as_deref() else { - debug_log("Skipping stash create replay without created stash oid"); + tracing::debug!("skipping stash create replay without created stash oid"); return Ok(()); }; stash_hooks::save_stash_authorship_log( @@ -2893,17 +2900,17 @@ fn maybe_rebase_mappings_from_repository( repository, old_head, new_head, onto_head, )?; if original_commits.is_empty() { - debug_log(&format!( - "{} produced no rebase source commits; skipping rewrite synthesis", - context - )); + tracing::debug!( + %context, + "produced no rebase source commits; skipping rewrite synthesis" + ); return Ok(None); } if new_commits.is_empty() { - debug_log(&format!( - "{} produced no rebased commits; skipping rewrite synthesis", - context - )); + tracing::debug!( + %context, + "produced no rebased commits; skipping rewrite synthesis" + ); return Ok(None); } Ok(Some((original_commits, new_commits))) @@ -3287,10 +3294,12 @@ fn apply_reset_working_log_side_effect( &reset.old_head_sha, human_author, ) { - debug_log(&format!( - "Failed to backfill reset-side-effect notes between {} and {}: {}", - reset.new_head_sha, reset.old_head_sha, error - )); + tracing::debug!( + %error, + new_head = %reset.new_head_sha, + old_head = %reset.old_head_sha, + "failed to backfill reset-side-effect notes" + ); } let tracked_files = tracked_working_log_files(repository, &reset.old_head_sha)?; if !tracked_files.is_empty() && carryover_snapshot.is_none() { @@ -3423,7 +3432,7 @@ fn maybe_setup_daemon_log_file(config: &DaemonConfig) -> Option match setup_daemon_log_file(config) { Ok(guard) => Some(guard), Err(e) => { - debug_log(&format!("daemon log file setup failed: {}", e)); + tracing::error!(%e, "log file setup failed"); None } } @@ -3434,7 +3443,7 @@ fn maybe_setup_daemon_log_file(config: &DaemonConfig) -> Option match setup_daemon_log_file(config) { Ok(guard) => Some(guard), Err(e) => { - debug_log(&format!("daemon log file setup failed: {}", e)); + 
tracing::error!(%e, "log file setup failed"); None } } @@ -3536,11 +3545,11 @@ fn redirect_windows_stdio_stream( ))); } if unsafe { libc::close(fd) } == -1 { - debug_log(&format!( - "close failed for daemon log stream {} after successful redirect: {}", + tracing::debug!( std_fd, - std::io::Error::last_os_error() - )); + error = %std::io::Error::last_os_error(), + "close failed for log stream after successful redirect" + ); } let set_handle_result = unsafe { SetStdHandle(std_handle, file.as_raw_handle()) }; @@ -4388,10 +4397,10 @@ impl ActorDaemonCoordinator { json!(self.next_trace_ingest_seq()), ); } - debug_log(&format!( - "daemon trace connection close fallback finalized sid={}", - root_sid - )); + tracing::debug!( + sid = %root_sid, + "trace connection close fallback finalized" + ); self.enqueue_trace_payload(payload)?; } Ok(()) @@ -4656,57 +4665,37 @@ impl ActorDaemonCoordinator { _ = coordinator.wait_for_shutdown() => break, }; let Some(seq) = payload.get(TRACE_INGEST_SEQ_FIELD).and_then(Value::as_u64) else { - let error = GitAiError::Generic( - "trace ingest payload missing ingress sequence".to_string(), - ); - observability::log_error( - &error, - Some(serde_json::json!({ - "component": "daemon", - "phase": "trace_ingest_worker", - "reason": "missing_ingest_seq", - "payload": payload, - })), + tracing::error!( + component = "daemon", + phase = "trace_ingest_worker", + reason = "missing_ingest_seq", + "trace ingest payload missing ingress sequence" ); coordinator.request_shutdown(); break; }; if pending_by_seq.len() >= TRACE_INGEST_QUEUE_CAPACITY { - let error = GitAiError::Generic(format!( - "trace ingest reorder buffer overflow at {} entries; next_seq={}", - pending_by_seq.len(), - next_seq - )); - debug_log(&format!("{}", error)); - observability::log_error( - &error, - Some(serde_json::json!({ - "component": "daemon", - "phase": "trace_ingest_worker", - "reason": "reorder_buffer_overflow", - "buffered_count": pending_by_seq.len(), - "next_seq": next_seq, 
- "received_seq": seq, - })), + tracing::error!( + component = "daemon", + phase = "trace_ingest_worker", + reason = "reorder_buffer_overflow", + buffered_count = pending_by_seq.len(), + next_seq, + received_seq = seq, + "trace ingest reorder buffer overflow" ); coordinator.request_shutdown(); break; } if pending_by_seq.insert(seq, payload).is_some() { - let error = GitAiError::Generic(format!( - "duplicate trace ingest sequence received: {}", - seq - )); - observability::log_error( - &error, - Some(serde_json::json!({ - "component": "daemon", - "phase": "trace_ingest_worker", - "reason": "duplicate_ingest_seq", - "sequence": seq, - })), + tracing::error!( + component = "daemon", + phase = "trace_ingest_worker", + reason = "duplicate_ingest_seq", + sequence = seq, + "duplicate trace ingest sequence received" ); coordinator.request_shutdown(); break; @@ -4726,16 +4715,14 @@ impl ActorDaemonCoordinator { match futures::FutureExt::catch_unwind(caught).await { Ok(Ok(())) => Ok(()), Ok(Err(error)) => { - debug_log(&format!("daemon trace ingest error: {}", error)); - observability::log_error( - &error, - Some(serde_json::json!({ - "component": "daemon", - "phase": "trace_ingest_worker", - "reason": "ingest_error", - "sequence": processed_seq, - "root_sid": ordered_payload_root, - })), + tracing::error!( + component = "daemon", + phase = "trace_ingest_worker", + reason = "ingest_error", + sequence = processed_seq, + root_sid = ?ordered_payload_root, + %error, + "trace ingest error" ); Err(error) } @@ -4748,22 +4735,18 @@ impl ActorDaemonCoordinator { } else { "unknown panic".to_string() }; - let error = GitAiError::Generic(format!( + tracing::error!( + component = "daemon", + phase = "trace_ingest_worker", + reason = "panic_in_ingest", + panic_msg = %panic_msg, + sequence = processed_seq, + "trace ingest panic" + ); + Err(GitAiError::Generic(format!( "trace ingest worker panic: {}", panic_msg - )); - debug_log(&format!("daemon trace ingest panic: {}", panic_msg)); - 
observability::log_error( - &error, - Some(serde_json::json!({ - "component": "daemon", - "phase": "trace_ingest_worker", - "reason": "panic_in_ingest", - "panic_message": panic_msg, - "sequence": processed_seq, - })), - ); - Err(error) + ))) } } }; @@ -4776,10 +4759,10 @@ impl ActorDaemonCoordinator { if let Err(error) = coordinator .record_trace_payload_processed_root(ordered_payload_root.as_deref()) { - debug_log(&format!( - "daemon trace payload accounting error after ingest: {}", - error - )); + tracing::debug!( + %error, + "trace payload accounting error after ingest" + ); } // Release: pairs with Acquire loads in wait_for_trace_ingest_processed_through // so waiters observe all ingest side-effects when seq advances. @@ -4796,22 +4779,15 @@ impl ActorDaemonCoordinator { } if !pending_by_seq.is_empty() { - let error = GitAiError::Generic(format!( - "trace ingest worker exiting with {} buffered out-of-order frame(s); next_seq={}", - pending_by_seq.len(), - next_seq - )); - observability::log_error( - &error, - Some(serde_json::json!({ - "component": "daemon", - "phase": "trace_ingest_worker", - "reason": "unflushed_buffer_on_shutdown", - "buffered_count": pending_by_seq.len(), - "next_seq": next_seq, - "min_buffered_seq": pending_by_seq.keys().next().copied(), - "max_buffered_seq": pending_by_seq.keys().last().copied(), - })), + tracing::error!( + component = "daemon", + phase = "trace_ingest_worker", + reason = "unflushed_buffer_on_shutdown", + buffered_count = pending_by_seq.len(), + next_seq, + min_buffered_seq = ?pending_by_seq.keys().next().copied(), + max_buffered_seq = ?pending_by_seq.keys().last().copied(), + "trace ingest worker exiting with buffered out-of-order frames" ); } }); @@ -4846,24 +4822,21 @@ impl ActorDaemonCoordinator { |current| Some(current.saturating_sub(1)), ); if let Err(error) = self.record_trace_payload_processed_root(payload_root.as_deref()) { - debug_log(&format!( - "daemon trace payload accounting rollback error: {}", - error - 
)); + tracing::debug!( + %error, + "trace payload accounting rollback error" + ); } - let error = GitAiError::Generic( - "trace ingest queue send failed: worker may have crashed".to_string(), - ); - observability::log_error( - &error, - Some(serde_json::json!({ - "component": "daemon", - "phase": "enqueue_trace_payload", - "reason": "ingest_worker_channel_closed", - })), + tracing::error!( + component = "daemon", + phase = "enqueue_trace_payload", + reason = "ingest_worker_channel_closed", + "trace ingest queue send failed: worker may have crashed" ); self.request_shutdown(); - return Err(error); + return Err(GitAiError::Generic( + "trace ingest queue send failed: worker may have crashed".to_string(), + )); } Ok(()) } @@ -4998,14 +4971,12 @@ impl ActorDaemonCoordinator { let mut ingress = match self.trace_ingress_state.lock() { Ok(guard) => guard, Err(_) => { - observability::log_error( - &GitAiError::Generic("trace ingress state lock poisoned".to_string()), - Some(serde_json::json!({ - "component": "daemon", - "phase": "augment_trace_payload_with_reflog_metadata", - "sid": sid, - "event": event, - })), + tracing::error!( + component = "daemon", + phase = "augment_trace_payload_with_reflog_metadata", + %sid, + %event, + "trace ingress state lock poisoned" ); return false; } @@ -5150,19 +5121,14 @@ impl ActorDaemonCoordinator { } Ok(None) => {} Err(error) => { - debug_log(&format!( - "daemon commit squash context capture failed sid={}: {}", - sid, error - )); - observability::log_error( - &error, - Some(json!({ - "component": "daemon", - "phase": "augment_trace_payload_with_reflog_metadata", - "root_sid": root, - "sid": sid, - "argv": effective_argv, - })), + tracing::error!( + component = "daemon", + phase = "augment_trace_payload_with_reflog_metadata", + root_sid = %root, + %sid, + ?effective_argv, + %error, + "commit squash context capture failed" ); } } @@ -5179,19 +5145,14 @@ impl ActorDaemonCoordinator { } Ok(None) => {} Err(error) => { - debug_log(&format!( - 
"daemon stash target resolution failed sid={}: {}", - sid, error - )); - observability::log_error( - &error, - Some(json!({ - "component": "daemon", - "phase": "augment_trace_payload_with_reflog_metadata", - "root_sid": root, - "sid": sid, - "argv": effective_argv, - })), + tracing::error!( + component = "daemon", + phase = "augment_trace_payload_with_reflog_metadata", + root_sid = %root, + %sid, + ?effective_argv, + %error, + "stash target resolution failed" ); object.insert( "git_ai_stash_target_oid_error".to_string(), @@ -5247,19 +5208,14 @@ impl ActorDaemonCoordinator { } Ok(None) => {} Err(error) => { - debug_log(&format!( - "daemon merge --squash context capture failed sid={}: {}", - sid, error - )); - observability::log_error( - &error, - Some(json!({ - "component": "daemon", - "phase": "augment_trace_payload_with_reflog_metadata", - "root_sid": root, - "sid": sid, - "argv": effective_argv, - })), + tracing::error!( + component = "daemon", + phase = "augment_trace_payload_with_reflog_metadata", + root_sid = %root, + %sid, + ?effective_argv, + %error, + "merge --squash context capture failed" ); } } @@ -5268,14 +5224,12 @@ impl ActorDaemonCoordinator { let mut ingress = match self.trace_ingress_state.lock() { Ok(guard) => guard, Err(_) => { - observability::log_error( - &GitAiError::Generic("trace ingress state lock poisoned".to_string()), - Some(serde_json::json!({ - "component": "daemon", - "phase": "augment_trace_payload_with_reflog_metadata", - "sid": sid, - "event": event, - })), + tracing::error!( + component = "daemon", + phase = "augment_trace_payload_with_reflog_metadata", + %sid, + %event, + "trace ingress state lock poisoned" ); return false; } @@ -5362,10 +5316,11 @@ impl ActorDaemonCoordinator { terminal_ref_changes = Some(ref_changes); } Err(error) => { - debug_log(&format!( - "daemon trace reflog delta capture error sid={}: {}", - sid, error - )); + tracing::debug!( + %error, + %sid, + "trace reflog delta capture error" + ); } } 
object.insert("git_ai_family_reflog_end".to_string(), json!(end_offsets)); @@ -5387,19 +5342,14 @@ impl ActorDaemonCoordinator { } Ok(None) => {} Err(error) => { - debug_log(&format!( - "daemon terminal stash target resolution failed sid={}: {}", - sid, error - )); - observability::log_error( - &error, - Some(json!({ - "component": "daemon", - "phase": "augment_trace_payload_with_reflog_metadata", - "root_sid": root, - "sid": sid, - "argv": effective_argv, - })), + tracing::error!( + component = "daemon", + phase = "augment_trace_payload_with_reflog_metadata", + root_sid = %root, + %sid, + ?effective_argv, + %error, + "terminal stash target resolution failed" ); object.insert( "git_ai_stash_target_oid_error".to_string(), @@ -5451,19 +5401,14 @@ impl ActorDaemonCoordinator { } Ok(None) => {} Err(error) => { - debug_log(&format!( - "daemon carryover snapshot capture failed sid={}: {}", - sid, error - )); - observability::log_error( - &error, - Some(json!({ - "component": "daemon", - "phase": "augment_trace_payload_with_reflog_metadata", - "root_sid": root, - "sid": sid, - "argv": effective_argv, - })), + tracing::error!( + component = "daemon", + phase = "augment_trace_payload_with_reflog_metadata", + root_sid = %root, + %sid, + ?effective_argv, + %error, + "carryover snapshot capture failed" ); } } @@ -5599,10 +5544,12 @@ impl ActorDaemonCoordinator { Ok(Ok((applied, side_effect_result))) => { if let Err(error) = &side_effect_result { let _ = self.record_side_effect_error(family, order, error); - debug_log(&format!( - "daemon command side effect failed for family {} seq {}: {}", - family, applied.seq, error - )); + tracing::error!( + %error, + %family, + seq = applied.seq, + "command side effect failed" + ); } if let Err(error) = self.append_command_completion_log( family, @@ -5611,18 +5558,22 @@ impl ActorDaemonCoordinator { order, ) { let _ = self.record_side_effect_error(family, order, &error); - debug_log(&format!( - "daemon command completion log write failed 
for family {} order {}: {}", - family, order, error - )); + tracing::error!( + %error, + %family, + order, + "command completion log write failed" + ); } } Ok(Err(error)) => { let _ = self.record_side_effect_error(family, order, &error); - debug_log(&format!( - "daemon command apply failed for family {} order {}: {}", - family, order, error - )); + tracing::error!( + %error, + %family, + order, + "command apply failed" + ); } Err(panic_payload) => { let panic_msg = if let Some(s) = panic_payload.downcast_ref::() @@ -5638,20 +5589,14 @@ impl ActorDaemonCoordinator { panic_msg )); let _ = self.record_side_effect_error(family, order, &error); - debug_log(&format!( - "daemon command side effect panic for family {} order {}: {}", - family, order, panic_msg - )); - observability::log_error( - &error, - Some(serde_json::json!({ - "component": "daemon", - "phase": "command_side_effect", - "reason": "panic_in_side_effect", - "panic_message": panic_msg, - "family": family, - "order": order, - })), + tracing::error!( + component = "daemon", + phase = "command_side_effect", + reason = "panic_in_side_effect", + panic_msg = %panic_msg, + %family, + order, + "command side effect panic" ); } } @@ -5692,6 +5637,13 @@ impl ActorDaemonCoordinator { ); let should_log_completion = crate::daemon::test_sync::tracks_checkpoint_request_for_test_sync(&request); + let checkpoint_kind_str: &str = match request.as_ref() { + CheckpointRunRequest::Live(req) => req.kind.as_deref().unwrap_or("human"), + CheckpointRunRequest::Captured(_) => "captured", + }; + let checkpoint_kind_str = checkpoint_kind_str.to_string(); + tracing::info!(kind = %checkpoint_kind_str, repo = %repo_wd, "checkpoint start"); + let checkpoint_start = std::time::Instant::now(); // Wrap checkpoint processing in catch_unwind to recover from panics. 
let checkpoint_result = { let future = async { @@ -5718,23 +5670,14 @@ impl ActorDaemonCoordinator { } else { "unknown panic".to_string() }; - debug_log(&format!( - "daemon checkpoint side effect panic for family {} order {}: {}", - family, order, panic_msg - )); - observability::log_error( - &GitAiError::Generic(format!( - "daemon checkpoint side effect panic: {}", - panic_msg - )), - Some(serde_json::json!({ - "component": "daemon", - "phase": "checkpoint_side_effect", - "reason": "panic_in_side_effect", - "panic_message": panic_msg, - "family": family, - "order": order, - })), + tracing::error!( + component = "daemon", + phase = "checkpoint_side_effect", + reason = "panic_in_side_effect", + panic_msg = %panic_msg, + %family, + order, + "checkpoint side effect panic" ); Err(GitAiError::Generic(format!( "daemon checkpoint panic: {}", @@ -5742,6 +5685,22 @@ impl ActorDaemonCoordinator { ))) } }; + let checkpoint_duration_ms = checkpoint_start.elapsed().as_millis(); + if result.is_ok() { + tracing::info!( + kind = %checkpoint_kind_str, + repo = %repo_wd, + duration_ms = checkpoint_duration_ms as u64, + "checkpoint done" + ); + } else { + tracing::warn!( + kind = %checkpoint_kind_str, + repo = %repo_wd, + duration_ms = checkpoint_duration_ms as u64, + "checkpoint failed" + ); + } if result.is_ok() { let per_file = if !checkpoint_file_paths.is_empty() { compute_watermarks_from_stat(&repo_wd, &checkpoint_file_paths) @@ -5794,18 +5753,23 @@ impl ActorDaemonCoordinator { if result.is_ok() { result = Err(cleanup_error); } else { - debug_log(&format!( - "daemon captured checkpoint cleanup failed for family {} order {} capture {}: {}", - family, order, capture_id, cleanup_error - )); + tracing::debug!( + %cleanup_error, + %family, + order, + %capture_id, + "captured checkpoint cleanup failed" + ); } } if let Err(error) = &result { let _ = self.record_side_effect_error(family, order, error); - debug_log(&format!( - "daemon checkpoint side effect failed for family {} order 
{}: {}", - family, order, error - )); + tracing::error!( + %error, + %family, + order, + "checkpoint side effect failed" + ); } if should_log_completion { let log_entry = TestCompletionLogEntry { @@ -5827,10 +5791,12 @@ impl ActorDaemonCoordinator { self.maybe_append_test_completion_log(family, &log_entry) { let _ = self.record_side_effect_error(family, order, &error); - debug_log(&format!( - "daemon checkpoint completion log write failed for family {} order {}: {}", - family, order, error - )); + tracing::error!( + %error, + %family, + order, + "checkpoint completion log write failed" + ); } } if let Some(respond_to) = respond_to { @@ -6364,10 +6330,10 @@ impl ActorDaemonCoordinator { start_target_hint.as_deref(), )? else { - debug_log(&format!( - "rebase complete produced no unprocessed replay segment; skipping rewrite synthesis sid={}", - cmd.root_sid - )); + tracing::debug!( + sid = %cmd.root_sid, + "rebase complete produced no unprocessed replay segment; skipping rewrite synthesis" + ); if let Some(worktree) = cmd.worktree.as_ref() { self.clear_pending_rebase_original_head_for_worktree(worktree)?; } @@ -6376,10 +6342,13 @@ impl ActorDaemonCoordinator { if (!old_head.is_empty() && old_head != &mapping_old_head) || (!new_head.is_empty() && new_head != &stable_new_head) { - debug_log(&format!( - "rebase complete semantic heads diverged from stable reflog heads semantic_old={} semantic_new={} stable_old={} stable_new={}", - old_head, new_head, mapping_old_head, stable_new_head - )); + tracing::debug!( + semantic_old = %old_head, + semantic_new = %new_head, + stable_old = %mapping_old_head, + stable_new = %stable_new_head, + "rebase complete semantic heads diverged from stable reflog heads" + ); } if let Some((original_commits, new_commits)) = maybe_rebase_mappings_from_repository( @@ -6437,10 +6406,12 @@ impl ActorDaemonCoordinator { "cherry_pick_complete", )?; if !original_head.is_empty() && original_head != &resolved_original_head { - debug_log(&format!( - 
"cherry-pick complete original head mismatch semantic={} resolved={} new={}", - original_head, resolved_original_head, new_head - )); + tracing::debug!( + semantic = %original_head, + resolved = %resolved_original_head, + new = %new_head, + "cherry-pick complete original head mismatch" + ); } out.push(RewriteLogEvent::cherry_pick_complete( CherryPickCompleteEvent::new( @@ -6579,10 +6550,10 @@ impl ActorDaemonCoordinator { None, )? else { - debug_log(&format!( - "pull --rebase produced no unprocessed replay segment; skipping rewrite synthesis sid={}", - cmd.root_sid - )); + tracing::debug!( + sid = %cmd.root_sid, + "pull --rebase produced no unprocessed replay segment; skipping rewrite synthesis" + ); if let Some(worktree) = cmd.worktree.as_ref() { self.clear_pending_rebase_original_head_for_worktree(worktree)?; } @@ -6673,6 +6644,31 @@ impl ActorDaemonCoordinator { let cmd = &applied.command; let events = &applied.analysis.events; let parsed_invocation = parsed_invocation_for_normalized_command(cmd); + + let primary = cmd.primary_command.as_deref().unwrap_or("unknown"); + let is_write_op = matches!( + primary, + "commit" | "rebase" | "merge" | "cherry-pick" | "am" | "stash" | "reset" | "push" + ); + if is_write_op && cmd.exit_code == 0 { + let repo_path = cmd + .worktree + .as_ref() + .map(|p| p.to_string_lossy().to_string()) + .unwrap_or_default(); + let post_head = cmd + .post_repo + .as_ref() + .and_then(|r| r.head.clone()) + .unwrap_or_default(); + tracing::info!( + op = primary, + repo = %repo_path, + new_head = %post_head, + "git write op completed" + ); + } + let saw_pull_event = events.iter().any(|event| { matches!( event, @@ -6694,24 +6690,24 @@ impl ActorDaemonCoordinator { .as_deref() .is_some_and(|v| v == "1") { - debug_log(&format!( - "daemon side-effect command={} primary={} seq={} argv={:?} invoked_args={:?} ref_changes_len={} ref_changes={:?} events={:?} pre_head={:?} post_head={:?} exit_code={}", - cmd.invoked_command.clone().unwrap_or_default(), - 
cmd.primary_command.clone().unwrap_or_default(), - applied.seq, - cmd.raw_argv, - cmd.invoked_args, - cmd.ref_changes.len(), - cmd.ref_changes, - events, - cmd.pre_repo.as_ref().and_then(|repo| repo.head.clone()), - cmd.post_repo.as_ref().and_then(|repo| repo.head.clone()), - cmd.exit_code, - )); - debug_log(&format!( - "daemon side-effect inflight_rebase_original_head={:?}", - cmd.inflight_rebase_original_head - )); + tracing::debug!( + command = cmd.invoked_command.clone().unwrap_or_default(), + primary = cmd.primary_command.clone().unwrap_or_default(), + seq = applied.seq, + argv = ?cmd.raw_argv, + invoked_args = ?cmd.invoked_args, + ref_changes_len = cmd.ref_changes.len(), + ref_changes = ?cmd.ref_changes, + events = ?events, + pre_head = ?cmd.pre_repo.as_ref().and_then(|repo| repo.head.clone()), + post_head = ?cmd.post_repo.as_ref().and_then(|repo| repo.head.clone()), + exit_code = cmd.exit_code, + "side-effect trace" + ); + tracing::debug!( + inflight_rebase_original_head = ?cmd.inflight_rebase_original_head, + "side-effect inflight rebase state" + ); } let carryover_snapshot = if let Some(snapshot_id) = cmd.carryover_snapshot_id.as_deref() { self.take_carryover_snapshot(&cmd.root_sid, snapshot_id)? @@ -6785,10 +6781,10 @@ impl ActorDaemonCoordinator { // snapshot — in that case carryover_snapshot would be Some and the guard would // not fire. So reaching here means there are no pre-rebase uncommitted files // to carry over, and the warning is benign. 
- debug_log(&format!( - "{} missing captured carryover snapshot for async restore (likely AI conflict-resolution checkpoint; attribution handled via working-log fallback)", - cmd.primary_command.as_deref().unwrap_or("pull") - )); + tracing::warn!( + command = cmd.primary_command.as_deref().unwrap_or("pull"), + "missing captured carryover snapshot for async restore (likely AI conflict-resolution checkpoint; attribution handled via working-log fallback)" + ); } } } @@ -6810,10 +6806,11 @@ impl ActorDaemonCoordinator { .as_deref() .is_some_and(|v| v == "1") { - debug_log(&format!( - "daemon pending rebase original head set family={:?} head={}", - family, old_head - )); + tracing::debug!( + ?family, + %old_head, + "pending rebase original head set" + ); } self.set_pending_rebase_original_head_for_worktree(worktree, old_head)?; } @@ -6865,10 +6862,10 @@ impl ActorDaemonCoordinator { return Ok(()); } if is_stash_restore { - debug_log(&format!( - "Stash restore with non-zero exit for sid={}, continuing to restore attribution", - cmd.root_sid - )); + tracing::debug!( + sid = %cmd.root_sid, + "stash restore with non-zero exit, continuing to restore attribution" + ); } } @@ -6878,10 +6875,11 @@ impl ActorDaemonCoordinator { match event { crate::daemon::domain::SemanticEvent::CloneCompleted { .. } => { if let Err(e) = apply_clone_notes_sync_side_effect(&worktree) { - debug_log(&format!( - "daemon clone notes side effect failed for {}: {}", - worktree, e - )); + tracing::debug!( + %e, + %worktree, + "clone notes side effect failed" + ); } } crate::daemon::domain::SemanticEvent::PullCompleted { .. 
} => { @@ -6906,20 +6904,15 @@ impl ActorDaemonCoordinator { let rewrite_events = match self.rewrite_events_from_semantic_events(cmd, events) { Ok(rewrite_events) => rewrite_events, Err(error) => { - debug_log(&format!( - "daemon strict rewrite synthesis failed command={:?} invoked={:?} sid={} error={}", - cmd.primary_command, cmd.invoked_command, cmd.root_sid, error - )); - crate::observability::log_error( - &error, - Some(json!({ - "component": "daemon", - "operation": "rewrite_events_from_semantic_events", - "command": cmd.primary_command, - "invoked_command": cmd.invoked_command, - "root_sid": cmd.root_sid, - "family": family, - })), + tracing::error!( + component = "daemon", + operation = "rewrite_events_from_semantic_events", + command = ?cmd.primary_command, + invoked_command = ?cmd.invoked_command, + root_sid = %cmd.root_sid, + ?family, + %error, + "strict rewrite synthesis failed" ); return Err(error); } @@ -7062,18 +7055,22 @@ impl ActorDaemonCoordinator { let _ = self.end_family_effect(&family); if let Err(error) = result { let _ = self.record_side_effect_error(&family, applied.seq, &error); - debug_log(&format!( - "daemon async side-effect error for family {} seq {}: {}", - family, applied.seq, error - )); + tracing::error!( + %error, + %family, + seq = applied.seq, + "async side-effect error" + ); } else if let Err(error) = self.append_command_completion_log(&family, &applied, &Ok(()), applied.seq) { let _ = self.record_side_effect_error(&family, applied.seq, &error); - debug_log(&format!( - "daemon async completion log write failed for family {} seq {}: {}", - family, applied.seq, error - )); + tracing::error!( + %error, + %family, + seq = applied.seq, + "async completion log write failed" + ); } } } @@ -7333,12 +7330,12 @@ fn control_listener_loop_actor( if std::thread::Builder::new() .spawn(move || { if let Err(e) = handle_control_connection_actor(stream, coord, handle) { - debug_log(&format!("daemon control connection error: {}", e)); + 
tracing::debug!(%e, "control connection error"); } }) .is_err() { - debug_log("daemon control listener: failed to spawn handler thread"); + tracing::error!("control listener: failed to spawn handler thread"); break; } } @@ -7357,7 +7354,7 @@ fn control_listener_loop_actor( let result = windows_control_pipe_worker_loop(path, first_connecting, coord.clone(), handle); if let Err(error) = &result { - debug_log(&format!("daemon control worker error: {}", error)); + tracing::error!(%error, "control worker error"); coord.request_shutdown(); } result @@ -7372,7 +7369,7 @@ fn control_listener_loop_actor( let result = windows_control_pipe_worker_loop(path, connecting, coord.clone(), handle); if let Err(error) = &result { - debug_log(&format!("daemon control worker error: {}", error)); + tracing::error!(%error, "control worker error"); coord.request_shutdown(); } result @@ -7449,7 +7446,7 @@ fn windows_control_pipe_worker_loop( coordinator.clone(), runtime_handle.clone(), ) { - debug_log(&format!("daemon control connection error: {}", e)); + tracing::debug!(%e, "control connection error"); } } @@ -7528,12 +7525,12 @@ fn trace_listener_loop_actor( if std::thread::Builder::new() .spawn(move || { if let Err(e) = handle_trace_connection_actor(stream, coord) { - debug_log(&format!("daemon trace connection error: {}", e)); + tracing::debug!(%e, "trace connection error"); } }) .is_err() { - debug_log("daemon trace listener: failed to spawn handler thread"); + tracing::error!("trace listener: failed to spawn handler thread"); break; } } @@ -7550,7 +7547,7 @@ fn trace_listener_loop_actor( workers.push(std::thread::spawn(move || { let result = windows_trace_pipe_worker_loop(path, first_connecting, coord.clone()); if let Err(error) = &result { - debug_log(&format!("daemon trace worker error: {}", error)); + tracing::error!(%error, "trace worker error"); coord.request_shutdown(); } result @@ -7563,7 +7560,7 @@ fn trace_listener_loop_actor( workers.push(std::thread::spawn(move || { let 
result = windows_trace_pipe_worker_loop(path, connecting, coord.clone()); if let Err(error) = &result { - debug_log(&format!("daemon trace worker error: {}", error)); + tracing::error!(%error, "trace worker error"); coord.request_shutdown(); } result @@ -7610,7 +7607,7 @@ fn windows_trace_pipe_worker_loop( { let mut reader = BufReader::new(&mut server); if let Err(e) = handle_trace_connection_actor_reader(&mut reader, coordinator.clone()) { - debug_log(&format!("daemon trace connection error: {}", e)); + tracing::debug!(%e, "trace connection error"); } } @@ -7674,18 +7671,18 @@ fn handle_trace_connection_actor_reader( if let Err(error) = coordinator.enqueue_stale_connection_close_fallbacks(&stale_candidates) { - debug_log(&format!( - "daemon trace connection close fallback error: {}", - error - )); + tracing::debug!( + %error, + "trace connection close fallback error" + ); } } Ok(_) => {} Err(error) => { - debug_log(&format!( - "daemon trace connection close bookkeeping error: {}", - error - )); + tracing::debug!( + %error, + "trace connection close bookkeeping error" + ); } } } @@ -7797,21 +7794,21 @@ fn daemon_update_check_loop(coordinator: Arc, started_at match check_for_update_available() { Ok(DaemonUpdateCheckResult::UpdateReady) => { - debug_log("daemon update check: newer version available, requesting shutdown"); + tracing::info!("update check: newer version available, requesting shutdown"); coordinator.request_shutdown(); return; } Ok(DaemonUpdateCheckResult::NoUpdate) => { - debug_log("daemon update check: no update needed"); + tracing::info!("update check: no update needed"); } Err(err) => { - debug_log(&format!("daemon update check failed: {}", err)); + tracing::warn!(%err, "update check failed"); } } let uptime_ns = now_unix_nanos().saturating_sub(started_at_ns); if uptime_ns >= daemon_max_uptime_ns() { - debug_log("daemon uptime exceeded max, requesting restart"); + tracing::info!("uptime exceeded max, requesting restart"); 
coordinator.request_shutdown(); return; } @@ -7830,13 +7827,13 @@ pub(crate) fn daemon_run_pending_self_update() { match check_and_install_update_if_available() { Ok(DaemonUpdateCheckResult::UpdateReady) => { - debug_log("daemon self-update: installation completed successfully"); + tracing::info!("self-update: installation completed successfully"); } Ok(DaemonUpdateCheckResult::NoUpdate) => { - debug_log("daemon self-update: no update to install"); + tracing::info!("self-update: no update to install"); } Err(err) => { - debug_log(&format!("daemon self-update: installation failed: {}", err)); + tracing::warn!(%err, "self-update: installation failed"); } } } @@ -7848,15 +7845,48 @@ pub async fn run_daemon(config: DaemonConfig) -> Result<(), GitAiError> { if let Err(error) = crate::commands::checkpoint::prune_stale_captured_checkpoints( Duration::from_secs(60 * 60 * 24), ) { - debug_log(&format!( - "daemon stale captured checkpoint pruning failed: {}", - error - )); + tracing::warn!( + %error, + "stale captured checkpoint pruning failed" + ); } let _lock = DaemonLock::acquire(&config.lock_path)?; let _active_guard = DaemonProcessActiveGuard::enter(); write_pid_metadata(&config)?; + + // Initialize tracing subscriber before log file redirect so the fmt layer + // captures stderr (fd 2). After dup2, writes go to the daemon log file. 
+ { + use tracing_subscriber::{EnvFilter, layer::SubscriberExt, util::SubscriberInitExt}; + + let env_filter = if std::env::var("GIT_AI_DEBUG").as_deref() == Ok("1") { + EnvFilter::new("debug") + } else { + EnvFilter::try_from_default_env().unwrap_or_else(|_| EnvFilter::new("info")) + }; + + tracing_subscriber::registry() + .with(env_filter) + .with( + tracing_subscriber::fmt::layer() + .with_target(false) + .with_thread_ids(false) + .with_ansi(false), + ) + .with(crate::daemon::sentry_layer::SentryLayer) + .init(); + } + let _log_guard = maybe_setup_daemon_log_file(&config); + + tracing::info!( + pid = std::process::id(), + version = env!("CARGO_PKG_VERSION"), + os = std::env::consts::OS, + arch = std::env::consts::ARCH, + "daemon started" + ); + remove_socket_if_exists(&config.trace_socket_path)?; remove_socket_if_exists(&config.control_socket_path)?; @@ -7881,10 +7911,10 @@ pub async fn run_daemon(config: DaemonConfig) -> Result<(), GitAiError> { match result { Ok(Ok(())) => {} Ok(Err(e)) => { - debug_log(&format!("daemon control listener exited with error: {}", e)); + tracing::error!(%e, "control listener exited with error"); } Err(_) => { - debug_log("daemon control listener panicked"); + tracing::error!("control listener panicked"); } } // Always request shutdown so the daemon doesn't stay half-alive. @@ -7900,10 +7930,10 @@ pub async fn run_daemon(config: DaemonConfig) -> Result<(), GitAiError> { match result { Ok(Ok(())) => {} Ok(Err(e)) => { - debug_log(&format!("daemon trace listener exited with error: {}", e)); + tracing::error!(%e, "trace listener exited with error"); } Err(_) => { - debug_log("daemon trace listener panicked"); + tracing::error!("trace listener panicked"); } } // Always request shutdown so the daemon doesn't stay half-alive. 
@@ -7934,6 +7964,8 @@ pub async fn run_daemon(config: DaemonConfig) -> Result<(), GitAiError> { remove_socket_if_exists(&config.control_socket_path)?; remove_pid_metadata(&config)?; + tracing::info!("daemon shutdown complete"); + Ok(()) } diff --git a/src/daemon/sentry_layer.rs b/src/daemon/sentry_layer.rs new file mode 100644 index 000000000..e2a75800d --- /dev/null +++ b/src/daemon/sentry_layer.rs @@ -0,0 +1,93 @@ +//! Custom tracing Layer that forwards ERROR-level events to Sentry +//! via the existing daemon telemetry worker pipeline. + +use tracing::field::{Field, Visit}; +use tracing::{Event, Level, Subscriber}; +use tracing_subscriber::layer::{Context, Layer}; + +/// A tracing Layer that intercepts ERROR-level events and routes them +/// to the daemon's telemetry worker as `TelemetryEnvelope::Error` events, +/// which get forwarded to both enterprise and OSS Sentry DSNs. +pub struct SentryLayer; + +struct MessageVisitor { + message: String, + fields: serde_json::Map, +} + +impl MessageVisitor { + fn new() -> Self { + Self { + message: String::new(), + fields: serde_json::Map::new(), + } + } +} + +impl Visit for MessageVisitor { + fn record_debug(&mut self, field: &Field, value: &dyn std::fmt::Debug) { + if field.name() == "message" { + self.message = format!("{:?}", value); + } else { + self.fields.insert( + field.name().to_string(), + serde_json::Value::String(format!("{:?}", value)), + ); + } + } + + fn record_str(&mut self, field: &Field, value: &str) { + if field.name() == "message" { + self.message = value.to_string(); + } else { + self.fields.insert( + field.name().to_string(), + serde_json::Value::String(value.to_string()), + ); + } + } + + fn record_i64(&mut self, field: &Field, value: i64) { + self.fields.insert( + field.name().to_string(), + serde_json::Value::Number(value.into()), + ); + } + + fn record_u64(&mut self, field: &Field, value: u64) { + self.fields.insert( + field.name().to_string(), + serde_json::Value::Number(value.into()), + ); + } 
+ + fn record_bool(&mut self, field: &Field, value: bool) { + self.fields + .insert(field.name().to_string(), serde_json::Value::Bool(value)); + } +} + +impl Layer for SentryLayer { + fn on_event(&self, event: &Event<'_>, _ctx: Context<'_, S>) { + if *event.metadata().level() != Level::ERROR { + return; + } + + let mut visitor = MessageVisitor::new(); + event.record(&mut visitor); + + let context = if visitor.fields.is_empty() { + None + } else { + Some(serde_json::Value::Object(visitor.fields)) + }; + + let envelope = crate::daemon::control_api::TelemetryEnvelope::Error { + timestamp: chrono::Utc::now().to_rfc3339(), + message: visitor.message, + context, + }; + + crate::daemon::telemetry_worker::submit_daemon_internal_telemetry(vec![envelope]); + } +} diff --git a/src/daemon/telemetry_worker.rs b/src/daemon/telemetry_worker.rs index 00219269e..feb72e81a 100644 --- a/src/daemon/telemetry_worker.rs +++ b/src/daemon/telemetry_worker.rs @@ -9,7 +9,6 @@ use crate::daemon::control_api::{CasSyncPayload, TelemetryEnvelope}; use crate::metrics::db::MetricsDatabase; use crate::metrics::{MetricEvent, MetricsBatch}; use crate::observability::MAX_METRICS_PER_ENVELOPE; -use crate::utils::debug_log; use serde_json::{Value, json}; use std::collections::BTreeMap; use std::sync::Arc; @@ -252,7 +251,7 @@ async fn telemetry_flush_loop(buffer: Arc>) { }) .await .unwrap_or_else(|e| { - debug_log(&format!("telemetry flush task panicked: {}", e)); + tracing::error!(%e, "telemetry flush task panicked"); }); } } @@ -523,7 +522,7 @@ fn flush_cas(records: Vec) { let using_default_api = api_base_url == crate::config::DEFAULT_API_BASE_URL; if using_default_api && !client.is_logged_in() && !client.has_api_key() { - debug_log("daemon telemetry: skipping CAS flush, not logged in"); + tracing::debug!("telemetry: skipping CAS flush, not logged in"); return; } @@ -533,7 +532,7 @@ fn flush_cas(records: Vec) { let content: Value = match serde_json::from_str(&record.data) { Ok(v) => v, Err(e) => { - 
debug_log(&format!("daemon telemetry: CAS parse error: {}", e)); + tracing::warn!(%e, "telemetry: CAS parse error"); continue; } }; @@ -568,13 +567,10 @@ fn flush_cas(records: Vec) { { let _ = db_lock.delete_cas_by_hashes(&hashes); } - debug_log(&format!( - "daemon telemetry: uploaded {} CAS objects", - chunk.len() - )); + tracing::debug!(count = chunk.len(), "telemetry: uploaded CAS objects"); } Err(e) => { - debug_log(&format!("daemon telemetry: CAS upload error: {}", e)); + tracing::warn!(%e, "telemetry: CAS upload error"); } } } diff --git a/src/git/refs.rs b/src/git/refs.rs index 12199ff95..8657d37fe 100644 --- a/src/git/refs.rs +++ b/src/git/refs.rs @@ -2,7 +2,6 @@ use crate::authorship::authorship_log_serialization::{AUTHORSHIP_LOG_VERSION, Au use crate::authorship::working_log::Checkpoint; use crate::error::GitAiError; use crate::git::repository::{Repository, exec_git, exec_git_stdin}; -use crate::utils::debug_log; use serde_json; use std::collections::{HashMap, HashSet}; @@ -338,10 +337,10 @@ pub fn notes_add_blob_batch( crate::authorship::git_ai_hooks::post_notes_updated(repo, &entries) } Ok(_) => {} - Err(e) => debug_log(&format!( + Err(e) => tracing::debug!( "Failed to prepare post_notes_updated payload for notes_add_blob_batch: {}", e - )), + ), } } @@ -568,10 +567,7 @@ pub fn merge_notes_from_ref(repo: &Repository, source_ref: &str) -> Result<(), G args.push("--quiet".to_string()); args.push(source_ref.to_string()); - debug_log(&format!( - "Merging notes from {} into refs/notes/ai", - source_ref - )); + tracing::debug!("Merging notes from {} into refs/notes/ai", source_ref); exec_git(&args)?; Ok(()) } @@ -627,7 +623,7 @@ pub fn fallback_merge_notes_ours(repo: &Repository, source_ref: &str) -> Result< ]); exec_git_stdin(&args, stream.as_bytes())?; - debug_log("fallback merge via fast-import completed successfully"); + tracing::debug!("fallback merge via fast-import completed successfully"); Ok(()) } @@ -679,7 +675,7 @@ pub fn copy_ref(repo: 
&Repository, source_ref: &str, dest_ref: &str) -> Result<( args.push(dest_ref.to_string()); args.push(source_ref.to_string()); - debug_log(&format!("Copying ref {} to {}", source_ref, dest_ref)); + tracing::debug!("Copying ref {} to {}", source_ref, dest_ref); exec_git(&args)?; Ok(()) } diff --git a/src/git/repo_storage.rs b/src/git/repo_storage.rs index 692e57d1e..f6442eb77 100644 --- a/src/git/repo_storage.rs +++ b/src/git/repo_storage.rs @@ -4,7 +4,7 @@ use crate::authorship::authorship_log_serialization::generate_short_hash; use crate::authorship::working_log::{CHECKPOINT_API_VERSION, Checkpoint, CheckpointKind}; use crate::error::GitAiError; use crate::git::rewrite_log::{RewriteLogEvent, append_event_to_file}; -use crate::utils::{debug_log, normalize_to_posix}; +use crate::utils::normalize_to_posix; use serde::{Deserialize, Serialize}; use sha2::{Digest, Sha256}; use std::collections::{HashMap, HashSet}; @@ -126,10 +126,7 @@ impl RepoStorage { // Best-effort; don't fail the commit if we can't write the marker let _ = fs::write(&marker, now.to_string()); - debug_log(&format!( - "Moved checkpoint directory from {} to old-{}", - sha, sha - )); + tracing::debug!("Moved checkpoint directory from {} to old-{}", sha, sha); // In production builds, prune old working logs that have expired. // Debug builds never prune so developers can inspect old state. 
@@ -178,7 +175,7 @@ impl RepoStorage { }; if now_secs.saturating_sub(archived_at) >= Self::OLD_WORKING_LOG_RETENTION_SECS { - debug_log(&format!("Pruning expired old working log: {}", name_str)); + tracing::debug!("Pruning expired old working log: {}", name_str); let _ = fs::remove_dir_all(&dir_path); } } @@ -192,10 +189,7 @@ impl RepoStorage { let new_dir = self.working_logs.join(new_sha); if old_dir.exists() && !new_dir.exists() { fs::rename(&old_dir, &new_dir)?; - debug_log(&format!( - "Renamed working log from {} to {}", - old_sha, new_sha - )); + tracing::debug!("Renamed working log from {} to {}", old_sha, new_sha); } Ok(()) } @@ -478,10 +472,10 @@ impl PersistedWorkingLog { .map_err(|e| std::io::Error::new(std::io::ErrorKind::InvalidData, e))?; if checkpoint.api_version != CHECKPOINT_API_VERSION { - debug_log(&format!( + tracing::debug!( "unsupported checkpoint api version: {} (silently skipping checkpoint)", checkpoint.api_version - )); + ); continue; } @@ -762,18 +756,12 @@ impl PersistedWorkingLog { Ok(content) => match serde_json::from_str(&content) { Ok(initial_data) => initial_data, Err(e) => { - debug_log(&format!( - "Failed to parse INITIAL file: {}. Returning empty.", - e - )); + tracing::debug!("Failed to parse INITIAL file: {}. Returning empty.", e); InitialAttributions::default() } }, Err(e) => { - debug_log(&format!( - "Failed to read INITIAL file: {}. Returning empty.", - e - )); + tracing::debug!("Failed to read INITIAL file: {}. 
Returning empty.", e); InitialAttributions::default() } } diff --git a/src/git/repository.rs b/src/git/repository.rs index dfdd9f692..b691bf6ff 100644 --- a/src/git/repository.rs +++ b/src/git/repository.rs @@ -10,7 +10,6 @@ use crate::git::repo_storage::RepoStorage; use crate::git::rewrite_log::RewriteLogEvent; use crate::git::status::MAX_PATHSPEC_ARGS; use crate::git::sync_authorship::{fetch_authorship_notes, push_authorship_notes}; -use crate::utils::debug_log; #[cfg(windows)] use crate::utils::is_interactive_terminal; use unicode_normalization::UnicodeNormalization; @@ -1250,10 +1249,11 @@ impl Repository { supress_output, ) { - debug_log(&format!( + tracing::debug!( "rewrite_authorship_if_needed failed for {:?}: {}", - rewrite_log_event, error - )); + rewrite_log_event, + error + ); crate::observability::log_error( &error, Some(serde_json::json!({ diff --git a/src/git/sync_authorship.rs b/src/git/sync_authorship.rs index 7b3a42989..a5bbda0c1 100644 --- a/src/git/sync_authorship.rs +++ b/src/git/sync_authorship.rs @@ -5,7 +5,6 @@ use crate::git::refs::{ use crate::{ error::GitAiError, git::{cli_parser::ParsedGitInvocation, repository::exec_git}, - utils::debug_log, }; use super::repository::Repository; @@ -105,26 +104,21 @@ pub fn fetch_missing_notes_for_commits(repository: &Repository, source_commits: return; } - debug_log(&format!( + tracing::debug!( "Source commits missing notes: {:?}, trying to fetch from remotes", missing - )); + ); if let Ok(remotes) = repository.remotes_with_urls() { for (remote_name, _) in remotes { - debug_log(&format!( - "Attempting safe notes fetch from remote {}", - remote_name - )); + tracing::debug!("Attempting safe notes fetch from remote {}", remote_name); match fetch_authorship_notes(repository, &remote_name) { - Ok(_) => debug_log(&format!( - "✓ Fetched and merged notes from remote {}", - remote_name - )), - Err(e) => debug_log(&format!( + Ok(_) => tracing::debug!("Fetched and merged notes from remote {}", remote_name), + 
Err(e) => tracing::debug!( "Notes fetch from remote {} failed (best-effort): {}", - remote_name, e - )), + remote_name, + e + ), } } } @@ -141,10 +135,11 @@ pub fn fetch_authorship_notes( // Generate tracking ref for this remote let tracking_ref = tracking_ref_for_remote(remote_name); - debug_log(&format!( + tracing::debug!( "fetching authorship notes for remote '{}' to tracking ref '{}'", - remote_name, tracking_ref - )); + remote_name, + tracking_ref + ); // Fetch notes to tracking ref with explicit refspec. // If the remote does not have refs/notes/ai yet, treat that as NotFound. @@ -158,28 +153,28 @@ pub fn fetch_authorship_notes( &fetch_refspec, ); - debug_log(&format!("fetch command: {:?}", fetch_authorship)); + tracing::debug!("fetch command: {:?}", fetch_authorship); match exec_git(&fetch_authorship) { Ok(output) => { - debug_log(&format!( + tracing::debug!( "fetch stdout: '{}'", String::from_utf8_lossy(&output.stdout) - )); - debug_log(&format!( + ); + tracing::debug!( "fetch stderr: '{}'", String::from_utf8_lossy(&output.stderr) - )); + ); } Err(e) => { if is_missing_remote_notes_ref_error(&e) { - debug_log(&format!( + tracing::debug!( "no authorship notes found on remote '{}', nothing to sync", remote_name - )); + ); return Ok(NotesExistence::NotFound); } - debug_log(&format!("authorship fetch failed: {}", e)); + tracing::debug!("authorship fetch failed: {}", e); return Err(e); } } @@ -190,33 +185,32 @@ pub fn fetch_authorship_notes( if crate::git::refs::ref_exists(repository, &tracking_ref) { if crate::git::refs::ref_exists(repository, local_notes_ref) { // Both exist - merge them - debug_log(&format!( + tracing::debug!( "merging authorship notes from {} into {}", - tracking_ref, local_notes_ref - )); + tracking_ref, + local_notes_ref + ); if let Err(e) = merge_notes_from_ref(repository, &tracking_ref) { - debug_log(&format!("notes merge failed: {}", e)); + tracing::debug!("notes merge failed: {}", e); // Fallback: manually merge notes when git notes 
merge crashes if let Err(e2) = fallback_merge_notes_ours(repository, &tracking_ref) { - debug_log(&format!("fallback merge also failed: {}", e2)); + tracing::debug!("fallback merge also failed: {}", e2); } } } else { // Only tracking ref exists - copy it to local - debug_log(&format!( + tracing::debug!( "initializing {} from tracking ref {}", - local_notes_ref, tracking_ref - )); + local_notes_ref, + tracking_ref + ); if let Err(e) = copy_ref(repository, &tracking_ref, local_notes_ref) { - debug_log(&format!("notes copy failed: {}", e)); + tracing::debug!("notes copy failed: {}", e); // Don't fail on copy errors, just log and continue } } } else { - debug_log(&format!( - "tracking ref {} was not created after fetch", - tracking_ref - )); + tracing::debug!("tracking ref {} was not created after fetch", tracking_ref); } Ok(NotesExistence::Found) @@ -245,11 +239,11 @@ pub fn push_authorship_notes(repository: &Repository, remote_name: &str) -> Resu for attempt in 0..PUSH_NOTES_MAX_ATTEMPTS { if attempt > 0 { - debug_log(&format!( + tracing::debug!( "retrying notes push (attempt {}/{})", attempt + 1, PUSH_NOTES_MAX_ATTEMPTS - )); + ); } fetch_and_merge_tracking_notes(repository, remote_name); @@ -257,15 +251,12 @@ pub fn push_authorship_notes(repository: &Repository, remote_name: &str) -> Resu // Push notes without force (requires fast-forward) let push_args = build_authorship_push_args(repository.global_args_for_exec(), remote_name); - debug_log(&format!( - "pushing authorship refs (no force): {:?}", - &push_args - )); + tracing::debug!("pushing authorship refs (no force): {:?}", &push_args); match exec_git(&push_args) { Ok(_) => return Ok(()), Err(e) => { - debug_log(&format!("authorship push failed: {}", e)); + tracing::debug!("authorship push failed: {}", e); if is_non_fast_forward_error(&e) && attempt + 1 < PUSH_NOTES_MAX_ATTEMPTS { // Another pusher updated remote notes between our merge and push. // Retry the full fetch-merge-push cycle. 
@@ -292,7 +283,7 @@ fn fetch_and_merge_tracking_notes(repository: &Repository, remote_name: &str) { &fetch_refspec, ); - debug_log(&format!("pre-push authorship fetch: {:?}", &fetch_args)); + tracing::debug!("pre-push authorship fetch: {:?}", &fetch_args); // Fetch is best-effort; if it fails (e.g., no remote notes yet), continue if exec_git(&fetch_args).is_err() { @@ -307,28 +298,30 @@ fn fetch_and_merge_tracking_notes(repository: &Repository, remote_name: &str) { if !ref_exists(repository, local_notes_ref) { // Only tracking ref exists - copy it to local - debug_log(&format!( + tracing::debug!( "pre-push: initializing {} from {}", - local_notes_ref, tracking_ref - )); + local_notes_ref, + tracking_ref + ); if let Err(e) = copy_ref(repository, &tracking_ref, local_notes_ref) { - debug_log(&format!("pre-push notes copy failed: {}", e)); + tracing::debug!("pre-push notes copy failed: {}", e); } return; } // Both exist - merge them - debug_log(&format!( + tracing::debug!( "pre-push: merging {} into {}", - tracking_ref, local_notes_ref - )); + tracking_ref, + local_notes_ref + ); if let Err(e) = merge_notes_from_ref(repository, &tracking_ref) { - debug_log(&format!("pre-push notes merge failed: {}", e)); + tracing::debug!("pre-push notes merge failed: {}", e); // Fallback: manually merge notes when git notes merge crashes // (e.g., due to corrupted/mixed-fanout notes trees, or git bugs // with fanout-level mismatches on older git versions like macOS) if let Err(e2) = fallback_merge_notes_ours(repository, &tracking_ref) { - debug_log(&format!("pre-push fallback merge also failed: {}", e2)); + tracing::debug!("pre-push fallback merge also failed: {}", e2); } } } diff --git a/src/mdm/agents/cursor.rs b/src/mdm/agents/cursor.rs index dcadd9844..78664c6e9 100644 --- a/src/mdm/agents/cursor.rs +++ b/src/mdm/agents/cursor.rs @@ -8,7 +8,6 @@ use crate::mdm::utils::{ settings_paths_for_products, should_process_settings_target, version_meets_requirement, write_atomic, }; -use 
crate::utils::debug_log; use serde_json::{Value, json}; use std::fs; use std::path::PathBuf; @@ -356,10 +355,10 @@ impl HookInstaller for CursorInstaller { }); } Err(e) => { - debug_log(&format!( + tracing::debug!( "Cursor: Error automatically installing extension: {}", e - )); + ); results.push(InstallResult { changed: false, diff: None, diff --git a/src/mdm/agents/jetbrains.rs b/src/mdm/agents/jetbrains.rs index 3076b8532..6683d9bc9 100644 --- a/src/mdm/agents/jetbrains.rs +++ b/src/mdm/agents/jetbrains.rs @@ -7,8 +7,6 @@ use crate::mdm::jetbrains::{ find_jetbrains_installations, install_plugin_to_directory, install_plugin_via_cli, is_plugin_installed, }; -use crate::utils::debug_log; - pub struct JetBrainsInstaller; impl JetBrainsInstaller { @@ -64,16 +62,13 @@ impl JetBrainsInstaller { }; } Ok(false) => { - debug_log(&format!( + tracing::debug!( "JetBrains: CLI install failed for {}, trying direct download", ide_name - )); + ); } Err(e) => { - debug_log(&format!( - "JetBrains: CLI install error for {}: {}", - ide_name, e - )); + tracing::debug!("JetBrains: CLI install error for {}: {}", ide_name, e); } } @@ -97,18 +92,20 @@ impl JetBrainsInstaller { }; } Err(e) => { - debug_log(&format!( + tracing::debug!( "JetBrains: Failed to extract plugin for {}: {}", - ide_name, e - )); + ide_name, + e + ); } } } Err(e) => { - debug_log(&format!( + tracing::debug!( "JetBrains: Failed to download plugin for {}: {}", - ide_name, e - )); + ide_name, + e + ); } } } @@ -167,11 +164,11 @@ impl HookInstaller for JetBrainsInstaller { let has_compatible = installations.iter().any(|i| i.is_compatible()); if !has_compatible { - debug_log(&format!( + tracing::debug!( "JetBrains: Found {} IDEs but none meet minimum version requirement (build {})", installations.len(), MIN_INTELLIJ_BUILD - )); + ); } // JetBrains doesn't have config file hooks - only the plugin via install_extras diff --git a/src/mdm/agents/vscode.rs b/src/mdm/agents/vscode.rs index f4cfc17e3..8d694f312 100644 --- 
a/src/mdm/agents/vscode.rs +++ b/src/mdm/agents/vscode.rs @@ -7,7 +7,6 @@ use crate::mdm::utils::{ is_github_codespaces, is_vsc_editor_extension_installed, parse_version, resolve_editor_cli, settings_paths_for_products, should_process_settings_target, version_meets_requirement, }; -use crate::utils::debug_log; use std::path::PathBuf; pub struct VSCodeInstaller; @@ -152,10 +151,10 @@ impl HookInstaller for VSCodeInstaller { }); } Err(e) => { - debug_log(&format!( + tracing::debug!( "VS Code: Error automatically installing extension: {}", e - )); + ); results.push(InstallResult { changed: false, diff: None, diff --git a/src/mdm/agents/windsurf.rs b/src/mdm/agents/windsurf.rs index 6ab5b6e19..4f82cee76 100644 --- a/src/mdm/agents/windsurf.rs +++ b/src/mdm/agents/windsurf.rs @@ -6,7 +6,7 @@ use crate::mdm::utils::{ generate_diff, home_dir, install_vsc_editor_extension, is_git_ai_checkpoint_command, is_github_codespaces, is_vsc_editor_extension_installed, resolve_editor_cli, write_atomic, }; -use crate::utils::debug_log; + use serde_json::{Value, json}; use std::fs; use std::path::PathBuf; @@ -354,10 +354,10 @@ impl HookInstaller for WindsurfInstaller { }); } Err(e) => { - debug_log(&format!( + tracing::debug!( "Windsurf: Error automatically installing extension: {}", e - )); + ); results.push(InstallResult { changed: false, diff: None, diff --git a/src/mdm/jetbrains/detection.rs b/src/mdm/jetbrains/detection.rs index 490935c85..0067d0911 100644 --- a/src/mdm/jetbrains/detection.rs +++ b/src/mdm/jetbrains/detection.rs @@ -1,7 +1,5 @@ use super::ide_types::{DetectedIde, JETBRAINS_IDES, JetBrainsIde}; use crate::mdm::utils::home_dir; -#[cfg(target_os = "macos")] -use crate::utils::debug_log; use std::path::{Path, PathBuf}; #[cfg(target_os = "macos")] use std::process::Command; @@ -124,10 +122,11 @@ fn detect_macos_ide(ide: &'static JetBrainsIde, app_path: &Path) -> Option Result } } - debug_log(&format!( - "JetBrains: Plugin extracted to {}", - plugin_dir.display() - )); 
+ tracing::debug!("JetBrains: Plugin extracted to {}", plugin_dir.display()); Ok(()) } @@ -137,10 +133,7 @@ pub fn install_plugin_to_directory(zip_data: &[u8], plugin_dir: &Path) -> Result pub fn install_plugin_via_cli(binary_path: &Path, plugin_id: &str) -> Result { use std::process::Command; - debug_log(&format!( - "JetBrains: Trying CLI installation with {:?}", - binary_path - )); + tracing::debug!("JetBrains: Trying CLI installation with {:?}", binary_path); #[cfg(windows)] let result = Command::new(binary_path) @@ -155,16 +148,16 @@ pub fn install_plugin_via_cli(binary_path: &Path, plugin_id: &str) -> Result { if output.status.success() { - debug_log("JetBrains: CLI installation succeeded"); + tracing::debug!("JetBrains: CLI installation succeeded"); Ok(true) } else { let stderr = String::from_utf8_lossy(&output.stderr); - debug_log(&format!("JetBrains: CLI installation failed: {}", stderr)); + tracing::debug!("JetBrains: CLI installation failed: {}", stderr); Ok(false) } } Err(e) => { - debug_log(&format!("JetBrains: Failed to run CLI: {}", e)); + tracing::debug!("JetBrains: Failed to run CLI: {}", e); Ok(false) } } diff --git a/src/mdm/utils.rs b/src/mdm/utils.rs index 07893acbf..96c44bf54 100644 --- a/src/mdm/utils.rs +++ b/src/mdm/utils.rs @@ -1,6 +1,5 @@ use crate::authorship::imara_diff_utils::{LineChangeTag, compute_line_changes}; use crate::error::GitAiError; -use crate::utils::debug_log; use jsonc_parser::ParseOptions; use jsonc_parser::cst::CstRootNode; use std::fs; @@ -193,11 +192,11 @@ fn find_editor_cli_js(cli_name: &str) -> Option { for (electron_path, cli_js_path) in candidates { if electron_path.is_file() && cli_js_path.is_file() { - debug_log(&format!( + tracing::debug!( "{}: CLI not in PATH, using cli.js fallback at {}", cli_name, cli_js_path.display() - )); + ); return Some(EditorCliCommand::from_cli_js(&electron_path, &cli_js_path)); } } diff --git a/src/observability/wrapper_performance_targets.rs 
b/src/observability/wrapper_performance_targets.rs index ccea73fc9..8b34f1569 100644 --- a/src/observability/wrapper_performance_targets.rs +++ b/src/observability/wrapper_performance_targets.rs @@ -2,11 +2,7 @@ use std::{collections::HashMap, ops::Add, time::Duration}; use serde_json::json; -use crate::{ - authorship::working_log::CheckpointKind, - observability::log_performance, - utils::{debug_performance_log, debug_performance_log_structured}, -}; +use crate::{authorship::working_log::CheckpointKind, observability::log_performance}; pub const PERFORMANCE_FLOOR_MS: Duration = Duration::from_millis(270); @@ -62,17 +58,17 @@ pub fn log_performance_target_if_violated( "within_target": within_target, }); - debug_performance_log_structured(perf_json); + tracing::debug!(%perf_json, "performance"); if !within_target { - debug_performance_log(&format!( - "ᕽ Performance target violated for command: {}. Total duration: {}ms, Git duration: {}ms. Pre-command: {}ms, Post-command: {}ms.", + tracing::debug!( + "Performance target violated for command: {}. Total duration: {}ms, Git duration: {}ms. Pre-command: {}ms, Post-command: {}ms.", command, total_duration.as_millis(), git_duration.as_millis(), pre_command.as_millis(), post_command.as_millis(), - )); + ); log_performance( "performance_target_violated", total_duration, @@ -88,12 +84,12 @@ pub fn log_performance_target_if_violated( )])), ); } else { - debug_performance_log(&format!( - "✓ Performance target met for command: {}. Total duration: {}ms, Git duration: {}ms", + tracing::debug!( + "Performance target met for command: {}. 
Total duration: {}ms, Git duration: {}ms", command, total_duration.as_millis(), git_duration.as_millis(), - )); + ); } } @@ -116,7 +112,7 @@ pub fn log_performance_for_checkpoint( "checkpoint_kind": checkpoint_kind.to_string(), "within_target": within_target, }); - debug_performance_log_structured(perf_json); + tracing::debug!(%perf_json, "performance"); if !within_target { log_performance( @@ -133,17 +129,17 @@ pub fn log_performance_for_checkpoint( )])), ); - debug_performance_log(&format!( - "ᕽ Performance target violated for checkpoint: {}. Total duration. Files edited: {}", + tracing::debug!( + "Performance target violated for checkpoint: {}. Total duration. Files edited: {}", duration.as_millis(), files_edited, - )); + ); } else { - debug_performance_log(&format!( - "✓ Performance target met for checkpoint: {}. Total duration. Files edited: {}", + tracing::debug!( + "Performance target met for checkpoint: {}. Total duration. Files edited: {}", duration.as_millis(), files_edited, - )); + ); } } diff --git a/src/utils.rs b/src/utils.rs index d044b19fe..5ee82804f 100644 --- a/src/utils.rs +++ b/src/utils.rs @@ -4,62 +4,9 @@ use std::io::IsTerminal; use std::path::PathBuf; use std::process::{Command, Stdio}; -/// Check if debug logging is enabled via environment variable -/// -/// This is checked once at module initialization to avoid repeated environment variable lookups. 
-static DEBUG_ENABLED: std::sync::OnceLock = std::sync::OnceLock::new(); -static DEBUG_PERFORMANCE_LEVEL: std::sync::OnceLock = std::sync::OnceLock::new(); static IS_TERMINAL: std::sync::OnceLock = std::sync::OnceLock::new(); static IS_IN_BACKGROUND_AGENT: std::sync::OnceLock = std::sync::OnceLock::new(); -fn is_debug_enabled() -> bool { - *DEBUG_ENABLED.get_or_init(|| { - (cfg!(debug_assertions) - || std::env::var("GIT_AI_DEBUG").unwrap_or_default() == "1" - || std::env::var("GIT_AI_DEBUG_PERFORMANCE").unwrap_or_default() != "") - && std::env::var("GIT_AI_DEBUG").unwrap_or_default() != "0" - }) -} - -fn is_debug_performance_enabled() -> bool { - debug_performance_level() >= 1 -} - -fn debug_performance_level() -> u8 { - *DEBUG_PERFORMANCE_LEVEL.get_or_init(|| { - std::env::var("GIT_AI_DEBUG_PERFORMANCE") - .unwrap_or_default() - .parse::() - .unwrap_or(0) - }) -} - -pub fn debug_performance_log(msg: &str) { - if is_debug_performance_enabled() { - eprintln!("\x1b[1;33m[git-ai (perf)]\x1b[0m {}", msg); - } -} - -pub fn debug_performance_log_structured(json: serde_json::Value) { - if debug_performance_level() >= 2 { - eprintln!("\x1b[1;33m[git-ai (perf-json)]\x1b[0m {}", json); - } -} - -/// Debug logging utility function -/// -/// Prints debug messages with a colored prefix when debug assertions are enabled or when -/// the `GIT_AI_DEBUG` environment variable is set to "1". -/// -/// # Arguments -/// -/// * `msg` - The debug message to print -pub fn debug_log(msg: &str) { - if is_debug_enabled() { - eprintln!("\x1b[1;33m[git-ai]\x1b[0m {}", msg); - } -} - /// Print a git diff in a readable format /// /// Prints the diff between two commits/trees showing which files changed and their status. 
@@ -1095,30 +1042,6 @@ mod tests { assert_eq!(normalize_to_posix(""), ""); } - // ========================================================================= - // Debug Logging Tests - // ========================================================================= - - #[test] - fn test_debug_log_no_panic() { - // Debug logging should not panic - debug_log("test message"); - } - - #[test] - fn test_debug_performance_log_no_panic() { - debug_performance_log("test performance message"); - } - - #[test] - fn test_debug_performance_log_structured_no_panic() { - use serde_json::json; - debug_performance_log_structured(json!({ - "operation": "test", - "duration_ms": 100, - })); - } - // ========================================================================= // current_git_ai_exe Tests // =========================================================================