67e3306e
Merge feat/commit-issue-link: auto-link commits to issues via Issue: trailers
alex emery 2026-04-12 11:05
diff --git a/src/commit_link.rs b/src/commit_link.rs new file mode 100644 index 0000000..f23078e --- /dev/null +++ b/src/commit_link.rs @@ -0,0 +1,279 @@ //! Auto-link commits to issues via `Issue:` git trailers during sync. //! //! See: docs/superpowers/specs/2026-04-12-commit-issue-link-design.md use std::collections::{HashMap, HashSet}; use git2::{Oid, Repository, Sort}; use crate::dag; use crate::error::Error; use crate::event::{Action, Author, Event}; /// Walk an issue's event DAG and return every commit SHA that has an /// `IssueCommitLink` event attached. Called lazily on first match per issue /// during `scan_and_link`; the result is cached in the orchestrator's /// `HashMap<RefName, HashSet<String>>`. pub fn collect_linked_shas(repo: &Repository, issue_ref: &str) -> Result<HashSet<String>, Error> { let events = dag::walk_events(repo, issue_ref)?; let mut shas = HashSet::new(); for (_oid, event) in events { if let Action::IssueCommitLink { commit } = event.action { shas.insert(commit); } } Ok(shas) } /// Parse `Issue:` trailers from a commit message. /// /// Returns the list of trailer values in order of appearance. Follows git's /// own trailer-block semantics: only the final paragraph is considered, and /// *every* non-empty line in it must be trailer-shaped (a `token: value` /// line) for the paragraph to qualify. Any prose line in the final paragraph /// disqualifies the whole paragraph — this prevents false positives like /// `"Thanks Bob.\nIssue: abc"` in commit bodies. /// /// The key match is `(?i)issue`; the value must be a single non-whitespace /// token followed by optional trailing whitespace and end-of-line. Values /// like `abc fixes thing` are rejected so that loose commentary never /// becomes a silent issue-prefix lookup that warns every sync forever. pub fn parse_issue_trailers(message: &str) -> Vec<String> { // 1. Split into paragraphs (blank-line separated), preserving order. 
// Trim trailing whitespace from each line for the trailer-shape check, // but keep enough structure to recognize blank lines. let lines: Vec<&str> = message.lines().collect(); // 2. Find the last paragraph: the longest tail slice that contains at // least one non-empty line and has no blank line *before* its first // non-empty line in the tail. // // Walking from the end: skip trailing blank/whitespace-only lines, // then collect lines until we hit a blank line. let mut end = lines.len(); while end > 0 && lines[end - 1].trim().is_empty() { end -= 1; } if end == 0 { return Vec::new(); } let mut start = end; while start > 0 && !lines[start - 1].trim().is_empty() { start -= 1; } let paragraph = &lines[start..end]; // 3. Validate every non-empty line in the paragraph is trailer-shaped. for line in paragraph { if line.trim().is_empty() { continue; } if !is_trailer_shaped(line) { return Vec::new(); } } // 4. Extract `Issue:` values. let mut out = Vec::new(); for line in paragraph { if let Some(value) = match_issue_line(line) { out.push(value); } } out } /// Returns true if a line looks like a git trailer: `<token>: <value>`, where /// token starts with a letter and consists of `[A-Za-z0-9-]`, and value is at /// least one non-whitespace character. fn is_trailer_shaped(line: &str) -> bool { let trimmed = line.trim_start(); let Some(colon_pos) = trimmed.find(':') else { return false; }; // Use trim_end() so that `ISSUE : abc` is recognized as the token `ISSUE` // — matching what `match_issue_line` does. Without this, the space before // the colon would disqualify the line and make the whole paragraph fail // the trailer-shape check. 
let token = trimmed[..colon_pos].trim_end(); if token.is_empty() { return false; } let mut chars = token.chars(); let first = chars.next().unwrap(); if !first.is_ascii_alphabetic() { return false; } if !chars.all(|c| c.is_ascii_alphanumeric() || c == '-') { return false; } let value = trimmed[colon_pos + 1..].trim(); !value.is_empty() } /// If `line` is an `Issue: <token>` trailer with exactly one non-whitespace /// token in its value, returns the token. Otherwise returns None. fn match_issue_line(line: &str) -> Option<String> { let trimmed = line.trim_start(); let colon_pos = trimmed.find(':')?; let key = trimmed[..colon_pos].trim_end(); if !key.eq_ignore_ascii_case("issue") { return None; } let value_region = &trimmed[colon_pos + 1..]; let value = value_region.trim(); if value.is_empty() { return None; } // Reject values with interior whitespace: `abc fixes thing` must not // parse to `abc` silently — it must parse to nothing so the user sees // that their commentary is being ignored. if value.split_whitespace().count() != 1 { return None; } Some(value.to_string()) } const ACTIVE_ISSUE_PREFIX: &str = "refs/collab/issues/"; const ARCHIVED_ISSUE_PREFIX: &str = "refs/collab/archive/issues/"; /// Walk every commit reachable from `refs/heads/*`, parse `Issue:` trailers, /// resolve each to an issue, and emit an `IssueCommitLink` event for any /// (issue, commit) pair that doesn't already have one. /// /// **Never breaks sync.** Per-commit and per-issue errors are logged as /// one-line stderr warnings and iteration continues. The only errors that /// propagate are "couldn't even start" failures (opening the repo, building /// the revwalk). Callers treat a returned `Err` as "skip the link scan for /// this sync" and proceed. /// /// Returns the number of events actually emitted. pub fn scan_and_link( repo: &Repository, author: &Author, sk: &ed25519_dalek::SigningKey, ) -> Result<usize, Error> { // Build a revwalk seeded from every local branch tip. 
let mut revwalk = repo.revwalk()?; revwalk.set_sorting(Sort::TOPOLOGICAL)?; let mut seeded_any = false; for reference in repo.references_glob("refs/heads/*")? { let Ok(reference) = reference else { continue }; let Some(target) = reference.target() else { continue; }; // `revwalk.push` dedups commits across branch tips internally. if revwalk.push(target).is_ok() { seeded_any = true; } } if !seeded_any { // Detached HEAD with no local branches. Silent no-op per spec. return Ok(0); } // Per-sync dedup of commits already visited. let mut visited: HashSet<Oid> = HashSet::new(); // Cache of existing link SHAs per resolved issue ref. `None` = poisoned. let mut link_cache: HashMap<String, Option<HashSet<String>>> = HashMap::new(); let mut emitted: usize = 0; for oid_result in revwalk { let oid = match oid_result { Ok(o) => o, Err(e) => { eprintln!("warning: revwalk error, stopping scan: {}", e); break; } }; if !visited.insert(oid) { continue; } let commit = match repo.find_commit(oid) { Ok(c) => c, Err(e) => { eprintln!("warning: cannot load commit {}: {}", oid, e); continue; } }; let message = commit.message().unwrap_or(""); let trailers = parse_issue_trailers(message); if trailers.is_empty() { continue; } for prefix in trailers { match crate::state::resolve_issue_ref(repo, &prefix) { Ok((resolved_ref, _resolved_id)) => { if resolved_ref.starts_with(ARCHIVED_ISSUE_PREFIX) { eprintln!( "warning: commit {}: Issue: {} — issue is archived, skipping", oid, prefix ); continue; } if !resolved_ref.starts_with(ACTIVE_ISSUE_PREFIX) { // Unknown namespace. Should not happen with current // resolver, but belt-and-braces. 
eprintln!( "warning: commit {}: Issue: {} — resolved to unexpected ref {}, skipping", oid, prefix, resolved_ref ); continue; } let entry = link_cache.entry(resolved_ref.clone()).or_insert_with(|| { match collect_linked_shas(repo, &resolved_ref) { Ok(set) => Some(set), Err(e) => { eprintln!( "warning: cannot read link events for {}: {} — skipping issue for the rest of this sync", resolved_ref, e ); None } } }); let Some(set) = entry.as_mut() else { continue }; let sha = oid.to_string(); if set.contains(&sha) { continue; } let event = Event { timestamp: chrono::Utc::now().to_rfc3339(), author: author.clone(), action: Action::IssueCommitLink { commit: sha.clone(), }, clock: 0, }; match dag::append_event(repo, &resolved_ref, &event, sk) { Ok(_) => { set.insert(sha); emitted += 1; } Err(e) => { eprintln!( "warning: commit {}: failed to emit IssueCommitLink on {}: {}", oid, resolved_ref, e ); } } } Err(e) => { // resolve_issue_ref error message already distinguishes // "no issue found" from "ambiguous prefix". eprintln!( "warning: commit {}: Issue: {} — {}, skipping", oid, prefix, e ); } } } } Ok(emitted) } diff --git a/src/dag.rs b/src/dag.rs index e1ac63e..6a58345 100644 --- a/src/dag.rs +++ b/src/dag.rs @@ -331,6 +331,7 @@ fn commit_message(action: &Action) -> String { Action::IssueComment { .. } => "issue: comment".to_string(), Action::IssueClose { .. } => "issue: close".to_string(), Action::IssueReopen => "issue: reopen".to_string(), Action::IssueCommitLink { commit } => format!("issue: commit link {}", &commit[..commit.len().min(7)]), Action::PatchCreate { title, .. } => format!("patch: create \"{}\"", title), Action::PatchRevision { .. } => "patch: revision".to_string(), Action::PatchReview { verdict, .. 
} => format!("patch: review ({})", verdict), diff --git a/src/event.rs b/src/event.rs index a897247..6e2edfb 100644 --- a/src/event.rs +++ b/src/event.rs @@ -56,6 +56,10 @@ pub enum Action { }, #[serde(rename = "issue.reopen")] IssueReopen, #[serde(rename = "issue.commit_link")] IssueCommitLink { commit: String, }, #[serde(rename = "patch.create", alias = "PatchCreate")] PatchCreate { title: String, diff --git a/src/lib.rs b/src/lib.rs index 29c501f..50a2fe7 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -1,5 +1,6 @@ pub mod cache; pub mod cli; pub mod commit_link; pub mod dag; pub mod editor; pub mod error; @@ -144,6 +145,51 @@ pub fn run(cli: cli::Cli, repo: &Repository) -> Result<(), error::Error> { println!("\n{} ({}):\n{}", c.author.name, c.timestamp, c.body); } } if !i.linked_commits.is_empty() { println!("\n--- Linked Commits ---"); for lc in &i.linked_commits { let short_sha = if lc.commit.len() >= 7 { &lc.commit[..7] } else { &lc.commit }; let (subject, commit_author) = match git2::Oid::from_str(&lc.commit) .ok() .and_then(|oid| repo.find_commit(oid).ok()) { Some(commit) => { let subject = commit .summary() .map(|s| truncate_summary(s, 60)) .unwrap_or_default(); let author = commit .author() .name() .unwrap_or("unknown") .to_string(); (Some(subject), Some(author)) } None => (None, None), }; match (subject, commit_author) { (Some(subject), Some(author)) => { println!( "· linked {} \"{}\" by {} (linked by {}, {})", short_sha, subject, author, lc.event_author.name, lc.event_timestamp, ); } _ => { println!( "· linked {} (commit {} not in local repo) (linked by {}, {})", short_sha, short_sha, lc.event_author.name, lc.event_timestamp, ); } } } } Ok(()) } IssueCmd::Label { id, label } => { @@ -662,3 +708,15 @@ fn search(repo: &Repository, query: &str) -> Result<(), error::Error> { Ok(()) } pub(crate) fn truncate_summary(s: &str, max_chars: usize) -> String { let mut out = String::new(); for (count, c) in s.chars().enumerate() { if count + 1 > max_chars { 
out.push('…'); return out; } out.push(c); } out } diff --git a/src/log.rs b/src/log.rs index d92f0cf..d1c6a0b 100644 --- a/src/log.rs +++ b/src/log.rs @@ -115,6 +115,7 @@ fn action_type_name(action: &Action) -> String { Action::IssueAssign { .. } => "IssueAssign".to_string(), Action::IssueUnassign { .. } => "IssueUnassign".to_string(), Action::IssueReopen => "IssueReopen".to_string(), Action::IssueCommitLink { .. } => "IssueCommitLink".to_string(), Action::PatchCreate { .. } => "PatchCreate".to_string(), Action::PatchRevision { .. } => "PatchRevision".to_string(), Action::PatchReview { .. } => "PatchReview".to_string(), @@ -149,6 +150,9 @@ fn action_summary(action: &Action) -> String { Action::IssueAssign { assignee } => format!("assign \"{}\"", assignee), Action::IssueUnassign { assignee } => format!("unassign \"{}\"", assignee), Action::IssueReopen => "reopen".to_string(), Action::IssueCommitLink { commit } => { format!("commit link {}", &commit[..commit.len().min(7)]) } Action::PatchCreate { title, .. } => format!("create \"{}\"", title), Action::PatchRevision { body, .. } => match body { Some(b) => format!("revision: {}", truncate(b, 50)), diff --git a/src/state.rs b/src/state.rs index c642eed..9e53bc9 100644 --- a/src/state.rs +++ b/src/state.rs @@ -1,3 +1,4 @@ use std::collections::HashMap; use std::fmt; use chrono::{DateTime, Utc}; @@ -82,6 +83,16 @@ pub struct Comment { } #[derive(Debug, Clone, Serialize, Deserialize)] pub struct LinkedCommit { /// Full 40-char commit SHA from the trailer. pub commit: String, /// Author of the `IssueCommitLink` event (who ran sync). pub event_author: Author, /// Timestamp of the `IssueCommitLink` event. 
pub event_timestamp: String, } #[derive(Debug, Clone, Serialize, Deserialize)] pub struct IssueState { pub id: String, pub title: String, @@ -96,6 +107,8 @@ pub struct IssueState { pub labels: Vec<String>, pub assignees: Vec<String>, pub comments: Vec<Comment>, #[serde(default)] pub linked_commits: Vec<LinkedCommit>, pub created_at: String, #[serde(default)] pub last_updated: String, @@ -247,6 +260,14 @@ impl IssueState { // Higher clock wins; on tie, lexicographically higher OID wins. let mut status_key: Option<(u64, String)> = None; // Accumulator for IssueCommitLink dedup. Keyed by commit SHA, valued by // the (clock, timestamp, oid_hex) sort key plus the LinkedCommit payload. // We keep the entry with the minimum sort key per SHA — i.e. the // earliest emission by (clock, timestamp, oid). Topological walk order // alone does not guarantee this for cross-machine concurrent events // that are reconciled via merge commits, so we sort explicitly. let mut link_acc: HashMap<String, ((u64, String, String), LinkedCommit)> = HashMap::new(); for (oid, event) in events { let ts = parse_timestamp(&event.timestamp); if latest.as_ref().is_none_or(|(prev, _)| ts > *prev) { @@ -264,6 +285,7 @@ impl IssueState { labels: Vec::new(), assignees: Vec::new(), comments: Vec::new(), linked_commits: Vec::new(), created_at: event.timestamp.clone(), last_updated: String::new(), author: event.author.clone(), @@ -336,12 +358,44 @@ impl IssueState { } } } Action::IssueCommitLink { commit } => { if state.is_some() { // Render-time dedup by commit SHA. We compare an // explicit (clock, timestamp, oid_hex) key per event // and keep the minimum so the surviving entry is the // earliest emission per the spec, regardless of how // the merged DAG happens to topo-order itself. 
let key = (event.clock, event.timestamp.clone(), oid.to_string()); let new_link = LinkedCommit { commit: commit.clone(), event_author: event.author.clone(), event_timestamp: event.timestamp.clone(), }; link_acc .entry(commit) .and_modify(|existing| { if key < existing.0 { *existing = (key.clone(), new_link.clone()); } }) .or_insert((key, new_link)); } } _ => {} } } if let Some(ref mut s) = state { s.last_updated = latest.map(|(_, raw)| raw).unwrap_or_default(); // Flush the linked-commit accumulator into state.linked_commits in // a stable order. Sort by the same (clock, timestamp, oid) key we // used for the per-SHA min so the rendered list is deterministic // across runs (HashMap iteration order is randomized). let mut entries: Vec<((u64, String, String), LinkedCommit)> = link_acc.into_values().collect(); entries.sort_by(|a, b| a.0.cmp(&b.0)); s.linked_commits = entries.into_iter().map(|(_, lc)| lc).collect(); } state.ok_or_else(|| git2::Error::from_str("no IssueOpen event found in DAG").into()) } diff --git a/src/sync.rs b/src/sync.rs index bf2cbe5..550e156 100644 --- a/src/sync.rs +++ b/src/sync.rs @@ -364,6 +364,14 @@ pub fn sync(repo: &Repository, remote_name: &str) -> Result<(), Error> { reconcile_refs(&repo, "issues", &author, &sk)?; reconcile_refs(&repo, "patches", &author, &sk)?; // Step 2.5: Scan local branches for Issue: trailers and emit link events. // Never breaks sync — scan_and_link absorbs per-commit/per-issue errors. 
match crate::commit_link::scan_and_link(&repo, &author, &sk) { Ok(n) if n > 0 => println!("Linked {} commit(s) to issues.", n), Ok(_) => {} Err(e) => eprintln!("warning: commit link scan failed: {}", e), } // Step 3: Push collab refs individually println!("Pushing to '{}'...", remote_name); let refs_to_push = collect_push_refs(&repo)?; diff --git a/src/tui/events.rs b/src/tui/events.rs index 520a77c..7333293 100644 --- a/src/tui/events.rs +++ b/src/tui/events.rs @@ -21,7 +21,7 @@ pub(crate) fn run_loop( repo: &Repository, ) -> Result<(), Error> { loop { terminal.draw(|frame| ui(frame, app))?; terminal.draw(|frame| ui(frame, app, Some(repo)))?; if event::poll(Duration::from_millis(100))? { if let Event::Key(key) = event::read()? { diff --git a/src/tui/mod.rs b/src/tui/mod.rs index ccb92a6..c96eaa8 100644 --- a/src/tui/mod.rs +++ b/src/tui/mod.rs @@ -63,6 +63,7 @@ mod tests { labels: vec![], assignees: vec![], comments: vec![], linked_commits: vec![], created_at: String::new(), last_updated: String::new(), author: make_author(), @@ -253,6 +254,7 @@ mod tests { labels: vec![], assignees: vec![], comments: Vec::new(), linked_commits: Vec::new(), created_at: "2026-01-01T00:00:00Z".to_string(), last_updated: "2026-01-01T00:00:00Z".to_string(), author: test_author(), @@ -294,7 +296,7 @@ mod tests { fn render_app(app: &mut App) -> Buffer { let backend = TestBackend::new(80, 24); let mut terminal = Terminal::new(backend).unwrap(); terminal.draw(|frame| ui(frame, app)).unwrap(); terminal.draw(|frame| ui(frame, app, None)).unwrap(); terminal.backend().buffer().clone() } @@ -958,7 +960,7 @@ mod tests { let mut app = make_app(3, 3); let backend = TestBackend::new(20, 10); let mut terminal = Terminal::new(backend).unwrap(); terminal.draw(|frame| ui(frame, &mut app)).unwrap(); terminal.draw(|frame| ui(frame, &mut app, None)).unwrap(); } // ── Integration: full browse flow ──────────────────────────────────── diff --git a/src/tui/widgets.rs b/src/tui/widgets.rs index 
170ebff..a456357 100644 --- a/src/tui/widgets.rs +++ b/src/tui/widgets.rs @@ -1,4 +1,4 @@ use git2::Oid; use git2::{Oid, Repository}; use ratatui::prelude::*; use ratatui::widgets::{Block, Borders, List, ListItem, Paragraph, Wrap}; @@ -26,6 +26,7 @@ pub(crate) fn action_type_label(action: &Action) -> &str { Action::IssueUnlabel { .. } => "Issue Unlabel", Action::IssueAssign { .. } => "Issue Assign", Action::IssueUnassign { .. } => "Issue Unassign", Action::IssueCommitLink { .. } => "Issue Commit Link", } } @@ -111,13 +112,16 @@ pub(crate) fn format_event_detail(oid: &Oid, event: &crate::event::Event) -> Str Action::IssueUnassign { assignee } => { detail.push_str(&format!("\nRemoved Assignee: {}\n", assignee)); } Action::IssueCommitLink { commit } => { detail.push_str(&format!("\nCommit: {}\n", commit)); } Action::IssueReopen | Action::PatchMerge | Action::Merge => {} } detail } pub(crate) fn ui(frame: &mut Frame, app: &mut App) { pub(crate) fn ui(frame: &mut Frame, app: &mut App, repo: Option<&Repository>) { let chunks = Layout::default() .direction(Direction::Vertical) .constraints([ @@ -135,7 +139,7 @@ pub(crate) fn ui(frame: &mut Frame, app: &mut App) { .split(main_area); render_list(frame, app, panes[0]); render_detail(frame, app, panes[1]); render_detail(frame, app, panes[1], repo); render_footer(frame, app, footer_area); } @@ -215,7 +219,7 @@ fn render_list(frame: &mut Frame, app: &mut App, area: Rect) { } } fn render_detail(frame: &mut Frame, app: &mut App, area: Rect) { fn render_detail(frame: &mut Frame, app: &mut App, area: Rect, repo: Option<&Repository>) { let border_style = if app.pane == Pane::Detail { Style::default().fg(Color::Yellow) } else { @@ -299,7 +303,7 @@ fn render_detail(frame: &mut Frame, app: &mut App, area: Rect) { let visible = app.visible_issues(); let selected_idx = app.list_state.selected().unwrap_or(0); let content: Text = match visible.get(selected_idx) { Some(issue) => build_issue_detail(issue, &app.patches), Some(issue) => 
build_issue_detail(issue, &app.patches, repo), None => Text::raw("No matches for current filter."), }; @@ -338,7 +342,11 @@ fn render_detail(frame: &mut Frame, app: &mut App, area: Rect) { } } fn build_issue_detail(issue: &IssueState, patches: &[PatchState]) -> Text<'static> { fn build_issue_detail( issue: &IssueState, patches: &[PatchState], repo: Option<&Repository>, ) -> Text<'static> { let status = issue.status.as_str(); let mut lines: Vec<Line> = vec![ @@ -461,6 +469,46 @@ fn build_issue_detail(issue: &IssueState, patches: &[PatchState]) -> Text<'stati } } if !issue.linked_commits.is_empty() { lines.push(Line::raw("")); lines.push(Line::styled( "--- Linked Commits ---", Style::default() .fg(Color::Magenta) .add_modifier(Modifier::BOLD), )); for lc in &issue.linked_commits { let short_sha: String = lc.commit.chars().take(7).collect(); let (subject, commit_author) = repo .and_then(|r| { Oid::from_str(&lc.commit) .ok() .and_then(|oid| r.find_commit(oid).ok()) .map(|commit| { let subject = commit .summary() .map(|s| crate::truncate_summary(s, 60)) .unwrap_or_default(); let author = commit.author().name().unwrap_or("unknown").to_string(); (subject, author) }) }) .unwrap_or_else(|| (String::new(), String::new())); let line_text = if commit_author.is_empty() { format!( "· linked {} (commit {} not in local repo) (linked by {}, {})", short_sha, short_sha, lc.event_author.name, lc.event_timestamp ) } else { format!( "· linked {} \"{}\" by {} (linked by {}, {})", short_sha, subject, commit_author, lc.event_author.name, lc.event_timestamp ) }; lines.push(Line::raw(line_text)); } } Text::from(lines) } diff --git a/tests/commit_link_test.rs b/tests/commit_link_test.rs new file mode 100644 index 0000000..5e33150 --- /dev/null +++ b/tests/commit_link_test.rs @@ -0,0 +1,312 @@ //! Unit tests for the Action::IssueCommitLink event variant. 
use git2::Repository; use git_collab::event::{Action, Author, Event}; use git_collab::identity::author_signature; use git_collab::signing::sign_event; use git_collab::state::IssueState; use tempfile::TempDir; mod common; use common::{ add_commit_link, alice, init_repo, open_issue, test_signing_key, ScopedTestConfig, }; /// Append a commit-link event with an explicitly chosen `clock` value, /// bypassing `dag::append_event`'s automatic clock-bump. The new commit's /// parent is the current ref tip. Returns the new tip OID. fn append_commit_link_with_clock( repo: &Repository, ref_name: &str, author: &Author, commit_sha: &str, timestamp: &str, clock: u64, ) -> git2::Oid { let sk = test_signing_key(); let event = Event { timestamp: timestamp.to_string(), author: author.clone(), action: Action::IssueCommitLink { commit: commit_sha.to_string(), }, clock, }; let detached = sign_event(&event, &sk).unwrap(); let event_json = serde_json::to_vec_pretty(&event).unwrap(); let manifest = br#"{"version":1,"format":"git-collab"}"#; let event_blob = repo.blob(&event_json).unwrap(); let sig_blob = repo.blob(detached.signature.as_bytes()).unwrap(); let pubkey_blob = repo.blob(detached.pubkey.as_bytes()).unwrap(); let manifest_blob = repo.blob(manifest).unwrap(); let mut tb = repo.treebuilder(None).unwrap(); tb.insert("event.json", event_blob, 0o100644).unwrap(); tb.insert("signature", sig_blob, 0o100644).unwrap(); tb.insert("pubkey", pubkey_blob, 0o100644).unwrap(); tb.insert("manifest.json", manifest_blob, 0o100644).unwrap(); let tree_oid = tb.write().unwrap(); let tree = repo.find_tree(tree_oid).unwrap(); let sig = author_signature(author).unwrap(); let parent_oid = repo.refname_to_id(ref_name).unwrap(); let parent = repo.find_commit(parent_oid).unwrap(); repo.commit(Some(ref_name), &sig, &sig, "issue.commit_link", &tree, &[&parent]) .unwrap() } fn test_author() -> Author { Author { name: "Alice".to_string(), email: "alice@example.com".to_string(), } } fn sha(byte: u8) -> String { 
format!("{:02x}{}", byte, "00".repeat(19)) } #[test] fn issue_commit_link_variant_round_trips() { let event = Event { timestamp: "2026-04-12T12:00:00Z".to_string(), author: test_author(), action: Action::IssueCommitLink { commit: "4b2e1cd0123456789012345678901234567890ab".to_string(), }, clock: 3, }; let json = serde_json::to_string(&event).expect("serialize"); assert!( json.contains("\"type\":\"issue.commit_link\""), "expected serde tag issue.commit_link, got: {}", json ); assert!( json.contains("\"commit\":\"4b2e1cd0123456789012345678901234567890ab\""), "expected commit field, got: {}", json ); let parsed: Event = serde_json::from_str(&json).expect("deserialize"); match parsed.action { Action::IssueCommitLink { commit } => { assert_eq!(commit, "4b2e1cd0123456789012345678901234567890ab"); } other => panic!("expected IssueCommitLink, got {:?}", other), } } #[test] fn issue_state_surfaces_commit_links_in_order() { let _config = ScopedTestConfig::new(); let dir = TempDir::new().unwrap(); let repo = init_repo(dir.path(), &alice()); let (ref_name, id) = open_issue(&repo, &alice(), "bug"); add_commit_link(&repo, &ref_name, &alice(), &sha(0xaa)); add_commit_link(&repo, &ref_name, &alice(), &sha(0xbb)); let issue = IssueState::from_ref_uncached(&repo, &ref_name, &id).unwrap(); let commits: Vec<String> = issue .linked_commits .iter() .map(|lc| lc.commit.clone()) .collect(); assert_eq!(commits, vec![sha(0xaa), sha(0xbb)]); } #[test] fn issue_state_dedups_commit_links_keeps_lower_clock_even_when_appended_later() { // This locks in the (clock, timestamp, oid) tiebreak rule. We append two // events for the same commit SHA in linear order, but the SECOND event we // append has a LOWER clock than the first — simulating a cross-machine // case where Bob's locally-appended event was actually authored earlier // (in clock terms) than Alice's. 
A naive first-seen-wins implementation // would surface Alice's event because it appears first in the topo walk; // the spec-correct dedup must surface Bob's lower-clock event instead. let _config = ScopedTestConfig::new(); let dir = TempDir::new().unwrap(); let repo = init_repo(dir.path(), &alice()); let (ref_name, id) = open_issue(&repo, &alice(), "bug"); let target_sha = sha(0xdd); // Alice appends with clock 50 first. append_commit_link_with_clock( &repo, &ref_name, &alice(), &target_sha, "2026-04-12T12:00:00Z", 50, ); // Bob appends second, but with a LOWER clock (10) — as if his event was // authored earlier on his machine and we're now seeing the merged DAG. append_commit_link_with_clock( &repo, &ref_name, &common::bob(), &target_sha, "2026-04-12T11:00:00Z", 10, ); let issue = IssueState::from_ref_uncached(&repo, &ref_name, &id).unwrap(); assert_eq!(issue.linked_commits.len(), 1); assert_eq!(issue.linked_commits[0].commit, target_sha); // The lower-clock event wins, even though it was appended later. assert_eq!( issue.linked_commits[0].event_author.name, "Bob", "expected Bob's lower-clock event to win the tiebreak" ); } #[test] fn issue_state_dedups_commit_links_by_sha_keeping_earliest() { let _config = ScopedTestConfig::new(); let dir = TempDir::new().unwrap(); let repo = init_repo(dir.path(), &alice()); let (ref_name, id) = open_issue(&repo, &alice(), "bug"); // Two different emitters link the same commit. First-seen should win. 
add_commit_link(&repo, &ref_name, &alice(), &sha(0xcc)); let bob_link = common::bob(); add_commit_link(&repo, &ref_name, &bob_link, &sha(0xcc)); let issue = IssueState::from_ref_uncached(&repo, &ref_name, &id).unwrap(); assert_eq!(issue.linked_commits.len(), 1); assert_eq!(issue.linked_commits[0].commit, sha(0xcc)); assert_eq!(issue.linked_commits[0].event_author.name, "Alice"); } use git_collab::commit_link::parse_issue_trailers; #[test] fn parser_no_trailer_block() { assert_eq!(parse_issue_trailers("Just a plain commit"), Vec::<String>::new()); } #[test] fn parser_empty_message() { assert_eq!(parse_issue_trailers(""), Vec::<String>::new()); } #[test] fn parser_single_trailer_in_pure_block() { let msg = "Fix thing\n\nSome context in the body.\n\nIssue: abc"; assert_eq!(parse_issue_trailers(msg), vec!["abc".to_string()]); } #[test] fn parser_case_variants() { let msg1 = "subject\n\nissue: abc"; let msg2 = "subject\n\nISSUE : abc"; let msg3 = "subject\n\n Issue: abc "; assert_eq!(parse_issue_trailers(msg1), vec!["abc".to_string()]); assert_eq!(parse_issue_trailers(msg2), vec!["abc".to_string()]); assert_eq!(parse_issue_trailers(msg3), vec!["abc".to_string()]); } #[test] fn parser_two_trailers_in_pure_block() { let msg = "subject\n\nIssue: abc\nIssue: def"; assert_eq!(parse_issue_trailers(msg), vec!["abc".to_string(), "def".to_string()]); } #[test] fn parser_issue_in_body_but_not_final_paragraph() { let msg = "subject\n\nIssue: abc\n\nSigned-off-by: alice <a@example.com>"; // The final paragraph is the signed-off-by block, not the issue line. // It's a valid trailer block (Signed-off-by is trailer-shaped), but it // contains no Issue: key, so we extract nothing. 
assert_eq!(parse_issue_trailers(msg), Vec::<String>::new()); } #[test] fn parser_wrong_key() { let msg = "subject\n\nIssues: abc"; assert_eq!(parse_issue_trailers(msg), Vec::<String>::new()); } #[test] fn parser_prose_mention() { let msg = "subject\n\nthis fixes issue abc in the body"; assert_eq!(parse_issue_trailers(msg), Vec::<String>::new()); } #[test] fn parser_single_paragraph_whole_message_is_trailer_block() { let msg = "Issue: abc"; assert_eq!(parse_issue_trailers(msg), vec!["abc".to_string()]); } #[test] fn parser_mixed_final_paragraph_rejects_all() { let msg = "subject\n\nThanks to Bob for the catch.\nIssue: a3f9"; // Final paragraph has a prose line, so it's not a trailer block and we // extract nothing. This is the "false positive in prose" guard. assert_eq!(parse_issue_trailers(msg), Vec::<String>::new()); } #[test] fn parser_trailing_whitespace_paragraph_does_not_shadow_trailer_block() { // The final paragraph is empty/whitespace, so the walk should fall back // to the previous non-empty paragraph, which is a valid trailer block. 
// (tail of a parser test whose `fn` header is above this chunk: verifies that
// trailing blank/whitespace-only lines after the trailer block are tolerated)
let msg = "subject\n\nIssue: abc\n\n \n";
assert_eq!(parse_issue_trailers(msg), vec!["abc".to_string()]);
}

// A trailer paragraph mixing `Issue:` with other well-formed trailers
// (e.g. Signed-off-by) still qualifies; only the Issue values are returned.
#[test]
fn parser_pure_block_with_mixed_keys() {
    let msg = "subject\n\nSigned-off-by: alice <a@example.com>\nIssue: abc";
    assert_eq!(parse_issue_trailers(msg), vec!["abc".to_string()]);
}

// A value with trailing words ("abc fixes thing") is not a single
// non-whitespace token, so the trailer is rejected outright.
#[test]
fn parser_rejects_value_with_trailing_garbage() {
    let msg = "subject\n\nIssue: abc fixes thing";
    assert_eq!(parse_issue_trailers(msg), Vec::<String>::new());
}

// An `Issue:` key with an empty value yields nothing.
#[test]
fn parser_rejects_empty_value() {
    let msg = "subject\n\nIssue: ";
    assert_eq!(parse_issue_trailers(msg), Vec::<String>::new());
}

use git_collab::commit_link::collect_linked_shas;

// A freshly opened issue has no IssueCommitLink events, so the SHA set is empty.
#[test]
fn collect_linked_shas_empty_for_fresh_issue() {
    let _config = ScopedTestConfig::new();
    let dir = TempDir::new().unwrap();
    let repo = init_repo(dir.path(), &alice());
    let (ref_name, _id) = open_issue(&repo, &alice(), "bug");
    let shas = collect_linked_shas(&repo, &ref_name).unwrap();
    assert!(shas.is_empty());
}

// collect_linked_shas returns a set, so two link events for the same SHA
// (possible via a cross-machine race) collapse to one entry.
#[test]
fn collect_linked_shas_returns_all_linked_commits_including_duplicates() {
    let _config = ScopedTestConfig::new();
    let dir = TempDir::new().unwrap();
    let repo = init_repo(dir.path(), &alice());
    let (ref_name, _id) = open_issue(&repo, &alice(), "bug");
    add_commit_link(&repo, &ref_name, &alice(), &sha(0xaa));
    add_commit_link(&repo, &ref_name, &alice(), &sha(0xbb));
    // Even a duplicate DAG entry (cross-machine race) is surfaced here —
    // this is the "source of truth" for whether we need to emit.
    add_commit_link(&repo, &ref_name, &alice(), &sha(0xaa));
    let shas = collect_linked_shas(&repo, &ref_name).unwrap();
    // 0xaa appears twice in the DAG but once in the returned HashSet.
    assert_eq!(shas.len(), 2);
    assert!(shas.contains(&sha(0xaa)));
    assert!(shas.contains(&sha(0xbb)));
}
diff --git a/tests/common/mod.rs b/tests/common/mod.rs
index 2b9756c..7b9d529 100644
--- a/tests/common/mod.rs
+++ b/tests/common/mod.rs
@@ -228,6 +228,25 @@ pub fn add_comment(repo: &Repository, ref_name: &str, author: &Author, body: &st
    // (diff context: closing lines of add_comment)
    dag::append_event(repo, ref_name, &event, &sk).unwrap();
}

/// Append an IssueCommitLink event to an issue ref. Returns the new DAG tip OID.
///
/// Signs the event with the shared test signing key; `clock` is fixed at 0
/// because these tests never exercise clock-based ordering.
pub fn add_commit_link(
    repo: &Repository,
    ref_name: &str,
    author: &Author,
    commit_sha: &str,
) -> git2::Oid {
    let sk = test_signing_key();
    let event = Event {
        timestamp: now(),
        author: author.clone(),
        action: Action::IssueCommitLink {
            commit: commit_sha.to_string(),
        },
        clock: 0,
    };
    dag::append_event(repo, ref_name, &event, &sk).unwrap()
}

/// Append a close event to an issue ref.
pub fn close_issue(repo: &Repository, ref_name: &str, author: &Author) {
    // (body truncated here by the diff hunk boundary)
    let sk = test_signing_key();
diff --git a/tests/sync_test.rs b/tests/sync_test.rs
index 070a63a..e3e2c77 100644
--- a/tests/sync_test.rs
+++ b/tests/sync_test.rs
@@ -1185,3 +1185,380 @@ fn test_corrupted_state_file_handled_gracefully() {
    // (diff context: closing lines of test_corrupted_state_file_handled_gracefully)
    "corrupted state file should be deleted"
    );
}

// ---------------------------------------------------------------------------
// Commit-link tests (src/commit_link.rs)
// ---------------------------------------------------------------------------

use git_collab::commit_link;

/// Create an empty-tree-delta commit on `refs/heads/main` with the given
/// message, seeding the local `main` ref from `origin/main` if it does not
/// exist yet. Returns the new commit's OID. The commit reuses the parent's
/// tree, so only the message (and thus any `Issue:` trailers) changes.
fn make_commit_with_message(repo: &Repository, message: &str) -> git2::Oid {
    let sig = git2::Signature::now("Alice", "alice@example.com").unwrap();
    // The TestCluster bare repo commits onto refs/heads/main but its HEAD
    // remains the default refs/heads/master, so clones don't get a local
    // refs/heads/main automatically. Make sure it exists before we extend it.
    let parent_oid = if let Ok(r) = repo.find_reference("refs/heads/main") {
        r.target().unwrap()
    } else {
        let remote_main = repo
            .find_reference("refs/remotes/origin/main")
            .expect("origin/main should exist on the cloned test repo");
        let oid = remote_main.target().unwrap();
        repo.reference("refs/heads/main", oid, false, "seed local main")
            .unwrap();
        oid
    };
    let parent = repo.find_commit(parent_oid).unwrap();
    // Reuse the parent's tree verbatim: the commit carries no file changes.
    let tree_oid = parent.tree().unwrap().id();
    let tree = repo.find_tree(tree_oid).unwrap();
    repo.commit(
        Some("refs/heads/main"),
        &sig,
        &sig,
        message,
        &tree,
        &[&parent],
    )
    .unwrap()
}

// Happy path: one commit with one matching trailer emits exactly one link
// event, visible in the materialized IssueState.
#[test]
fn commit_link_scan_emits_event_for_matching_trailer() {
    let cluster = TestCluster::new();
    let alice_repo = cluster.alice_repo();
    // Open an issue.
    let (issue_ref, issue_id) = open_issue(&alice_repo, &alice(), "fix the walker");
    // Create a commit whose trailer references that issue.
    let message = format!("Fix walker\n\nIssue: {}", &issue_id[..8]);
    let commit_oid = make_commit_with_message(&alice_repo, &message);
    // Run the scanner directly (we test the sync integration in later tests).
    let author = git_collab::identity::get_author(&alice_repo).unwrap();
    let sk = signing::load_signing_key(
        &signing::signing_key_dir().unwrap(),
    )
    .unwrap();
    let emitted = commit_link::scan_and_link(&alice_repo, &author, &sk).unwrap();
    assert_eq!(emitted, 1);
    // Walk the issue's event log and find the link.
    let issue = IssueState::from_ref_uncached(&alice_repo, &issue_ref, &issue_id).unwrap();
    assert_eq!(issue.linked_commits.len(), 1);
    assert_eq!(issue.linked_commits[0].commit, commit_oid.to_string());
}

// End-to-end through sync: the scan runs inside sync::sync, the emitted link
// event is pushed, and a second clone observes it after its own sync.
#[test]
fn sync_entry_point_emits_commit_link_events_and_pushes_them() {
    let cluster = TestCluster::new();
    let alice_repo = cluster.alice_repo();
    // Alice opens an issue and syncs it up so Bob will see it too.
    let (_issue_ref, issue_id) = open_issue(&alice_repo, &alice(), "bug");
    sync::sync(&alice_repo, "origin").unwrap();
    // Alice makes a real commit with an Issue: trailer.
    let message = format!("Fix the thing\n\nIssue: {}", &issue_id[..8]);
    make_commit_with_message(&alice_repo, &message);
    // Running sync should scan, emit the link, and push it.
    sync::sync(&alice_repo, "origin").unwrap();
    // Bob fetches and should see the link in the materialized issue.
    let bob_repo = cluster.bob_repo();
    sync::sync(&bob_repo, "origin").unwrap();
    let bob_issue_ref = format!("refs/collab/issues/{}", issue_id);
    let bob_issue = IssueState::from_ref_uncached(&bob_repo, &bob_issue_ref, &issue_id).unwrap();
    assert_eq!(bob_issue.linked_commits.len(), 1);
}

// Running the scanner repeatedly must emit the link once and then nothing:
// the DAG's existing IssueCommitLink events act as the dedup source of truth.
#[test]
fn commit_link_scan_is_idempotent_across_runs() {
    let cluster = TestCluster::new();
    let alice_repo = cluster.alice_repo();
    let (issue_ref, issue_id) = open_issue(&alice_repo, &alice(), "bug");
    let message = format!("Fix thing\n\nIssue: {}", &issue_id[..8]);
    make_commit_with_message(&alice_repo, &message);
    let author = git_collab::identity::get_author(&alice_repo).unwrap();
    let sk = signing::load_signing_key(&signing::signing_key_dir().unwrap()).unwrap();
    assert_eq!(commit_link::scan_and_link(&alice_repo, &author, &sk).unwrap(), 1);
    assert_eq!(commit_link::scan_and_link(&alice_repo, &author, &sk).unwrap(), 0);
    assert_eq!(commit_link::scan_and_link(&alice_repo, &author, &sk).unwrap(), 0);
    let issue = IssueState::from_ref_uncached(&alice_repo, &issue_ref, &issue_id).unwrap();
    assert_eq!(issue.linked_commits.len(), 1);
}

// A commit reachable from two branch tips must still yield a single event.
#[test]
fn commit_link_scan_walks_all_local_branches_and_dedups_shared_ancestors() {
    let cluster = TestCluster::new();
    let alice_repo = cluster.alice_repo();
    let (issue_ref, issue_id) = open_issue(&alice_repo, &alice(), "bug");
    // Commit on main with the trailer. Both branches will reach it.
    let message = format!("Fix\n\nIssue: {}", &issue_id[..8]);
    let linked_commit = make_commit_with_message(&alice_repo, &message);
    // Create a second branch pointing at the same commit.
    {
        let commit = alice_repo.find_commit(linked_commit).unwrap();
        alice_repo.branch("feature-x", &commit, false).unwrap();
    }
    let author = git_collab::identity::get_author(&alice_repo).unwrap();
    let sk = signing::load_signing_key(&signing::signing_key_dir().unwrap()).unwrap();
    // Should emit exactly one event despite the commit being reachable from
    // two branch tips.
    assert_eq!(commit_link::scan_and_link(&alice_repo, &author, &sk).unwrap(), 1);
    let issue = IssueState::from_ref_uncached(&alice_repo, &issue_ref, &issue_id).unwrap();
    assert_eq!(issue.linked_commits.len(), 1);
}

// One commit carrying two Issue: trailers links both issues (one event each).
#[test]
fn commit_link_scan_handles_multiple_issue_trailers_on_one_commit() {
    let cluster = TestCluster::new();
    let alice_repo = cluster.alice_repo();
    let (issue_ref_a, id_a) = open_issue(&alice_repo, &alice(), "bug a");
    let (issue_ref_b, id_b) = open_issue(&alice_repo, &alice(), "bug b");
    let message = format!(
        "Fix both\n\nIssue: {}\nIssue: {}",
        &id_a[..8],
        &id_b[..8]
    );
    make_commit_with_message(&alice_repo, &message);
    let author = git_collab::identity::get_author(&alice_repo).unwrap();
    let sk = signing::load_signing_key(&signing::signing_key_dir().unwrap()).unwrap();
    assert_eq!(commit_link::scan_and_link(&alice_repo, &author, &sk).unwrap(), 2);
    let issue_a = IssueState::from_ref_uncached(&alice_repo, &issue_ref_a, &id_a).unwrap();
    let issue_b = IssueState::from_ref_uncached(&alice_repo, &issue_ref_b, &id_b).unwrap();
    assert_eq!(issue_a.linked_commits.len(), 1);
    assert_eq!(issue_b.linked_commits.len(), 1);
}

// A trailer whose prefix matches no issue is skipped without erroring.
#[test]
fn commit_link_scan_skips_unknown_prefix_without_error() {
    let cluster = TestCluster::new();
    let alice_repo = cluster.alice_repo();
    // No issue exists. Commit uses a completely unrelated prefix.
    make_commit_with_message(
        &alice_repo,
        "Fix\n\nIssue: zzzzzzzz",
    );
    let author = git_collab::identity::get_author(&alice_repo).unwrap();
    let sk = signing::load_signing_key(&signing::signing_key_dir().unwrap()).unwrap();
    assert_eq!(commit_link::scan_and_link(&alice_repo, &author, &sk).unwrap(), 0);
}

// A prefix matching two or more issues must be skipped: no event is emitted
// to ANY of the candidate issues.
#[test]
fn commit_link_scan_skips_genuinely_ambiguous_prefix() {
    let cluster = TestCluster::new();
    let alice_repo = cluster.alice_repo();
    // Open 17 issues. By pigeonhole on the 16 possible hex first-chars,
    // at least two of the resulting issue IDs must share a first character,
    // guaranteeing we can construct an ambiguous one-char prefix.
    let mut ids: Vec<String> = Vec::with_capacity(17);
    for i in 0..17 {
        let (_, id) = open_issue(&alice_repo, &alice(), &format!("issue {}", i));
        ids.push(id);
    }
    // Find a first-char that has at least 2 matching IDs.
    // NOTE(review): HashMap iteration order is unspecified, so which
    // ambiguous char is picked can vary run-to-run — the assertions below
    // hold for any valid pick, so this is deterministic in outcome.
    let mut counts: std::collections::HashMap<char, Vec<&str>> =
        std::collections::HashMap::new();
    for id in &ids {
        let c = id.chars().next().unwrap();
        counts.entry(c).or_default().push(id.as_str());
    }
    let (ambiguous_char, matching_ids) = counts
        .iter()
        .find(|(_, v)| v.len() >= 2)
        .map(|(c, v)| (*c, v.clone()))
        .expect("pigeonhole guarantees at least one shared first char among 17 hex IDs");
    let ambiguous_prefix = ambiguous_char.to_string();
    make_commit_with_message(
        &alice_repo,
        &format!("Touch\n\nIssue: {}", ambiguous_prefix),
    );
    let author = git_collab::identity::get_author(&alice_repo).unwrap();
    let sk = signing::load_signing_key(&signing::signing_key_dir().unwrap()).unwrap();
    // Ambiguous prefix must be skipped silently — no event emitted.
    assert_eq!(
        commit_link::scan_and_link(&alice_repo, &author, &sk).unwrap(),
        0
    );
    // None of the candidate issues should have grown a commit-link event:
    // each one still consists only of its IssueOpen event.
    for id in &matching_ids {
        let issue_ref = format!("refs/collab/issues/{}", id);
        let state = IssueState::from_ref_uncached(&alice_repo, &issue_ref, id).unwrap();
        assert!(
            state.linked_commits.is_empty(),
            "candidate issue {} should not have any linked commits, but has {}",
            id,
            state.linked_commits.len()
        );
    }
}

// Archived issues are never linked; the archived ref must stay untouched.
// NOTE(review): the "_with_warning" suffix implies a warning is logged, but
// this test only verifies the skip — the warning itself is not asserted here.
#[test]
fn commit_link_scan_skips_archived_issues_with_warning() {
    let cluster = TestCluster::new();
    let alice_repo = cluster.alice_repo();
    let (_, issue_id) = open_issue(&alice_repo, &alice(), "old bug");
    // Archive the issue via the state helper.
    state::archive_issue_ref(&alice_repo, &issue_id).unwrap();
    make_commit_with_message(
        &alice_repo,
        &format!("Reference old\n\nIssue: {}", &issue_id[..8]),
    );
    let author = git_collab::identity::get_author(&alice_repo).unwrap();
    let sk = signing::load_signing_key(&signing::signing_key_dir().unwrap()).unwrap();
    assert_eq!(commit_link::scan_and_link(&alice_repo, &author, &sk).unwrap(), 0);
    // Confirm the archived ref did not accrue a new event: the archived
    // DAG tip should still be the archive-time tip.
    let archived_ref = format!("refs/collab/archive/issues/{}", issue_id);
    let archived_state =
        IssueState::from_ref_uncached(&alice_repo, &archived_ref, &issue_id).unwrap();
    assert!(archived_state.linked_commits.is_empty());
}

// With HEAD detached and no local branches, the scan has nothing to walk and
// must be a silent no-op rather than an error.
#[test]
fn commit_link_scan_no_op_on_detached_head_with_no_branches() {
    let cluster = TestCluster::new();
    let alice_repo = cluster.alice_repo();
    // Seed refs/heads/main so HEAD resolves to a commit we can detach onto.
    // make_commit_with_message creates refs/heads/main if missing.
    make_commit_with_message(&alice_repo, "seed for detached head test");
    // Put HEAD in detached state pointing at the current main tip, then
    // delete all local branches so scan_and_link has nothing to walk.
    // NOTE(review): only refs/heads/main is deleted below — presumably the
    // TestCluster clone has no other local branches; confirm in the fixture.
    let head_oid = alice_repo
        .find_reference("refs/heads/main")
        .unwrap()
        .target()
        .unwrap();
    alice_repo.set_head_detached(head_oid).unwrap();
    alice_repo
        .find_reference("refs/heads/main")
        .unwrap()
        .delete()
        .unwrap();
    let author = git_collab::identity::get_author(&alice_repo).unwrap();
    let sk = signing::load_signing_key(&signing::signing_key_dir().unwrap()).unwrap();
    // No branches to walk — silent no-op.
    assert_eq!(commit_link::scan_and_link(&alice_repo, &author, &sk).unwrap(), 0);
}

#[test]
fn commit_link_scan_dedups_against_remote_originated_events() {
    // Simulates the cross-machine dedup case: Bob's repo fetches a link
    // event that Alice already emitted, then runs scan locally with the
    // same commit reachable from his branches. He must not emit a
    // duplicate.
    let cluster = TestCluster::new();
    let alice_repo = cluster.alice_repo();
    let bob_repo = cluster.bob_repo();
    // Both repos get the same issue.
    let (_, issue_id) = open_issue(&alice_repo, &alice(), "bug");
    sync::sync(&alice_repo, "origin").unwrap();
    sync::sync(&bob_repo, "origin").unwrap();
    // Alice writes a commit and pushes it to the bare remote so Bob can
    // fetch it. First the regular git push; then sync for the link event.
    let message = format!("Fix thing\n\nIssue: {}", &issue_id[..8]);
    let linked_commit = make_commit_with_message(&alice_repo, &message);
    // Push the branch so Bob sees the commit too.
    let mut cmd = Command::new("git");
    cmd.args(["push", "origin", "main"])
        .current_dir(cluster.alice_dir.path());
    assert!(cmd.status().unwrap().success());
    sync::sync(&alice_repo, "origin").unwrap();
    // Bob fetches both the branch and the collab link event.
    let mut cmd = Command::new("git");
    cmd.args(["fetch", "origin", "main:main"])
        .current_dir(cluster.bob_dir.path());
    assert!(cmd.status().unwrap().success());
    sync::sync(&bob_repo, "origin").unwrap();
    // At this point Bob's issue already has the link event from Alice.
    // Re-running scan on Bob's repo must find the commit locally and
    // decide "already linked", emitting zero events.
    let author = git_collab::identity::get_author(&bob_repo).unwrap();
    let sk = signing::load_signing_key(&signing::signing_key_dir().unwrap()).unwrap();
    let emitted = commit_link::scan_and_link(&bob_repo, &author, &sk).unwrap();
    assert_eq!(emitted, 0, "Bob must not duplicate Alice's link event");
    // And the commit on Bob's side really is the one linked.
    let bob_ref = format!("refs/collab/issues/{}", issue_id);
    let bob_issue = IssueState::from_ref_uncached(&bob_repo, &bob_ref, &issue_id).unwrap();
    assert_eq!(bob_issue.linked_commits.len(), 1);
    assert_eq!(bob_issue.linked_commits[0].commit, linked_commit.to_string());
}

// Full CLI round-trip: open → commit with trailer → sync → `issue show`
// renders the Linked Commits section with both authors.
#[test]
fn cli_issue_show_renders_linked_commits_section() {
    use common::TestRepo;
    let repo = TestRepo::new("Alice", "alice@example.com");
    let issue_id = repo.issue_open("fix the thing");
    // Resolve the full id via --json so we can format a trailer prefix.
    let full_id = {
        let out = repo.run_ok(&["issue", "show", &issue_id, "--json"]);
        let v: serde_json::Value = serde_json::from_str(&out).unwrap();
        v["id"].as_str().unwrap().to_string()
    };
    let msg = format!("Fix a thing\n\nIssue: {}", &full_id[..8]);
    repo.git(&["commit", "--allow-empty", "-m", &msg]);
    // Set up a bare remote so sync has something to push to.
    let bare = TempDir::new().unwrap();
    Command::new("git")
        .args(["init", "--bare"])
        .current_dir(bare.path())
        .status()
        .unwrap();
    repo.git(&["remote", "add", "origin", bare.path().to_str().unwrap()]);
    repo.git(&["push", "-u", "origin", "main"]);
    repo.run_ok(&["init"]);
    repo.run_ok(&["sync"]);
    let show = repo.run_ok(&["issue", "show", &issue_id]);
    assert!(
        show.contains("--- Linked Commits ---"),
        "expected linked commits section, got:\n{}",
        show
    );
    // NOTE(review): "by Alice" is a substring of "(linked by Alice" asserted
    // below, so this check alone does not prove the commit-author line is
    // rendered separately — consider a more specific needle.
    assert!(
        show.contains("by Alice"),
        "expected commit author rendered, got:\n{}",
        show
    );
    assert!(
        show.contains("(linked by Alice"),
        "expected event author rendered, got:\n{}",
        show
    );
}