// src/state.rs
use std::collections::HashMap;
use std::fmt;
use chrono::{DateTime, Utc};
use git2::{Oid, Repository};
use serde::{Deserialize, Serialize};
use crate::cache;
use crate::dag;
use crate::event::{Action, Author, ReviewVerdict};
/// Parse an RFC3339 timestamp, returning the earliest representable time on
/// failure so that unparseable timestamps sort before any valid one.
fn parse_timestamp(s: &str) -> DateTime<Utc> {
    match DateTime::parse_from_rfc3339(s) {
        Ok(dt) => dt.with_timezone(&Utc),
        // Sentinel: sorts before every valid timestamp.
        Err(_) => DateTime::<Utc>::MIN_UTC,
    }
}
/// Serialize a git OID as its 40-char lowercase hex string.
fn serialize_oid<S: serde::Serializer>(oid: &Oid, s: S) -> Result<S::Ok, S::Error> {
    let hex = oid.to_string();
    s.serialize_str(&hex)
}
/// Serialize `Option<Oid>` as an optional hex string (`null` for `None`).
fn serialize_oid_option<S: serde::Serializer>(oid: &Option<Oid>, s: S) -> Result<S::Ok, S::Error> {
    if let Some(o) = oid {
        s.serialize_some(&o.to_string())
    } else {
        s.serialize_none()
    }
}
fn deserialize_oid<'de, D: serde::Deserializer<'de>>(d: D) -> Result<Oid, D::Error> {
let s = String::deserialize(d)?;
Oid::from_str(&s).map_err(serde::de::Error::custom)
}
fn deserialize_oid_option<'de, D: serde::Deserializer<'de>>(d: D) -> Result<Option<Oid>, D::Error> {
let opt: Option<String> = Option::deserialize(d)?;
match opt {
Some(s) => Oid::from_str(&s).map(Some).map_err(serde::de::Error::custom),
None => Ok(None),
}
}
/// Serialize a review verdict as its canonical string form.
fn serialize_verdict<S: serde::Serializer>(v: &ReviewVerdict, s: S) -> Result<S::Ok, S::Error> {
    let text = v.as_str();
    s.serialize_str(text)
}
fn deserialize_verdict<'de, D: serde::Deserializer<'de>>(d: D) -> Result<ReviewVerdict, D::Error> {
let s = String::deserialize(d)?;
s.parse().map_err(serde::de::Error::custom)
}
/// Lifecycle status of an issue. Serialized as the lowercase strings
/// "open" / "closed" (see the `rename_all` attribute).
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum IssueStatus {
    Open,
    Closed,
}
impl IssueStatus {
pub fn as_str(&self) -> &'static str {
match self {
IssueStatus::Open => "open",
IssueStatus::Closed => "closed",
}
}
}
impl fmt::Display for IssueStatus {
    /// Display the status exactly as its serialized lowercase form.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", self.as_str())
    }
}
/// A free-form comment attached to an issue or patch.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[allow(dead_code)]
pub struct Comment {
    /// Author taken from the originating comment event.
    pub author: Author,
    /// Comment text.
    pub body: String,
    /// Event timestamp, stored as the raw string from the event.
    pub timestamp: String,
    /// OID of the DAG commit that carried the comment event.
    #[serde(serialize_with = "serialize_oid", deserialize_with = "deserialize_oid")]
    pub commit_id: Oid,
}
/// A repository commit linked to an issue via an `IssueCommitLink` event.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LinkedCommit {
    /// Full 40-char commit SHA from the trailer.
    pub commit: String,
    /// Author of the `IssueCommitLink` event (who ran sync).
    pub event_author: Author,
    /// Timestamp of the `IssueCommitLink` event.
    pub event_timestamp: String,
}
/// Materialized view of one issue, folded from its event DAG.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct IssueState {
    /// Issue ID (the collab ref name suffix).
    pub id: String,
    /// Title; overwritten by later `IssueEdit` events in walk order.
    pub title: String,
    /// Body text; overwritten by later `IssueEdit` events in walk order.
    pub body: String,
    /// Open/closed status after close-vs-reopen conflict resolution.
    pub status: IssueStatus,
    /// Reason carried by the winning close event, if any.
    pub close_reason: Option<String>,
    /// DAG commit OID of the winning close event (None while open).
    #[serde(
        serialize_with = "serialize_oid_option",
        deserialize_with = "deserialize_oid_option"
    )]
    pub closed_by: Option<Oid>,
    /// Currently applied labels (kept duplicate-free).
    pub labels: Vec<String>,
    /// Current assignees (kept duplicate-free).
    pub assignees: Vec<String>,
    /// Comments in DAG walk order.
    pub comments: Vec<Comment>,
    /// Linked commits, deduplicated per SHA; defaults to empty for
    /// states serialized before this field existed.
    #[serde(default)]
    pub linked_commits: Vec<LinkedCommit>,
    /// Timestamp of the `IssueOpen` event.
    pub created_at: String,
    /// Timestamp of the newest event on the ref (empty if none parsed).
    #[serde(default)]
    pub last_updated: String,
    /// Author of the `IssueOpen` event.
    pub author: Author,
    /// Optional related-item reference carried on the `IssueOpen` event.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub relates_to: Option<String>,
}
/// A review verdict left on a patch.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Review {
    /// Reviewer (from the review event).
    pub author: Author,
    /// Verdict, serialized via its string form.
    #[serde(serialize_with = "serialize_verdict", deserialize_with = "deserialize_verdict")]
    pub verdict: ReviewVerdict,
    /// Review comment text.
    pub body: String,
    /// Event timestamp string.
    pub timestamp: String,
    /// Revision number the review targets; defaults to None when the
    /// serialized form lacks the field.
    #[serde(default)]
    pub revision: Option<u32>,
}
/// Lifecycle status of a patch. Serialized as the lowercase strings
/// "open" / "closed" / "merged".
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
#[allow(dead_code)]
pub enum PatchStatus {
    Open,
    Closed,
    Merged,
}
impl PatchStatus {
pub fn as_str(&self) -> &'static str {
match self {
PatchStatus::Open => "open",
PatchStatus::Closed => "closed",
PatchStatus::Merged => "merged",
}
}
}
impl fmt::Display for PatchStatus {
    /// Display the status exactly as its serialized lowercase form.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", self.as_str())
    }
}
/// One revision (version) of a patch; revision 1 comes from `PatchCreate`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Revision {
    /// 1-based revision number, assigned in arrival order.
    pub number: u32,
    /// Head commit SHA of this revision.
    pub commit: String,
    /// Tree SHA of this revision.
    pub tree: String,
    /// Optional description attached to the revision event.
    pub body: Option<String>,
    /// Event timestamp string.
    pub timestamp: String,
}
/// A comment anchored to a specific file and line within a patch.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct InlineComment {
    /// Comment author.
    pub author: Author,
    /// File path the comment is anchored to.
    pub file: String,
    /// Line number as recorded in the event (base convention not
    /// established here — presumably 1-based; confirm against the emitter).
    pub line: u32,
    /// Comment text.
    pub body: String,
    /// Event timestamp string.
    pub timestamp: String,
    /// Revision number the comment targets; defaults to None when missing.
    #[serde(default)]
    pub revision: Option<u32>,
}
/// Materialized view of one patch, folded from its event DAG.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PatchState {
    /// Patch ID (the collab ref name suffix).
    pub id: String,
    /// Patch title from `PatchCreate`.
    pub title: String,
    /// Patch description from `PatchCreate`.
    pub body: String,
    /// Open/closed/merged after conflict resolution and auto-merge detection.
    pub status: PatchStatus,
    /// Name of the base branch (without the `refs/heads/` prefix).
    pub base_ref: String,
    /// Optional issue reference this patch fixes.
    pub fixes: Option<String>,
    /// Branch name — may also hold a raw commit SHA (see `resolve_head`).
    pub branch: String,
    /// Base branch tip OID at patch creation time (None for old patches).
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub base_commit: Option<String>,
    /// Top-level comments in DAG walk order.
    pub comments: Vec<Comment>,
    /// File/line-anchored comments in DAG walk order.
    pub inline_comments: Vec<InlineComment>,
    /// Reviews in DAG walk order.
    pub reviews: Vec<Review>,
    /// Revisions in arrival order; revision 1 is the creation commit.
    pub revisions: Vec<Revision>,
    /// Timestamp of the `PatchCreate` event.
    pub created_at: String,
    /// Timestamp of the newest event on the ref (empty if none parsed).
    #[serde(default)]
    pub last_updated: String,
    /// Author of the `PatchCreate` event.
    pub author: Author,
}
impl crate::cli::Listable for IssueState {
    /// An issue is listed as open exactly when its status is `Open`.
    fn is_open(&self) -> bool {
        matches!(self.status, IssueStatus::Open)
    }
    fn last_updated(&self) -> &str {
        self.last_updated.as_str()
    }
    fn created_at(&self) -> &str {
        self.created_at.as_str()
    }
    fn title(&self) -> &str {
        self.title.as_str()
    }
}
impl crate::cli::Listable for PatchState {
    /// A patch is listed as open only while `Open`; `Closed` and `Merged`
    /// both count as not open.
    fn is_open(&self) -> bool {
        matches!(self.status, PatchStatus::Open)
    }
    fn last_updated(&self) -> &str {
        self.last_updated.as_str()
    }
    fn created_at(&self) -> &str {
        self.created_at.as_str()
    }
    fn title(&self) -> &str {
        self.title.as_str()
    }
}
impl IssueState {
    /// Materialize the issue at `ref_name`, consulting the ref-tip cache
    /// first and populating it on a miss. `id` becomes `IssueState::id`.
    pub fn from_ref(
        repo: &Repository,
        ref_name: &str,
        id: &str,
    ) -> Result<Self, crate::error::Error> {
        // Check cache first
        if let Some(cached) = cache::get_cached_state::<IssueState>(repo, ref_name) {
            return Ok(cached);
        }
        let state = Self::from_ref_uncached(repo, ref_name, id)?;
        // Cache the result, keyed by the current ref tip so the entry is
        // superseded once new events land on the ref.
        if let Ok(tip) = repo.refname_to_id(ref_name) {
            cache::set_cached_state(repo, ref_name, tip, &state);
        }
        Ok(state)
    }
    /// Walk the DAG and materialize state without consulting the cache.
    ///
    /// Events are folded in the order `dag::walk_events` yields them. Every
    /// event except `IssueOpen` is ignored until an `IssueOpen` has seeded
    /// the state; if none is found an error is returned. Close/reopen
    /// conflicts are resolved with a last-writer-wins (clock, OID) key.
    pub fn from_ref_uncached(
        repo: &Repository,
        ref_name: &str,
        id: &str,
    ) -> Result<Self, crate::error::Error> {
        let events = dag::walk_events(repo, ref_name)?;
        let mut state: Option<IssueState> = None;
        // Newest event timestamp seen so far, kept both parsed (for
        // comparison) and raw (for writing back into `last_updated`).
        let mut latest: Option<(DateTime<Utc>, String)> = None;
        // Track the (clock, commit_oid_hex) of the latest status-changing event.
        // Higher clock wins; on tie, lexicographically higher OID wins.
        let mut status_key: Option<(u64, String)> = None;
        // Accumulator for IssueCommitLink dedup. Keyed by commit SHA, valued by
        // the (clock, timestamp, oid_hex) sort key plus the LinkedCommit payload.
        // We keep the entry with the minimum sort key per SHA — i.e. the
        // earliest emission by (clock, timestamp, oid). Topological walk order
        // alone does not guarantee this for cross-machine concurrent events
        // that are reconciled via merge commits, so we sort explicitly.
        let mut link_acc: HashMap<String, ((u64, String, String), LinkedCommit)> = HashMap::new();
        for (oid, event) in events {
            let ts = parse_timestamp(&event.timestamp);
            if latest.as_ref().is_none_or(|(prev, _)| ts > *prev) {
                latest = Some((ts, event.timestamp.clone()));
            }
            match event.action {
                Action::IssueOpen { title, body, relates_to } => {
                    // Seed the state with empty collections; subsequent
                    // events fill them in.
                    state = Some(IssueState {
                        id: id.to_string(),
                        title,
                        body,
                        status: IssueStatus::Open,
                        close_reason: None,
                        closed_by: None,
                        labels: Vec::new(),
                        assignees: Vec::new(),
                        comments: Vec::new(),
                        linked_commits: Vec::new(),
                        created_at: event.timestamp.clone(),
                        last_updated: String::new(),
                        author: event.author.clone(),
                        relates_to,
                    });
                }
                Action::IssueComment { body } => {
                    if let Some(ref mut s) = state {
                        s.comments.push(Comment {
                            author: event.author.clone(),
                            body,
                            timestamp: event.timestamp.clone(),
                            commit_id: oid,
                        });
                    }
                }
                Action::IssueClose { reason } => {
                    if let Some(ref mut s) = state {
                        // Last-writer-wins against any concurrent reopen.
                        let key = (event.clock, oid.to_string());
                        if status_key.as_ref().is_none_or(|k| key >= *k) {
                            s.status = IssueStatus::Closed;
                            s.close_reason = reason;
                            s.closed_by = Some(oid);
                            status_key = Some(key);
                        }
                    }
                }
                Action::IssueEdit { title, body } => {
                    if let Some(ref mut s) = state {
                        // Edits are partial: only fields present in the
                        // event are overwritten.
                        if let Some(t) = title {
                            s.title = t;
                        }
                        if let Some(b) = body {
                            s.body = b;
                        }
                    }
                }
                Action::IssueLabel { label } => {
                    if let Some(ref mut s) = state {
                        // Idempotent: re-adding an existing label is a no-op.
                        if !s.labels.contains(&label) {
                            s.labels.push(label);
                        }
                    }
                }
                Action::IssueUnlabel { label } => {
                    if let Some(ref mut s) = state {
                        s.labels.retain(|l| l != &label);
                    }
                }
                Action::IssueAssign { assignee } => {
                    if let Some(ref mut s) = state {
                        // Idempotent: duplicate assignments are ignored.
                        if !s.assignees.contains(&assignee) {
                            s.assignees.push(assignee);
                        }
                    }
                }
                Action::IssueUnassign { assignee } => {
                    if let Some(ref mut s) = state {
                        s.assignees.retain(|a| a != &assignee);
                    }
                }
                Action::IssueReopen => {
                    if let Some(ref mut s) = state {
                        // Mirror of IssueClose: same last-writer-wins key.
                        let key = (event.clock, oid.to_string());
                        if status_key.as_ref().is_none_or(|k| key >= *k) {
                            s.status = IssueStatus::Open;
                            s.close_reason = None;
                            s.closed_by = None;
                            status_key = Some(key);
                        }
                    }
                }
                Action::IssueCommitLink { commit } => {
                    if state.is_some() {
                        // Render-time dedup by commit SHA. We compare an
                        // explicit (clock, timestamp, oid_hex) key per event
                        // and keep the minimum so the surviving entry is the
                        // earliest emission per the spec, regardless of how
                        // the merged DAG happens to topo-order itself.
                        let key = (event.clock, event.timestamp.clone(), oid.to_string());
                        let new_link = LinkedCommit {
                            commit: commit.clone(),
                            event_author: event.author.clone(),
                            event_timestamp: event.timestamp.clone(),
                        };
                        link_acc
                            .entry(commit)
                            .and_modify(|existing| {
                                if key < existing.0 {
                                    *existing = (key.clone(), new_link.clone());
                                }
                            })
                            .or_insert((key, new_link));
                    }
                }
                // Events not relevant to issues are silently ignored.
                _ => {}
            }
        }
        if let Some(ref mut s) = state {
            s.last_updated = latest.map(|(_, raw)| raw).unwrap_or_default();
            // Flush the linked-commit accumulator into state.linked_commits in
            // a stable order. Sort by the same (clock, timestamp, oid) key we
            // used for the per-SHA min so the rendered list is deterministic
            // across runs (HashMap iteration order is randomized).
            let mut entries: Vec<((u64, String, String), LinkedCommit)> =
                link_acc.into_values().collect();
            entries.sort_by(|a, b| a.0.cmp(&b.0));
            s.linked_commits = entries.into_iter().map(|(_, lc)| lc).collect();
        }
        state.ok_or_else(|| git2::Error::from_str("no IssueOpen event found in DAG").into())
    }
}
impl PatchState {
    /// Resolve the current head commit OID for this patch by looking up `refs/heads/{branch}`.
    ///
    /// The `branch` field may also hold a raw commit SHA (patches created
    /// with an explicit head commit); that interpretation is tried first.
    pub fn resolve_head(&self, repo: &Repository) -> Result<Oid, crate::error::Error> {
        // Try parsing as a hex OID first (for patches created with head_commit)
        if let Ok(oid) = Oid::from_str(&self.branch) {
            if repo.find_commit(oid).is_ok() {
                return Ok(oid);
            }
        }
        // Fall back to branch name lookup
        let ref_name = format!("refs/heads/{}", self.branch);
        repo.refname_to_id(&ref_name).map_err(|e| {
            crate::error::Error::Cmd(format!(
                "branch '{}' not found: {}",
                self.branch, e
            ))
        })
    }
    /// Compute staleness: how many commits the branch is ahead of base,
    /// and how many commits the base is ahead of the branch.
    /// Returns (ahead, behind).
    pub fn staleness(&self, repo: &Repository) -> Result<(usize, usize), crate::error::Error> {
        let branch_tip = self.resolve_head(repo)?;
        let base_ref = format!("refs/heads/{}", self.base_ref);
        let base_tip = repo.refname_to_id(&base_ref)?;
        let (ahead, behind) = repo.graph_ahead_behind(branch_tip, base_tip)?;
        Ok((ahead, behind))
    }
    /// Auto-detect merge: if the patch is still Open and its head is
    /// reachable from the base branch tip, the user merged it outside of
    /// git-collab. We compare the current base tip to the base_commit
    /// recorded at creation time — if it has moved to include the patch
    /// head, the patch is merged.
    fn check_auto_merge(&mut self, repo: &Repository) {
        if self.status != PatchStatus::Open {
            return;
        }
        // Best-effort detection: bail silently if head or base can't resolve.
        let Ok(patch_head) = self.resolve_head(repo) else { return };
        let base_ref = format!("refs/heads/{}", self.base_ref);
        let Ok(base_tip) = repo.refname_to_id(&base_ref) else { return };
        // Patches without a recorded base_commit (None) never auto-merge.
        let base_moved = self.base_commit.as_ref()
            .map(|bc| bc != &base_tip.to_string())
            .unwrap_or(false);
        let reachable = base_tip == patch_head
            || repo.graph_descendant_of(base_tip, patch_head).unwrap_or(false);
        if base_moved && reachable {
            self.status = PatchStatus::Merged;
        }
    }
    /// Materialize the patch at `ref_name`, consulting the ref-tip cache
    /// first. A cached state is re-checked for out-of-band merges and the
    /// cache refreshed when the status is corrected.
    pub fn from_ref(
        repo: &Repository,
        ref_name: &str,
        id: &str,
    ) -> Result<Self, crate::error::Error> {
        // Check cache first
        if let Some(mut cached) = cache::get_cached_state::<PatchState>(repo, ref_name) {
            // The cache may return a stale Open status if the patch was merged
            // outside of git-collab since the DAG tip hasn't changed.
            cached.check_auto_merge(repo);
            if cached.status == PatchStatus::Merged {
                // Update the cache with the corrected status
                if let Ok(tip) = repo.refname_to_id(ref_name) {
                    cache::set_cached_state(repo, ref_name, tip, &cached);
                }
            }
            return Ok(cached);
        }
        let state = Self::from_ref_uncached(repo, ref_name, id)?;
        // Cache the result
        if let Ok(tip) = repo.refname_to_id(ref_name) {
            cache::set_cached_state(repo, ref_name, tip, &state);
        }
        Ok(state)
    }
    /// Walk the DAG and materialize state without consulting the cache.
    ///
    /// Events are folded in walk order; everything before a `PatchCreate`
    /// is ignored, and close/merge conflicts are resolved with a
    /// last-writer-wins (clock, OID) key.
    pub fn from_ref_uncached(
        repo: &Repository,
        ref_name: &str,
        id: &str,
    ) -> Result<Self, crate::error::Error> {
        let events = dag::walk_events(repo, ref_name)?;
        let mut state: Option<PatchState> = None;
        // Newest event timestamp seen, kept parsed (for comparison) and raw.
        let mut latest: Option<(DateTime<Utc>, String)> = None;
        // (clock, oid_hex) of the winning status-changing event so far.
        let mut status_key: Option<(u64, String)> = None;
        for (oid, event) in events {
            let ts = parse_timestamp(&event.timestamp);
            if latest.as_ref().is_none_or(|(prev, _)| ts > *prev) {
                latest = Some((ts, event.timestamp.clone()));
            }
            match event.action {
                Action::PatchCreate {
                    title,
                    body,
                    base_ref,
                    branch,
                    fixes,
                    commit,
                    tree,
                    base_commit,
                } => {
                    // The creating event doubles as revision 1.
                    let revisions = vec![Revision {
                        number: 1,
                        commit: commit.clone(),
                        tree,
                        body: None,
                        timestamp: event.timestamp.clone(),
                    }];
                    state = Some(PatchState {
                        id: id.to_string(),
                        title,
                        body,
                        status: PatchStatus::Open,
                        base_ref,
                        fixes,
                        branch,
                        base_commit,
                        comments: Vec::new(),
                        inline_comments: Vec::new(),
                        reviews: Vec::new(),
                        revisions,
                        created_at: event.timestamp.clone(),
                        last_updated: String::new(),
                        author: event.author.clone(),
                    });
                }
                Action::PatchRevision { commit, tree, body } => {
                    if let Some(ref mut s) = state {
                        // Dedup by commit OID — skip if already seen
                        let already_seen = s.revisions.iter().any(|r| r.commit == commit);
                        if !already_seen {
                            // Revision numbers are dense: next = count + 1.
                            let number = s.revisions.len() as u32 + 1;
                            s.revisions.push(Revision {
                                number,
                                commit,
                                tree,
                                body,
                                timestamp: event.timestamp.clone(),
                            });
                        }
                    }
                }
                Action::PatchReview { verdict, body, revision } => {
                    if let Some(ref mut s) = state {
                        s.reviews.push(Review {
                            author: event.author.clone(),
                            verdict,
                            body,
                            timestamp: event.timestamp.clone(),
                            revision: Some(revision),
                        });
                    }
                }
                Action::PatchComment { body } => {
                    if let Some(ref mut s) = state {
                        s.comments.push(Comment {
                            author: event.author.clone(),
                            body,
                            timestamp: event.timestamp.clone(),
                            commit_id: oid,
                        });
                    }
                }
                Action::PatchInlineComment { file, line, body, revision } => {
                    if let Some(ref mut s) = state {
                        s.inline_comments.push(InlineComment {
                            author: event.author.clone(),
                            file,
                            line,
                            body,
                            timestamp: event.timestamp.clone(),
                            revision: Some(revision),
                        });
                    }
                }
                Action::PatchClose { .. } => {
                    if let Some(ref mut s) = state {
                        // Last-writer-wins against a concurrent merge/close.
                        let key = (event.clock, oid.to_string());
                        if status_key.as_ref().is_none_or(|k| key >= *k) {
                            s.status = PatchStatus::Closed;
                            status_key = Some(key);
                        }
                    }
                }
                Action::PatchMerge => {
                    if let Some(ref mut s) = state {
                        // Same last-writer-wins key as PatchClose.
                        let key = (event.clock, oid.to_string());
                        if status_key.as_ref().is_none_or(|k| key >= *k) {
                            s.status = PatchStatus::Merged;
                            status_key = Some(key);
                        }
                    }
                }
                // Events not relevant to patches are silently ignored.
                _ => {}
            }
        }
        if let Some(ref mut s) = state {
            s.last_updated = latest.map(|(_, raw)| raw).unwrap_or_default();
            // A merge done outside git-collab leaves no PatchMerge event;
            // detect it from the repository graph instead.
            s.check_auto_merge(repo);
        }
        state.ok_or_else(|| git2::Error::from_str("no PatchCreate event found in DAG").into())
    }
}
/// Enumerate collab refs under a given prefix, returning (ref_name, id) pairs.
///
/// Refs whose names are not valid UTF-8 — `Reference::name()` returns `None`
/// for those — are skipped. Previously they produced bogus `("", "")` pairs
/// via `unwrap_or_default()`, which downstream code would treat as a real
/// (empty) ID.
fn refs_under(
    repo: &Repository,
    prefix: &str,
) -> Result<Vec<(String, String)>, crate::error::Error> {
    let glob = format!("{}*", prefix);
    let refs = repo.references_glob(&glob)?;
    let mut result = Vec::new();
    for r in refs {
        let r = r?;
        // Skip refs with non-UTF-8 names rather than emitting empty pairs.
        let Some(ref_name) = r.name() else { continue };
        // The glob guarantees the prefix in practice; skip defensively if not.
        let Some(id) = ref_name.strip_prefix(prefix) else { continue };
        result.push((ref_name.to_string(), id.to_string()));
    }
    Ok(result)
}
/// Enumerate active collab refs of the given kind ("issues" or "patches").
fn collab_refs(
    repo: &Repository,
    kind: &str,
) -> Result<Vec<(String, String)>, crate::error::Error> {
    let prefix = format!("refs/collab/{}/", kind);
    refs_under(repo, &prefix)
}
/// Enumerate archived collab refs of the given kind ("issues" or "patches").
fn collab_archive_refs(
    repo: &Repository,
    kind: &str,
) -> Result<Vec<(String, String)>, crate::error::Error> {
    let prefix = format!("refs/collab/archive/{}/", kind);
    refs_under(repo, &prefix)
}
/// Resolve a short ID prefix to a full ref. Searches both active and archive namespaces.
/// Returns (ref_name, id).
fn resolve_ref(
repo: &Repository,
kind: &str,
singular: &str,
prefix: &str,
) -> Result<(String, String), crate::error::Error> {
let active: Vec<_> = collab_refs(repo, kind)?
.into_iter()
.filter(|(_, id)| id.starts_with(prefix))
.collect();
let archived: Vec<_> = collab_archive_refs(repo, kind)?
.into_iter()
.filter(|(_, id)| id.starts_with(prefix))
.collect();
// Deduplicate: if the same ID appears in both active and archive, prefer archive
let mut seen = std::collections::HashSet::new();
let mut matches = Vec::new();
for entry in archived.into_iter().chain(active.into_iter()) {
if seen.insert(entry.1.clone()) {
matches.push(entry);
}
}
match matches.len() {
0 => Err(
git2::Error::from_str(&format!("no {} found matching '{}'", singular, prefix)).into(),
),
1 => Ok(matches.into_iter().next().unwrap()),
_ => Err(git2::Error::from_str(&format!(
"ambiguous {} prefix '{}': {} matches",
singular,
prefix,
matches.len()
))
.into()),
}
}
/// List active issue refs, excluding any that also have an archived ref.
pub fn list_issues(repo: &Repository) -> Result<Vec<IssueState>, crate::error::Error> {
    // IDs that have an archived ref; their active counterparts are skipped.
    let archived_ids: std::collections::HashSet<String> = collab_archive_refs(repo, "issues")?
        .into_iter()
        .map(|(_, id)| id)
        .collect();
    let mut items = Vec::new();
    for (ref_name, id) in collab_refs(repo, "issues")? {
        if archived_ids.contains(&id) {
            continue;
        }
        // A ref that fails to materialize is reported and skipped, not fatal.
        match IssueState::from_ref(repo, &ref_name, &id) {
            Ok(state) => items.push(state),
            Err(e) => eprintln!("warning: skipping issue {:.8}: {}", id, e),
        }
    }
    Ok(items)
}
/// List active patch refs, excluding any that also have an archived ref.
pub fn list_patches(repo: &Repository) -> Result<Vec<PatchState>, crate::error::Error> {
    // IDs that have an archived ref; their active counterparts are skipped.
    let archived_ids: std::collections::HashSet<String> = collab_archive_refs(repo, "patches")?
        .into_iter()
        .map(|(_, id)| id)
        .collect();
    let mut items = Vec::new();
    for (ref_name, id) in collab_refs(repo, "patches")? {
        if archived_ids.contains(&id) {
            continue;
        }
        // A ref that fails to materialize is reported and skipped, not fatal.
        match PatchState::from_ref(repo, &ref_name, &id) {
            Ok(state) => items.push(state),
            Err(e) => eprintln!("warning: skipping patch {:.8}: {}", id, e),
        }
    }
    Ok(items)
}
/// List all issue refs (active + archived) and return their materialized state.
/// Deduplicates by ID, preferring the archived version (which has the final state).
pub fn list_issues_with_archived(repo: &Repository) -> Result<Vec<IssueState>, crate::error::Error> {
    let archived = collab_archive_refs(repo, "issues")?;
    let active = collab_refs(repo, "issues")?;
    let mut seen = std::collections::HashSet::new();
    let mut items = Vec::new();
    // Archived refs come first in the chain, so the first occurrence of an
    // ID — which wins the dedup — is always the archived one.
    for (ref_name, id) in archived.into_iter().chain(active) {
        if !seen.insert(id.clone()) {
            continue;
        }
        match IssueState::from_ref(repo, &ref_name, &id) {
            Ok(state) => items.push(state),
            Err(e) => eprintln!("warning: skipping issue {:.8}: {}", id, e),
        }
    }
    Ok(items)
}
/// List all patch refs (active + archived) and return their materialized state.
/// Deduplicates by ID, preferring the archived version (which has the final state).
pub fn list_patches_with_archived(repo: &Repository) -> Result<Vec<PatchState>, crate::error::Error> {
    let archived = collab_archive_refs(repo, "patches")?;
    let active = collab_refs(repo, "patches")?;
    let mut seen = std::collections::HashSet::new();
    let mut items = Vec::new();
    // Archived refs come first in the chain, so the first occurrence of an
    // ID — which wins the dedup — is always the archived one.
    for (ref_name, id) in archived.into_iter().chain(active) {
        if !seen.insert(id.clone()) {
            continue;
        }
        match PatchState::from_ref(repo, &ref_name, &id) {
            Ok(state) => items.push(state),
            Err(e) => eprintln!("warning: skipping patch {:.8}: {}", id, e),
        }
    }
    Ok(items)
}
/// Move an issue ref from active to archive namespace.
pub fn archive_issue_ref(repo: &Repository, id: &str) -> Result<(), crate::error::Error> {
    let active = format!("refs/collab/issues/{}", id);
    let archived = format!("refs/collab/archive/issues/{}", id);
    // Create the archive ref pointing at the same tip, then drop the
    // active one. `force = false` refuses to clobber an existing archive.
    let tip = repo.refname_to_id(&active)?;
    repo.reference(&archived, tip, false, "archive issue")?;
    repo.find_reference(&active)?.delete()?;
    Ok(())
}
/// Move a patch ref from active to archive namespace.
pub fn archive_patch_ref(repo: &Repository, id: &str) -> Result<(), crate::error::Error> {
    let active = format!("refs/collab/patches/{}", id);
    let archived = format!("refs/collab/archive/patches/{}", id);
    // Create the archive ref pointing at the same tip, then drop the
    // active one. `force = false` refuses to clobber an existing archive.
    let tip = repo.refname_to_id(&active)?;
    repo.reference(&archived, tip, false, "archive patch")?;
    repo.find_reference(&active)?.delete()?;
    Ok(())
}
/// Resolve a short ID prefix to the full issue ref name. Returns (ref_name, id).
pub fn resolve_issue_ref(
    repo: &Repository,
    prefix: &str,
) -> Result<(String, String), crate::error::Error> {
    // Thin wrapper fixing kind/singular for issues.
    let (kind, singular) = ("issues", "issue");
    resolve_ref(repo, kind, singular, prefix)
}
/// Resolve a short ID prefix to the full patch ref name. Returns (ref_name, id).
pub fn resolve_patch_ref(
    repo: &Repository,
    prefix: &str,
) -> Result<(String, String), crate::error::Error> {
    // Thin wrapper fixing kind/singular for patches.
    let (kind, singular) = ("patches", "patch");
    resolve_ref(repo, kind, singular, prefix)
}