gitdataai/libs/service/git/contributors.rs
2026-04-14 19:02:01 +08:00

133 lines
4.2 KiB
Rust

use crate::AppService;
use crate::error::AppError;
use redis::AsyncCommands;
use serde::{Deserialize, Serialize};
use session::Session;
use std::collections::HashMap;
/// Query parameters accepted by the contributors endpoint.
#[derive(Debug, Clone, Deserialize)]
pub struct ContributorsQuery {
// Passed as the limit to the git commit log walk (defaults to 100
// via `default_limit`); it therefore bounds how many commits are
// scanned when aggregating stats.
#[serde(default = "default_limit")]
pub limit: usize,
// Git ref to walk; `None` is forwarded as no ref to the commit log —
// presumably meaning the repository's default branch; TODO confirm
// against `commit_log`'s semantics.
#[serde(default)]
pub ref_name: Option<String>,
}
/// Serde default for [`ContributorsQuery::limit`].
fn default_limit() -> usize {
100
}
/// Aggregated commit statistics for one contributor, as serialized to
/// API clients (and into the Redis cache).
#[derive(Debug, Clone, Serialize, Deserialize, utoipa::ToSchema)]
pub struct ContributorStats {
// Display name, taken from the first commit seen for this email.
pub name: String,
// Author email, preserved with its original casing (dedup is
// case-insensitive).
pub email: String,
// Number of commits authored within the scanned window.
pub commits: usize,
// Author time (seconds) of the earliest commit seen; `None` only if
// no commit contributed to this entry.
pub first_commit_at: Option<i64>,
// Author time (seconds) of the latest commit seen.
pub last_commit_at: Option<i64>,
}
/// Response body for the contributors endpoint.
#[derive(Debug, Clone, Serialize, Deserialize, utoipa::ToSchema)]
pub struct ContributorsResponse {
// Number of distinct contributors found (length of `contributors`).
pub total: usize,
// Contributors sorted by commit count, descending.
pub contributors: Vec<ContributorStats>,
}
/// Internal accumulator used while folding the commit log into
/// per-author stats; converted to `ContributorStats` before returning.
struct ContributorEntry {
name: String,
email: String,
commits: usize,
first_commit_at: Option<i64>,
last_commit_at: Option<i64>,
}
impl AppService {
    /// Compute per-contributor commit statistics for a repository.
    ///
    /// Walks up to `query.limit` commits on `query.ref_name` (or the default
    /// ref when `None`), deduplicates authors by lower-cased email, and
    /// returns contributors sorted by commit count descending (ties broken
    /// by email so the ordering is deterministic). Results are cached in
    /// Redis for one hour; all cache failures are non-fatal.
    ///
    /// # Errors
    /// Returns `AppError` when the repository cannot be resolved, the
    /// blocking git task panics, or the underlying git log fails.
    pub async fn git_contributors(
        &self,
        namespace: String,
        repo_name: String,
        query: ContributorsQuery,
        ctx: &Session,
    ) -> Result<ContributorsResponse, AppError> {
        // Resolve the repo first so permission/existence errors surface
        // before we consult the cache.
        let repo = self
            .utils_find_repo(namespace.clone(), repo_name.clone(), ctx)
            .await?;
        // The key includes every input that affects the result.
        let cache_key = format!(
            "git:contributors:{}:{}:{:?}:{}",
            namespace, repo_name, query.ref_name, query.limit,
        );
        // Best-effort cache read: any Redis or JSON error falls through
        // to recomputation.
        if let Ok(mut conn) = self.cache.conn().await {
            if let Ok(cached) = conn.get::<_, String>(cache_key.clone()).await {
                if let Ok(cached) = serde_json::from_str(&cached) {
                    return Ok(cached);
                }
            }
        }
        // Git object access is blocking I/O; keep it off the async runtime.
        let repo_clone = repo.clone();
        let ref_name_clone = query.ref_name.clone();
        let commits = tokio::task::spawn_blocking(move || {
            let domain = git::GitDomain::from_model(repo_clone)?;
            domain.commit_log(ref_name_clone.as_deref(), 0, query.limit)
        })
        .await
        .map_err(|e| AppError::InternalServerError(format!("Task join error: {}", e)))?
        .map_err(AppError::from)?;
        let mut author_map: HashMap<String, ContributorEntry> = HashMap::new();
        for commit in commits {
            let author = commit.author;
            let time = author.time_secs;
            // Use email as primary key (case-insensitive) for deduplication.
            // If the same person uses multiple emails, they appear as
            // separate contributors — this is the best we can do without an
            // external identity service.
            let key = author.email.to_lowercase();
            let entry = author_map.entry(key).or_insert_with(|| ContributorEntry {
                // Display name comes from the first commit seen for this email.
                name: author.name.clone(),
                email: author.email.clone(),
                commits: 0,
                first_commit_at: None,
                last_commit_at: None,
            });
            entry.commits += 1;
            entry.first_commit_at = Some(entry.first_commit_at.map_or(time, |f| f.min(time)));
            entry.last_commit_at = Some(entry.last_commit_at.map_or(time, |l| l.max(time)));
        }
        let mut contributors: Vec<ContributorStats> = author_map
            .into_values()
            .map(|e| ContributorStats {
                name: e.name,
                email: e.email,
                commits: e.commits,
                first_commit_at: e.first_commit_at,
                last_commit_at: e.last_commit_at,
            })
            .collect();
        // Sort by commit count descending, breaking ties by email: HashMap
        // iteration order is nondeterministic, so without the tie-break the
        // ordering (and hence cached vs. fresh responses) could differ
        // between calls.
        contributors.sort_unstable_by(|a, b| {
            b.commits
                .cmp(&a.commits)
                .then_with(|| a.email.cmp(&b.email))
        });
        let total = contributors.len();
        let response = ContributorsResponse { total, contributors };
        // Best-effort cache write. Skip caching if serialization fails:
        // caching an empty string (as `unwrap_or_default` would) can never
        // deserialize and only wastes a Redis key for an hour.
        if let Ok(mut conn) = self.cache.conn().await {
            match serde_json::to_string(&response) {
                Ok(json) => {
                    if let Err(e) = conn
                        .set_ex::<String, String, ()>(cache_key, json, 60 * 60)
                        .await
                    {
                        slog::debug!(self.logs, "cache set failed (non-fatal): {}", e);
                    }
                }
                Err(e) => {
                    slog::debug!(self.logs, "cache serialize failed (non-fatal): {}", e);
                }
            }
        }
        Ok(response)
    }
}