// gitdataai/libs/git/hook/sync/mod.rs
// Snapshot metadata: 2026-04-15 09:08:09 +08:00 · 365 lines · 12 KiB · Rust

pub mod branch;
pub mod commit;
pub mod fsck;
pub mod gc;
pub mod lfs;
pub mod lock;
pub mod status;
pub mod tag;
use db::cache::AppCache;
use db::database::AppDatabase;
use models::projects::project_skill::{Column as SkillCol, Entity as SkillEntity};
use models::projects::project_skill::ActiveModel as SkillActiveModel;
use models::repos::repo::Model as RepoModel;
use models::RepoId;
use models::ActiveModelTrait;
use sea_orm::{ColumnTrait, EntityTrait, QueryFilter, Set};
use slog::Logger;
use std::collections::HashMap;
use std::path::Path;
use crate::GitDomain;
// ── Skill discovery (local, no service crate dependency) ────────────────────────
use sha1::Digest;
/// Recursively scan `base` for files named `SKILL.md`.
///
/// The skill slug is `{short_repo_id}/{parent_dir_name}` to ensure uniqueness
/// across repos. Populates `commit_sha` (current HEAD) and `blob_hash` for
/// each discovered file. Unreadable directories/files and `SKILL.md` files
/// directly inside hidden (dot-prefixed) directories are skipped silently.
fn scan_skills_from_dir(
    base: &Path,
    repo_id: &RepoId,
    commit_sha: &str,
) -> Result<Vec<DiscoveredSkill>, std::io::Error> {
    // Bind the rendered id before slicing: `&repo_id.to_string()[..8]` would
    // borrow a temporary dropped at the end of the statement (E0716), and a
    // direct `[..8]` panics when the id renders to fewer than 8 bytes or cuts
    // a char boundary. `get(..8)` handles both, falling back to the full id.
    let repo_id_str = repo_id.to_string();
    let repo_id_prefix = repo_id_str.get(..8).unwrap_or(&repo_id_str);
    let mut discovered = Vec::new();
    // Iterative DFS via an explicit stack; avoids recursion on deep trees.
    // NOTE(review): this also descends into `.git` and other hidden dirs
    // (only the immediate parent of SKILL.md is dot-filtered) — confirm
    // whether hidden subtrees should be pruned entirely.
    let mut stack = vec![base.to_path_buf()];
    while let Some(dir) = stack.pop() {
        let entries = match std::fs::read_dir(&dir) {
            Ok(e) => e,
            // Unreadable directory (permissions, race with deletion):
            // best-effort skip, consistent with the rest of skill discovery.
            Err(_) => continue,
        };
        for entry in entries.flatten() {
            let path = entry.path();
            if path.is_dir() {
                stack.push(path);
            } else if path.file_name().and_then(|n| n.to_str()) == Some("SKILL.md") {
                // Slug component is the parent directory name; hidden
                // (dot-prefixed) parents are excluded.
                if let Some(dir_name) = path.parent()
                    .and_then(|p| p.file_name())
                    .and_then(|n| n.to_str())
                    .filter(|s| !s.starts_with('.'))
                {
                    let slug = format!("{}/{}", repo_id_prefix, dir_name);
                    if let Ok(raw) = std::fs::read(&path) {
                        let blob_hash = git_blob_hash(&raw);
                        let mut skill = parse_skill_content(&slug, &raw);
                        skill.commit_sha = Some(commit_sha.to_string());
                        skill.blob_hash = Some(blob_hash);
                        discovered.push(skill);
                    }
                }
            }
        }
    }
    Ok(discovered)
}
/// Git blob object id for `content`: the SHA-1 of the header
/// `"blob {len}\0"` followed by the raw bytes, hex-encoded — identical to
/// what `git hash-object` would print for the same data.
fn git_blob_hash(content: &[u8]) -> String {
    let mut sha = sha1::Sha1::new();
    sha.update(format!("blob {}\0", content.len()).as_bytes());
    sha.update(content);
    let digest = sha.finalize();
    hex::encode(digest)
}
/// Build a skill record from the raw bytes of a SKILL.md file.
///
/// Decodes the bytes lossily as UTF-8, splits off any `---`-fenced
/// frontmatter, and lifts `name` / `description` out of the frontmatter
/// metadata when present. The name falls back to the slug with `-` and `_`
/// replaced by spaces. `commit_sha` and `blob_hash` are left unset; the
/// caller fills them in.
///
/// NOTE(review): frontmatter is decoded with `serde_json` — YAML-style
/// frontmatter will fail to parse and silently yield empty metadata;
/// confirm JSON frontmatter is the intended format.
fn parse_skill_content(slug: &str, raw: &[u8]) -> DiscoveredSkill {
    let text = String::from_utf8_lossy(raw);
    let (frontmatter, body) = extract_frontmatter(&text);
    // Unparseable or absent frontmatter degrades to Value::Null, so the
    // `get(..)` lookups below simply return None.
    let metadata: serde_json::Value = frontmatter
        .and_then(|fm| serde_json::from_str(fm).ok())
        .unwrap_or_default();
    let name = metadata
        .get("name")
        .and_then(|v| v.as_str())
        .map(|s| s.to_string())
        .unwrap_or_else(|| slug.replace('-', " ").replace('_', " "));
    let description = metadata
        .get("description")
        .and_then(|v| v.as_str())
        .map(|s| s.to_string());
    let content = body.trim().to_string();
    DiscoveredSkill {
        slug: slug.to_string(),
        name,
        description,
        content,
        metadata,
        commit_sha: None,
        blob_hash: None,
    }
}
/// A skill discovered in a repository.
struct DiscoveredSkill {
    /// Unique identifier: `{short_repo_id}/{parent_dir_name}`.
    slug: String,
    /// Display name from frontmatter `name`, or derived from the slug.
    name: String,
    /// Optional frontmatter `description`.
    description: Option<String>,
    /// SKILL.md body with frontmatter removed and surrounding whitespace trimmed.
    content: String,
    /// Full parsed frontmatter (Value::Null when absent or unparseable).
    metadata: serde_json::Value,
    /// HEAD commit SHA at scan time; set by the scanner, `None` right after parsing.
    commit_sha: Option<String>,
    /// Git blob SHA-1 of the file contents; set by the scanner, `None` right after parsing.
    blob_hash: Option<String>,
}
/// Split `raw` into an optional `---`-fenced frontmatter block and the body.
///
/// Returns `(Some(frontmatter), body)` when the (whitespace-trimmed) input
/// opens with a `---` fence that is closed by a later `---` at the start of
/// a line; otherwise `(None, trimmed_input)`. The body has leading
/// whitespace stripped; the frontmatter keeps its surrounding newlines.
fn extract_frontmatter(raw: &str) -> (Option<&str>, &str) {
    let trimmed = raw.trim_start();
    let after_open = match trimmed.strip_prefix("---") {
        Some(rest) => rest,
        None => return (None, trimmed),
    };
    // Require the closing fence at the start of a line: a bare `find("---")`
    // would match dashes inside a frontmatter value (e.g. `title: a---b`)
    // and split the document in the wrong place.
    match after_open.find("\n---") {
        Some(pos) => {
            // Include the newline preceding the closing fence, matching the
            // span between the two fences exactly.
            let fm = &after_open[..pos + 1];
            let body = after_open[pos + 4..].trim_start();
            (Some(fm), body)
        }
        // Unterminated fence: treat the whole trimmed input as body.
        None => (None, trimmed),
    }
}
/// Synchronizes git repository metadata into the application database after
/// hook events: refs, commits, tags, LFS objects, and repo-sourced skills.
#[derive(Clone)]
pub struct HookMetaDataSync {
    /// Database handle used for all metadata reads/writes.
    pub db: AppDatabase,
    /// Application cache. Not referenced directly in this file — presumably
    /// used by the lock/status helpers in sibling modules; TODO confirm.
    pub cache: AppCache,
    /// The repository row being synced.
    pub repo: RepoModel,
    /// Open handle to the on-disk git repository backing `repo`.
    pub domain: GitDomain,
    /// Structured logger for sync diagnostics.
    pub logger: Logger,
}
impl HookMetaDataSync {
pub fn new(
db: AppDatabase,
cache: AppCache,
repo: RepoModel,
logger: Logger,
) -> Result<Self, crate::GitError> {
let domain = GitDomain::from_model(repo.clone())?;
Ok(Self {
db,
cache,
repo,
domain,
logger,
})
}
pub async fn sync(&self) -> Result<(), crate::GitError> {
let lock_value = self.acquire_lock().await?;
let res = self.sync_internal().await;
if let Err(ref e) = res {
slog::error!(self.logger, "sync failed: {}", e);
}
if let Err(release_err) = self.release_lock(&lock_value).await {
slog::error!(self.logger, "failed to release lock: {}", release_err);
}
res
}
async fn sync_internal(&self) -> Result<(), crate::GitError> {
let mut txn =
self.db.begin().await.map_err(|e| {
crate::GitError::IoError(format!("failed to begin transaction: {}", e))
})?;
self.sync_refs(&mut txn).await?;
self.sync_commits(&mut txn).await?;
self.sync_tags(&mut txn).await?;
self.sync_lfs_objects(&mut txn).await?;
self.run_fsck_and_rollback_if_corrupt(&mut txn).await?;
txn.commit().await.map_err(|e| {
crate::GitError::IoError(format!("failed to commit transaction: {}", e))
})?;
self.run_gc().await?;
self.sync_skills().await;
Ok(())
}
/// Returns a list of (branch_name, oid) for all local branches.
pub fn list_branch_tips(&self) -> Vec<(String, String)> {
let repo = self.domain.repo();
let mut tips = Vec::new();
if let Ok(refs) = repo.references() {
for ref_result in refs {
if let Ok(r) = ref_result {
if r.is_branch() && !r.is_remote() {
if let Some(name) = r.name() {
// name is like "refs/heads/main" -> extract "main"
let branch = name.strip_prefix("refs/heads/").unwrap_or(name);
if let Some(target) = r.target() {
tips.push((branch.to_string(), target.to_string()));
}
}
}
}
}
}
tips
}
/// Returns a list of (tag_name, oid) for all tags.
pub fn list_tag_tips(&self) -> Vec<(String, String)> {
let repo = self.domain.repo();
let mut tips = Vec::new();
if let Ok(refs) = repo.references() {
for ref_result in refs {
if let Ok(r) = ref_result {
if r.is_tag() {
if let Some(name) = r.name() {
// name is like "refs/tags/v1.0" -> extract "v1.0"
let tag = name.strip_prefix("refs/tags/").unwrap_or(name);
if let Some(target) = r.target() {
tips.push((tag.to_string(), target.to_string()));
}
}
}
}
}
}
tips
}
/// Scan the repository for `SKILL.md` files and sync skills to the project.
///
/// This is a best-effort operation — failures are logged but do not fail the sync.
pub async fn sync_skills(&self) {
let project_uid = self.repo.project;
let repo_root = match self.domain.repo().workdir() {
Some(p) => p,
None => return,
};
// Get current HEAD commit SHA for attribution
let commit_sha = self.domain.repo().head().ok()
.and_then(|h| h.target())
.map(|oid| oid.to_string())
.unwrap_or_default();
// Discover skills from the filesystem
let discovered = match scan_skills_from_dir(repo_root, &self.repo.id, &commit_sha) {
Ok(d) => d,
Err(e) => {
slog::warn!(self.logger, "failed to scan skills directory: {}", e);
return;
}
};
if discovered.is_empty() {
return;
}
let now = chrono::Utc::now();
let mut created = 0i64;
let mut updated = 0i64;
let mut removed = 0i64;
// Collect existing repo-sourced skills for this repo
let existing: Vec<_> = match SkillEntity::find()
.filter(SkillCol::ProjectUuid.eq(project_uid))
.filter(SkillCol::Source.eq("repo"))
.filter(SkillCol::RepoId.eq(self.repo.id))
.all(&self.db)
.await
{
Ok(e) => e,
Err(e) => {
slog::warn!(self.logger, "failed to query existing skills: {}", e);
return;
}
};
let existing_by_slug: HashMap<_, _> = existing
.into_iter()
.map(|s| (s.slug.clone(), s))
.collect();
let mut seen_slugs = std::collections::HashSet::new();
for skill in discovered {
seen_slugs.insert(skill.slug.clone());
let json_meta = serde_json::to_value(&skill.metadata).unwrap_or_default();
if let Some(existing_skill) = existing_by_slug.get(&skill.slug) {
if existing_skill.content != skill.content
|| existing_skill.metadata != json_meta
|| existing_skill.commit_sha.as_ref() != skill.commit_sha.as_ref()
|| existing_skill.blob_hash.as_ref() != skill.blob_hash.as_ref()
{
let mut active: SkillActiveModel = existing_skill.clone().into();
active.content = Set(skill.content);
active.metadata = Set(json_meta);
active.commit_sha = Set(skill.commit_sha);
active.blob_hash = Set(skill.blob_hash);
active.updated_at = Set(now);
if active.update(&self.db).await.is_ok() {
updated += 1;
}
}
} else {
let active = SkillActiveModel {
id: Set(0),
project_uuid: Set(project_uid),
slug: Set(skill.slug.clone()),
name: Set(skill.name),
description: Set(skill.description),
source: Set("repo".to_string()),
repo_id: Set(Some(self.repo.id)),
commit_sha: Set(skill.commit_sha),
blob_hash: Set(skill.blob_hash),
content: Set(skill.content),
metadata: Set(json_meta),
enabled: Set(true),
created_by: Set(None),
created_at: Set(now),
updated_at: Set(now),
};
if SkillEntity::insert(active).exec(&self.db).await.is_ok() {
created += 1;
}
}
}
// Remove skills no longer in the repo
for (slug, old_skill) in existing_by_slug {
if !seen_slugs.contains(&slug) {
if SkillEntity::delete_by_id(old_skill.id).exec(&self.db).await.is_ok() {
removed += 1;
}
}
}
if created > 0 || updated > 0 || removed > 0 {
slog::info!(
self.logger,
"skills synced: created={}, updated={}, removed={}",
created, updated, removed
);
}
}
}