//! Git archive API: request query type, response DTOs, and the
//! `AppService` handlers for creating, listing, summarising, and
//! cache-managing repository archives.
use crate::AppService;
|
|
use crate::error::AppError;
|
|
use crate::git::{ArchiveEntry, ArchiveFormat, ArchiveSummary};
|
|
use base64::{Engine, engine::general_purpose::STANDARD as BASE64};
|
|
use redis::AsyncCommands;
|
|
use serde::{Deserialize, Serialize};
|
|
use session::Session;
|
|
use utoipa::ToSchema;
|
|
/// Query parameters accepted by the archive endpoints.
#[derive(Debug, Clone, Deserialize, Serialize, ToSchema)]
pub struct ArchiveQuery {
    /// Commit to archive, given as a hex object-id string.
    pub commit_oid: String,
    /// Requested archive format: "tar", "tar.gz"/"tgz", or "zip"
    /// (matched case-insensitively by `to_archive_format`).
    pub format: String,
    /// Optional prefix passed through to `ArchiveOptions::prefix`.
    #[serde(default)]
    pub prefix: Option<String>,
    /// Optional tree-traversal depth limit; `None` means unlimited
    /// (handlers substitute `usize::MAX`).
    #[serde(default)]
    pub max_depth: Option<usize>,
    /// Optional path filter expression.
    // NOTE(review): `path_filter` participates in `cache_key()` but is never
    // forwarded to `ArchiveOptions` anywhere in this file — confirm whether
    // filtering is applied downstream or is currently unimplemented.
    #[serde(default)]
    pub path_filter: Option<String>,
}
impl ArchiveQuery {
|
|
fn to_archive_format(&self) -> Result<ArchiveFormat, AppError> {
|
|
match self.format.to_lowercase().as_str() {
|
|
"tar" => Ok(ArchiveFormat::Tar),
|
|
"tar.gz" | "tgz" => Ok(ArchiveFormat::TarGz),
|
|
"zip" => Ok(ArchiveFormat::Zip),
|
|
_ => Err(AppError::InternalServerError(format!(
|
|
"unsupported archive format: {}",
|
|
self.format
|
|
))),
|
|
}
|
|
}
|
|
fn cache_key(&self) -> String {
|
|
let prefix = self.prefix.as_deref().unwrap_or("");
|
|
let filter = self.path_filter.as_deref().unwrap_or("");
|
|
let depth = self.max_depth.map_or("0".to_string(), |d| d.to_string());
|
|
if prefix.is_empty() && filter.is_empty() && self.max_depth.is_none() {
|
|
String::new()
|
|
} else {
|
|
use std::collections::hash_map::DefaultHasher;
|
|
use std::hash::{Hash, Hasher};
|
|
let mut h = DefaultHasher::new();
|
|
(prefix, filter, depth).hash(&mut h);
|
|
format!("-{:x}", h.finish())
|
|
}
|
|
}
|
|
}
|
|
/// Response body for the archive entry-listing endpoint.
#[derive(Debug, Clone, Serialize, utoipa::ToSchema)]
pub struct ArchiveListResponse {
    /// The commit the listing was produced for (echoed from the request).
    pub commit_oid: String,
    /// Entries that would be included in the archive.
    pub entries: Vec<ArchiveEntryResponse>,
    /// Convenience count; always equals `entries.len()`.
    pub total_entries: usize,
}
/// A single archive entry as exposed over the API.
#[derive(Debug, Clone, Serialize, utoipa::ToSchema)]
pub struct ArchiveEntryResponse {
    /// Path of the entry inside the archive.
    pub path: String,
    /// Object id of the blob/tree backing this entry.
    pub oid: String,
    /// Entry size in bytes.
    pub size: u64,
    /// Raw file mode bits.
    pub mode: u32,
}
impl From<ArchiveEntry> for ArchiveEntryResponse {
|
|
fn from(e: ArchiveEntry) -> Self {
|
|
Self {
|
|
path: e.path,
|
|
oid: e.oid,
|
|
size: e.size,
|
|
mode: e.mode,
|
|
}
|
|
}
|
|
}
|
|
/// Response body for the archive summary endpoint.
#[derive(Debug, Clone, Serialize, utoipa::ToSchema)]
pub struct ArchiveSummaryResponse {
    /// The commit the summary was computed for.
    pub commit_oid: String,
    /// Wire label of the archive format ("tar", "tar.gz", or "zip").
    pub format: String,
    /// Number of entries the archive would contain.
    pub total_entries: usize,
    /// Sum of entry sizes in bytes (uncompressed).
    // NOTE(review): whether this is pre- or post-compression is not visible
    // here — it comes straight from `ArchiveSummary::total_size`; confirm.
    pub total_size: u64,
}
impl From<ArchiveSummary> for ArchiveSummaryResponse {
|
|
fn from(s: ArchiveSummary) -> Self {
|
|
let format_str = match s.format {
|
|
ArchiveFormat::Tar => "tar",
|
|
ArchiveFormat::TarGz => "tar.gz",
|
|
ArchiveFormat::Zip => "zip",
|
|
};
|
|
Self {
|
|
commit_oid: s.commit_oid,
|
|
format: format_str.to_string(),
|
|
total_entries: s.total_entries,
|
|
total_size: s.total_size,
|
|
}
|
|
}
|
|
}
|
|
/// Response body for the archive-creation endpoint; also the value cached
/// in Redis (serialized as JSON), hence `Deserialize`.
#[derive(Debug, Clone, Serialize, Deserialize, utoipa::ToSchema)]
pub struct ArchiveResponse {
    /// The commit the archive was built from.
    pub commit_oid: String,
    /// Wire label of the archive format ("tar", "tar.gz", or "zip").
    pub format: String,
    /// Size in bytes of the raw (pre-base64) archive payload.
    pub size: usize,
    /// Archive bytes, base64-encoded (standard alphabet).
    pub data: String,
}
/// Response body for the cache-probe endpoint.
#[derive(Debug, Clone, Serialize, utoipa::ToSchema)]
pub struct ArchiveCachedResponse {
    /// The commit that was probed.
    pub commit_oid: String,
    /// Wire label of the archive format that was probed.
    pub format: String,
    /// Whether the domain layer reports an archive as cached.
    pub cached: bool,
}
/// Response body for invalidating a single cached archive.
#[derive(Debug, Clone, Serialize, utoipa::ToSchema)]
pub struct ArchiveInvalidateResponse {
    /// The commit whose cached archive was targeted.
    pub commit_oid: String,
    /// Wire label of the archive format that was targeted.
    pub format: String,
    /// Whether an entry was actually invalidated.
    pub invalidated: bool,
}
/// Response body for invalidating every cached archive of a commit.
#[derive(Debug, Clone, Serialize, utoipa::ToSchema)]
pub struct ArchiveInvalidateAllResponse {
    /// The commit whose cached archives were targeted.
    pub commit_oid: String,
    /// Number of cache entries invalidated.
    pub count: usize,
}
impl AppService {
|
|
pub async fn git_archive(
|
|
&self,
|
|
namespace: String,
|
|
repo_name: String,
|
|
query: ArchiveQuery,
|
|
ctx: &Session,
|
|
) -> Result<ArchiveResponse, AppError> {
|
|
let repo = self
|
|
.utils_find_repo(namespace.clone(), repo_name.clone(), ctx)
|
|
.await?;
|
|
let format = query.to_archive_format()?;
|
|
let format_str = match format {
|
|
ArchiveFormat::Tar => "tar",
|
|
ArchiveFormat::TarGz => "tar.gz",
|
|
ArchiveFormat::Zip => "zip",
|
|
};
|
|
let commit_oid = git::CommitOid::new(&query.commit_oid);
|
|
let cache_key = format!(
|
|
"git:archive:{}:{}:{}:{}:{}",
|
|
namespace,
|
|
repo_name,
|
|
query.commit_oid,
|
|
format_str,
|
|
query.cache_key(),
|
|
);
|
|
if let Ok(mut conn) = self.cache.conn().await {
|
|
if let Ok(cached) = conn.get::<_, String>(cache_key.clone()).await {
|
|
if let Ok(cached) = serde_json::from_str::<ArchiveResponse>(&cached) {
|
|
return Ok(cached);
|
|
}
|
|
}
|
|
}
|
|
let domain = git::GitDomain::from_model(repo)?;
|
|
let opts = git::ArchiveOptions::new()
|
|
.prefix(query.prefix.as_deref().unwrap_or(""))
|
|
.max_depth(query.max_depth.unwrap_or(usize::MAX));
|
|
let data = domain.archive(&commit_oid, format, Some(opts))?;
|
|
let data_b64 = BASE64.encode(&data);
|
|
let response = ArchiveResponse {
|
|
commit_oid: query.commit_oid,
|
|
format: format_str.to_string(),
|
|
size: data.len(),
|
|
data: data_b64,
|
|
};
|
|
if let Ok(mut conn) = self.cache.conn().await {
|
|
if let Err(e) = conn
|
|
.set_ex::<String, String, ()>(
|
|
cache_key,
|
|
serde_json::to_string(&response).unwrap_or_default(),
|
|
60 * 60,
|
|
)
|
|
.await
|
|
{
|
|
slog::debug!(self.logs, "cache set failed (non-fatal): {}", e);
|
|
}
|
|
}
|
|
Ok(response)
|
|
}
|
|
pub async fn git_archive_list(
|
|
&self,
|
|
namespace: String,
|
|
repo_name: String,
|
|
query: ArchiveQuery,
|
|
ctx: &Session,
|
|
) -> Result<ArchiveListResponse, AppError> {
|
|
let repo = self
|
|
.utils_find_repo(namespace.clone(), repo_name.clone(), ctx)
|
|
.await?;
|
|
let commit_oid = git::CommitOid::new(&query.commit_oid);
|
|
let opts = git::ArchiveOptions::new()
|
|
.prefix(query.prefix.as_deref().unwrap_or(""))
|
|
.max_depth(query.max_depth.unwrap_or(usize::MAX));
|
|
let domain = git::GitDomain::from_model(repo)?;
|
|
let entries = domain.archive_list(&commit_oid, Some(opts))?;
|
|
let entry_responses: Vec<ArchiveEntryResponse> = entries
|
|
.into_iter()
|
|
.map(ArchiveEntryResponse::from)
|
|
.collect();
|
|
let total_entries = entry_responses.len();
|
|
Ok(ArchiveListResponse {
|
|
commit_oid: query.commit_oid,
|
|
entries: entry_responses,
|
|
total_entries,
|
|
})
|
|
}
|
|
pub async fn git_archive_summary(
|
|
&self,
|
|
namespace: String,
|
|
repo_name: String,
|
|
query: ArchiveQuery,
|
|
ctx: &Session,
|
|
) -> Result<ArchiveSummaryResponse, AppError> {
|
|
let repo = self
|
|
.utils_find_repo(namespace.clone(), repo_name.clone(), ctx)
|
|
.await?;
|
|
let format = query.to_archive_format()?;
|
|
let format_str = match format {
|
|
ArchiveFormat::Tar => "tar",
|
|
ArchiveFormat::TarGz => "tar.gz",
|
|
ArchiveFormat::Zip => "zip",
|
|
};
|
|
let commit_oid = git::CommitOid::new(&query.commit_oid);
|
|
let opts = git::ArchiveOptions::new()
|
|
.prefix(query.prefix.as_deref().unwrap_or(""))
|
|
.max_depth(query.max_depth.unwrap_or(usize::MAX));
|
|
let domain = git::GitDomain::from_model(repo)?;
|
|
let mut summary = domain.archive_summary(&commit_oid, format, Some(opts))?;
|
|
summary.format = format;
|
|
Ok(ArchiveSummaryResponse {
|
|
commit_oid: query.commit_oid,
|
|
format: format_str.to_string(),
|
|
total_entries: summary.total_entries,
|
|
total_size: summary.total_size,
|
|
})
|
|
}
|
|
pub async fn git_archive_cached(
|
|
&self,
|
|
namespace: String,
|
|
repo_name: String,
|
|
query: ArchiveQuery,
|
|
ctx: &Session,
|
|
) -> Result<ArchiveCachedResponse, AppError> {
|
|
let repo = self
|
|
.utils_find_repo(namespace.clone(), repo_name.clone(), ctx)
|
|
.await?;
|
|
let format = query.to_archive_format()?;
|
|
let format_str = match format {
|
|
ArchiveFormat::Tar => "tar",
|
|
ArchiveFormat::TarGz => "tar.gz",
|
|
ArchiveFormat::Zip => "zip",
|
|
};
|
|
let commit_oid = git::CommitOid::new(&query.commit_oid);
|
|
let opts = git::ArchiveOptions::new()
|
|
.prefix(query.prefix.as_deref().unwrap_or(""))
|
|
.max_depth(query.max_depth.unwrap_or(usize::MAX));
|
|
let domain = git::GitDomain::from_model(repo)?;
|
|
let cached = domain.archive_cached(&commit_oid, format, Some(opts));
|
|
Ok(ArchiveCachedResponse {
|
|
commit_oid: query.commit_oid,
|
|
format: format_str.to_string(),
|
|
cached,
|
|
})
|
|
}
|
|
pub async fn git_archive_invalidate(
|
|
&self,
|
|
namespace: String,
|
|
repo_name: String,
|
|
query: ArchiveQuery,
|
|
ctx: &Session,
|
|
) -> Result<ArchiveInvalidateResponse, AppError> {
|
|
let repo = self
|
|
.utils_find_repo(namespace.clone(), repo_name.clone(), ctx)
|
|
.await?;
|
|
let format = query.to_archive_format()?;
|
|
let format_str = match format {
|
|
ArchiveFormat::Tar => "tar",
|
|
ArchiveFormat::TarGz => "tar.gz",
|
|
ArchiveFormat::Zip => "zip",
|
|
};
|
|
let commit_oid = git::CommitOid::new(&query.commit_oid);
|
|
let opts = git::ArchiveOptions::new()
|
|
.prefix(query.prefix.as_deref().unwrap_or(""))
|
|
.max_depth(query.max_depth.unwrap_or(usize::MAX));
|
|
let domain = git::GitDomain::from_model(repo)?;
|
|
let invalidated = domain.archive_invalidate(&commit_oid, format, Some(opts))?;
|
|
Ok(ArchiveInvalidateResponse {
|
|
commit_oid: query.commit_oid,
|
|
format: format_str.to_string(),
|
|
invalidated,
|
|
})
|
|
}
|
|
pub async fn git_archive_invalidate_all(
|
|
&self,
|
|
namespace: String,
|
|
repo_name: String,
|
|
commit_oid: String,
|
|
ctx: &Session,
|
|
) -> Result<ArchiveInvalidateAllResponse, AppError> {
|
|
let repo = self
|
|
.utils_find_repo(namespace.clone(), repo_name.clone(), ctx)
|
|
.await?;
|
|
let commit = git::CommitOid::new(&commit_oid);
|
|
let domain = git::GitDomain::from_model(repo)?;
|
|
let count = domain.archive_invalidate_all(&commit)?;
|
|
Ok(ArchiveInvalidateAllResponse { commit_oid, count })
|
|
}
|
|
}
|