Model sync: - Filter OpenRouter models by what the user's AI client can actually access, before upserting metadata (avoids bloating with inaccessible models). - Fall back to direct endpoint sync when no OpenRouter metadata matches (handles Bailian/MiniMax and other non-OpenRouter providers). Git stability fixes: - SSH: add 5s timeout on stdin flush/shutdown in channel_eof and cleanup_channel to prevent blocking the event loop on unresponsive git. - SSH: remove dbg!() calls from production code paths. - HTTP auth: pass proper Logger to SshAuthService instead of discarding all auth events to slog::Discard. Dependencies: - reqwest: add native-tls feature for HTTPS on Windows/Linux/macOS.
1114 lines
36 KiB
Rust
//! Synchronizes AI model metadata from OpenRouter into the local database.
|
|
//!
|
|
//! Flow:
|
|
//! 1. Use the configured `async_openai` client (with the real API key) to call
|
|
//! `GET /models` — this returns only the models the current key can access.
|
|
//! 2. Fetch full metadata (pricing, context_length, capabilities) for those
|
|
//! model IDs from OpenRouter's public `/api/v1/models` endpoint (no auth).
|
|
//! 3. Upsert provider / model / version / pricing / capability / profile
|
|
//! records only for models the client can actually call.
|
|
//!
|
|
//! Usage: call `start_sync_task()` to launch a background task that syncs
|
|
//! immediately and then every 10 minutes. On app startup, run it once
|
|
//! eagerly before accepting traffic.
|
|
|
|
use async_openai::Client;
|
|
use async_openai::config::OpenAIConfig;
|
|
use async_openai::types::models::Model as OpenAiModel;
|
|
use slog::Logger;
|
|
use std::time::Duration;
|
|
use tokio::task::JoinHandle;
|
|
use tokio::time::interval;
|
|
use tokio::time::sleep;
|
|
|
|
use crate::error::AppError;
|
|
use crate::AppService;
|
|
use chrono::Utc;
|
|
use db::database::AppDatabase;
|
|
use models::agents::model::Entity as ModelEntity;
|
|
use models::agents::model_capability::Entity as CapabilityEntity;
|
|
use models::agents::model_parameter_profile::Entity as ProfileEntity;
|
|
use models::agents::model_pricing::Entity as PricingEntity;
|
|
use models::agents::model_provider::Entity as ProviderEntity;
|
|
use models::agents::model_provider::Model as ProviderModel;
|
|
use models::agents::model_version::Entity as VersionEntity;
|
|
use models::agents::{CapabilityType, ModelCapability, ModelModality, ModelStatus};
|
|
use sea_orm::prelude::*;
|
|
use sea_orm::Set;
|
|
use serde::Deserialize;
|
|
use serde::Serialize;
|
|
use session::Session;
|
|
use utoipa::ToSchema;
|
|
use uuid::Uuid;
|
|
|
|
const OPENROUTER_URL: &str = "https://openrouter.ai/api/v1/models";
|
|
|
|
// OpenRouter API types -------------------------------------------------------
|
|
|
|
/// Top-level envelope returned by OpenRouter's public `/api/v1/models`.
#[derive(Debug, Clone, Deserialize)]
struct OpenRouterResponse {
    /// Model entries under the response's `data` key.
    data: Vec<OpenRouterModel>,
}
|
|
|
|
/// One model entry from OpenRouter's catalog. All fields beyond `id` are
/// optional/defaulted so schema drift upstream does not break deserialization.
#[derive(Debug, Clone, Deserialize)]
#[allow(dead_code)]
struct OpenRouterModel {
    /// Canonical id, e.g. `"openai/gpt-4o"` — provider slug before the `/`.
    id: String,
    /// Human-readable display name, when provided.
    name: Option<String>,
    #[serde(default)]
    description: Option<String>,
    /// Per-token price strings (see `OpenRouterPricing`).
    pricing: Option<OpenRouterPricing>,
    /// Context window in tokens, when published.
    #[serde(default)]
    context_length: Option<u64>,
    /// Modality/tokenizer metadata used for capability inference.
    #[serde(default)]
    architecture: Option<OpenRouterArchitecture>,
    /// Limits reported by the model's primary hosting provider.
    #[serde(default)]
    top_provider: Option<OpenRouterTopProvider>,
}
|
|
|
|
/// Price fields from OpenRouter, kept as the raw decimal strings the API
/// sends (parsed later by `parse_price`).
/// NOTE(review): confirm the unit — OpenRouter documents per-token USD
/// figures, while `parse_price`'s doc comment assumes per-million.
#[derive(Debug, Clone, Deserialize)]
#[allow(dead_code)]
struct OpenRouterPricing {
    /// Input-token price (decimal string).
    prompt: String,
    /// Output-token price (decimal string).
    completion: String,
    #[serde(default)]
    request: Option<String>,
    #[serde(default)]
    image: Option<String>,
    #[serde(default)]
    input_cache_read: Option<String>,
    #[serde(default)]
    input_cache_write: Option<String>,
    #[serde(default)]
    web_search: Option<String>,
    #[serde(default)]
    internal_reasoning: Option<String>,
}
|
|
|
|
/// Architecture metadata; `modality` (e.g. `"text+image->text"`) drives
/// modality and capability inference.
#[derive(Debug, Clone, Deserialize)]
#[allow(dead_code)]
struct OpenRouterArchitecture {
    /// Combined modality string, e.g. `"text->text"` or `"text+image->text"`.
    #[serde(default)]
    modality: Option<String>,
    #[serde(default)]
    input_modalities: Option<Vec<String>>,
    #[serde(default)]
    output_modalities: Option<Vec<String>>,
    #[serde(default)]
    tokenizer: Option<String>,
    #[serde(default)]
    instruct_type: Option<String>,
}
|
|
|
|
/// Limits reported by the model's primary hosting provider on OpenRouter.
#[derive(Debug, Clone, Deserialize)]
#[allow(dead_code)]
struct OpenRouterTopProvider {
    /// Provider-specific context window, when it differs from the model's.
    #[serde(default)]
    context_length: Option<u64>,
    /// Hard cap on completion tokens, used for `max_output_tokens`.
    #[serde(default)]
    max_completion_tokens: Option<u64>,
    #[serde(default)]
    is_moderated: Option<bool>,
}
|
|
|
|
/// Fallback model type used when the user's AI endpoint is NOT OpenRouter
/// (e.g. Bailian/MiniMax). OpenRouter has no metadata for these models,
/// so we sync them directly from the endpoint's own /models response.
///
/// NOTE(review): currently unused (`sync_models_direct` works from raw id
/// strings); kept for the intended richer direct-sync payload.
#[derive(Debug, Clone)]
#[allow(dead_code)]
struct DirectModel {
    id: String,
    name: Option<String>,
    context_length: Option<u64>,
}
|
|
|
|
// Response type --------------------------------------------------------------
|
|
|
|
/// Counters summarizing one sync pass: how many rows of each kind were
/// inserted (and, for models, how many existing rows were refreshed).
#[derive(Debug, Clone, Serialize, ToSchema)]
pub struct SyncModelsResponse {
    /// Newly inserted model rows.
    pub models_created: i64,
    /// Pre-existing model rows whose metadata was refreshed.
    pub models_updated: i64,
    pub versions_created: i64,
    pub pricing_created: i64,
    pub capabilities_created: i64,
    pub profiles_created: i64,
}
|
|
|
|
// Inference helpers ----------------------------------------------------------
|
|
|
|
fn infer_modality(name: &str, arch_modality: Option<&str>) -> ModelModality {
|
|
if let Some(m) = arch_modality {
|
|
let m = m.to_lowercase();
|
|
if m.contains("text") || m.contains("chat") {
|
|
return ModelModality::Text;
|
|
}
|
|
if m.contains("image") || m.contains("vision") {
|
|
return ModelModality::Multimodal;
|
|
}
|
|
if m.contains("audio") || m.contains("speech") {
|
|
return ModelModality::Audio;
|
|
}
|
|
}
|
|
let lower = name.to_lowercase();
|
|
if lower.contains("vision")
|
|
|| lower.contains("dall-e")
|
|
|| lower.contains("gpt-image")
|
|
|| lower.contains("gpt-4o")
|
|
{
|
|
ModelModality::Multimodal
|
|
} else if lower.contains("embedding") {
|
|
ModelModality::Text
|
|
} else if lower.contains("whisper") || lower.contains("audio") {
|
|
ModelModality::Audio
|
|
} else {
|
|
ModelModality::Text
|
|
}
|
|
}
|
|
|
|
fn infer_capability(name: &str) -> ModelCapability {
|
|
let lower = name.to_lowercase();
|
|
if lower.contains("embedding") {
|
|
ModelCapability::Embedding
|
|
} else {
|
|
ModelCapability::Chat
|
|
}
|
|
}
|
|
|
|
/// Context window in tokens; defaults to 8,192 when upstream omits it.
fn infer_context_length(ctx: Option<u64>) -> i64 {
    match ctx {
        Some(tokens) => tokens as i64,
        None => 8_192,
    }
}
|
|
|
|
/// Max completion tokens as reported by the top provider, if any.
fn infer_max_output(top_provider_max: Option<u64>) -> Option<i64> {
    Some(top_provider_max? as i64)
}
|
|
|
|
fn infer_capability_list(arch: &OpenRouterArchitecture) -> Vec<(CapabilityType, bool)> {
|
|
let mut caps = vec![(CapabilityType::FunctionCall, true)];
|
|
if let Some(m) = &arch.modality {
|
|
let m = m.to_lowercase();
|
|
if m.contains("image") || m.contains("vision") {
|
|
caps.push((CapabilityType::Vision, true));
|
|
}
|
|
if m.contains("text") || m.contains("chat") {
|
|
caps.push((CapabilityType::ToolUse, true));
|
|
}
|
|
}
|
|
caps
|
|
}
|
|
|
|
// Provider helpers -----------------------------------------------------------
|
|
|
|
/// Provider slug is everything before the first `/` in an OpenRouter id
/// (e.g. "openai/gpt-4o" -> "openai"); ids without a slash are returned
/// whole, so the "unknown" fallback is effectively unreachable.
fn extract_provider(model_id: &str) -> &str {
    match model_id.split_once('/') {
        Some((provider, _)) => provider,
        None => model_id,
    }
}
|
|
|
|
/// Normalize an upstream provider slug to our canonical provider name.
///
/// Known slugs map to fixed `&'static str` values. Unknown slugs are
/// interned in a process-wide table so the `'static` return type is
/// satisfied with at most ONE leaked allocation per distinct slug.
/// (The previous `Box::leak` on every call leaked a fresh allocation on
/// each pass of the 10-minute background sync loop.)
fn normalize_provider_name(slug: &str) -> &'static str {
    match slug {
        "openai" => "openai",
        "anthropic" => "anthropic",
        "google" | "google-ai" => "google",
        "mistralai" => "mistral",
        "meta-llama" | "meta" => "meta",
        "deepseek" => "deepseek",
        "azure" | "azure-openai" => "azure",
        "x-ai" | "xai" => "xai",
        s => {
            use std::collections::HashMap;
            use std::sync::{Mutex, OnceLock};
            static INTERNED: OnceLock<Mutex<HashMap<String, &'static str>>> = OnceLock::new();
            let mut table = INTERNED
                .get_or_init(|| Mutex::new(HashMap::new()))
                .lock()
                .expect("provider-name intern table poisoned");
            if let Some(&cached) = table.get(s) {
                cached
            } else {
                // Leak exactly once per distinct unknown slug.
                let leaked: &'static str = Box::leak(s.to_string().into_boxed_str());
                table.insert(s.to_string(), leaked);
                leaked
            }
        }
    }
}
|
|
|
|
/// Human-readable display name for a canonical provider name.
/// Unrecognized providers fall back to the raw name.
fn provider_display_name(name: &str) -> String {
    let display = match name {
        "openai" => "OpenAI",
        "anthropic" => "Anthropic",
        "google" => "Google DeepMind",
        "mistral" => "Mistral AI",
        "meta" => "Meta",
        "deepseek" => "DeepSeek",
        "azure" => "Microsoft Azure",
        "xai" => "xAI",
        other => other,
    };
    display.to_string()
}
|
|
|
|
// Upsert helpers -------------------------------------------------------------
|
|
|
|
async fn upsert_provider(db: &AppDatabase, slug: &str) -> Result<ProviderModel, AppError> {
|
|
let name = normalize_provider_name(slug);
|
|
let display = provider_display_name(name);
|
|
let now = Utc::now();
|
|
|
|
use models::agents::model_provider::Column as PCol;
|
|
if let Some(existing) = ProviderEntity::find()
|
|
.filter(PCol::Name.eq(name))
|
|
.one(db)
|
|
.await?
|
|
{
|
|
let mut active: models::agents::model_provider::ActiveModel = existing.into();
|
|
active.updated_at = Set(now);
|
|
active.update(db).await?;
|
|
Ok(ProviderEntity::find()
|
|
.filter(PCol::Name.eq(name))
|
|
.one(db)
|
|
.await?
|
|
.unwrap())
|
|
} else {
|
|
let active = models::agents::model_provider::ActiveModel {
|
|
id: Set(Uuid::now_v7()),
|
|
name: Set(name.to_string()),
|
|
display_name: Set(display.to_string()),
|
|
website: Set(None),
|
|
status: Set(ModelStatus::Active.to_string()),
|
|
created_at: Set(now),
|
|
updated_at: Set(now),
|
|
};
|
|
active.insert(db).await.map_err(AppError::from)
|
|
}
|
|
}
|
|
|
|
async fn upsert_model(
|
|
db: &AppDatabase,
|
|
provider_id: Uuid,
|
|
model_id_str: &str,
|
|
or_model: &OpenRouterModel,
|
|
) -> Result<(models::agents::model::Model, bool), AppError> {
|
|
let now = Utc::now();
|
|
let modality_str = or_model
|
|
.architecture
|
|
.as_ref()
|
|
.and_then(|a| a.modality.as_deref());
|
|
let modality = infer_modality(model_id_str, modality_str);
|
|
let capability = infer_capability(model_id_str);
|
|
|
|
let context_length = infer_context_length(or_model.context_length);
|
|
let max_output = infer_max_output(
|
|
or_model
|
|
.top_provider
|
|
.as_ref()
|
|
.and_then(|p| p.max_completion_tokens),
|
|
);
|
|
|
|
use models::agents::model::Column as MCol;
|
|
if let Some(existing) = ModelEntity::find()
|
|
.filter(MCol::ProviderId.eq(provider_id))
|
|
.filter(MCol::Name.eq(model_id_str))
|
|
.one(db)
|
|
.await?
|
|
{
|
|
let mut active: models::agents::model::ActiveModel = existing.clone().into();
|
|
active.context_length = Set(context_length);
|
|
active.max_output_tokens = Set(max_output);
|
|
active.status = Set(ModelStatus::Active.to_string());
|
|
active.updated_at = Set(now);
|
|
active.update(db).await?;
|
|
Ok((
|
|
ModelEntity::find_by_id(existing.id).one(db).await?.unwrap(),
|
|
false,
|
|
))
|
|
} else {
|
|
let active = models::agents::model::ActiveModel {
|
|
id: Set(Uuid::now_v7()),
|
|
provider_id: Set(provider_id),
|
|
name: Set(model_id_str.to_string()),
|
|
modality: Set(modality.to_string()),
|
|
capability: Set(capability.to_string()),
|
|
context_length: Set(context_length),
|
|
max_output_tokens: Set(max_output),
|
|
training_cutoff: Set(None),
|
|
is_open_source: Set(false),
|
|
status: Set(ModelStatus::Active.to_string()),
|
|
created_at: Set(now),
|
|
updated_at: Set(now),
|
|
..Default::default()
|
|
};
|
|
let inserted = active.insert(db).await.map_err(AppError::from)?;
|
|
Ok((inserted, true))
|
|
}
|
|
}
|
|
|
|
/// Upsert a model directly from the user's AI endpoint response (no OpenRouter metadata).
|
|
/// Used as fallback when the endpoint is not OpenRouter-compatible.
|
|
async fn upsert_model_direct(
|
|
db: &AppDatabase,
|
|
provider_id: Uuid,
|
|
model_id_str: &str,
|
|
_name: Option<&str>,
|
|
context_length: Option<u64>,
|
|
) -> Result<(models::agents::model::Model, bool), AppError> {
|
|
let now = Utc::now();
|
|
let modality = infer_modality(model_id_str, None);
|
|
let capability = infer_capability(model_id_str);
|
|
let ctx = infer_context_length(context_length);
|
|
|
|
use models::agents::model::Column as MCol;
|
|
if let Some(existing) = ModelEntity::find()
|
|
.filter(MCol::ProviderId.eq(provider_id))
|
|
.filter(MCol::Name.eq(model_id_str))
|
|
.one(db)
|
|
.await?
|
|
{
|
|
let mut active: models::agents::model::ActiveModel = existing.clone().into();
|
|
active.context_length = Set(ctx);
|
|
active.status = Set(ModelStatus::Active.to_string());
|
|
active.updated_at = Set(now);
|
|
active.update(db).await?;
|
|
Ok((
|
|
ModelEntity::find_by_id(existing.id).one(db).await?.unwrap(),
|
|
false,
|
|
))
|
|
} else {
|
|
let active = models::agents::model::ActiveModel {
|
|
id: Set(Uuid::now_v7()),
|
|
provider_id: Set(provider_id),
|
|
name: Set(model_id_str.to_string()),
|
|
modality: Set(modality.to_string()),
|
|
capability: Set(capability.to_string()),
|
|
context_length: Set(ctx),
|
|
max_output_tokens: Set(None),
|
|
training_cutoff: Set(None),
|
|
is_open_source: Set(false),
|
|
status: Set(ModelStatus::Active.to_string()),
|
|
created_at: Set(now),
|
|
updated_at: Set(now),
|
|
..Default::default()
|
|
};
|
|
let inserted = active.insert(db).await.map_err(AppError::from)?;
|
|
Ok((inserted, true))
|
|
}
|
|
}
|
|
|
|
async fn upsert_version(
|
|
db: &AppDatabase,
|
|
model_uuid: Uuid,
|
|
) -> Result<(models::agents::model_version::Model, bool), AppError> {
|
|
use models::agents::model_version::Column as VCol;
|
|
let now = Utc::now();
|
|
if let Some(existing) = VersionEntity::find()
|
|
.filter(VCol::ModelId.eq(model_uuid))
|
|
.filter(VCol::IsDefault.eq(true))
|
|
.one(db)
|
|
.await?
|
|
{
|
|
Ok((existing, false))
|
|
} else {
|
|
let active = models::agents::model_version::ActiveModel {
|
|
id: Set(Uuid::now_v7()),
|
|
model_id: Set(model_uuid),
|
|
version: Set("1".to_string()),
|
|
release_date: Set(None),
|
|
change_log: Set(None),
|
|
is_default: Set(true),
|
|
status: Set(ModelStatus::Active.to_string()),
|
|
created_at: Set(now),
|
|
};
|
|
let inserted = active.insert(db).await.map_err(AppError::from)?;
|
|
Ok((inserted, true))
|
|
}
|
|
}
|
|
|
|
/// Convert an OpenRouter price string into a per-1k-token figure
/// (divides by 1,000; unparseable input falls back to "0.00").
///
/// NOTE(review): the divisor assumes the upstream figure is per-million
/// tokens, but OpenRouter is generally documented as per-token — confirm
/// the unit before relying on these numbers.
fn parse_price(s: &str) -> String {
    s.parse::<f64>()
        .map(|raw| format!("{:.6}", raw / 1_000.0))
        .unwrap_or_else(|_| "0.00".to_string())
}
|
|
|
|
async fn upsert_pricing(
|
|
db: &AppDatabase,
|
|
version_uuid: Uuid,
|
|
pricing: Option<&OpenRouterPricing>,
|
|
) -> Result<bool, AppError> {
|
|
use models::agents::model_pricing::Column as PCol;
|
|
let existing = PricingEntity::find()
|
|
.filter(PCol::ModelVersionId.eq(version_uuid))
|
|
.one(db)
|
|
.await?;
|
|
if existing.is_some() {
|
|
return Ok(false);
|
|
}
|
|
|
|
let (input_str, output_str) = if let Some(p) = pricing {
|
|
(parse_price(&p.prompt), parse_price(&p.completion))
|
|
} else {
|
|
("0.00".to_string(), "0.00".to_string())
|
|
};
|
|
|
|
let active = models::agents::model_pricing::ActiveModel {
|
|
id: Set(Uuid::now_v7().as_u128() as i64),
|
|
model_version_id: Set(version_uuid),
|
|
input_price_per_1k_tokens: Set(input_str),
|
|
output_price_per_1k_tokens: Set(output_str),
|
|
currency: Set("USD".to_string()),
|
|
effective_from: Set(Utc::now()),
|
|
};
|
|
active.insert(db).await.map_err(AppError::from)?;
|
|
Ok(true)
|
|
}
|
|
|
|
async fn upsert_capabilities(
|
|
db: &AppDatabase,
|
|
version_uuid: Uuid,
|
|
arch: Option<&OpenRouterArchitecture>,
|
|
) -> Result<i64, AppError> {
|
|
use models::agents::model_capability::Column as CCol;
|
|
let caps = infer_capability_list(
|
|
arch.unwrap_or(&OpenRouterArchitecture {
|
|
modality: None,
|
|
input_modalities: None,
|
|
output_modalities: None,
|
|
tokenizer: None,
|
|
instruct_type: None,
|
|
}),
|
|
);
|
|
let now = Utc::now();
|
|
let mut created = 0i64;
|
|
|
|
for (cap_type, supported) in caps {
|
|
let exists = CapabilityEntity::find()
|
|
.filter(CCol::ModelVersionId.eq(version_uuid))
|
|
.filter(CCol::Capability.eq(cap_type.to_string()))
|
|
.one(db)
|
|
.await?;
|
|
if exists.is_some() {
|
|
continue;
|
|
}
|
|
let active = models::agents::model_capability::ActiveModel {
|
|
id: Set(Uuid::now_v7().as_u128() as i64),
|
|
model_version_id: Set(version_uuid.as_u128() as i64),
|
|
capability: Set(cap_type.to_string()),
|
|
is_supported: Set(supported),
|
|
created_at: Set(now),
|
|
};
|
|
active.insert(db).await.map_err(AppError::from)?;
|
|
created += 1;
|
|
}
|
|
Ok(created)
|
|
}
|
|
|
|
async fn upsert_parameter_profile(
|
|
db: &AppDatabase,
|
|
version_uuid: Uuid,
|
|
model_name: &str,
|
|
) -> Result<bool, AppError> {
|
|
use models::agents::model_parameter_profile::Column as PCol;
|
|
let existing = ProfileEntity::find()
|
|
.filter(PCol::ModelVersionId.eq(version_uuid))
|
|
.one(db)
|
|
.await?;
|
|
if existing.is_some() {
|
|
return Ok(false);
|
|
}
|
|
|
|
let lower = model_name.to_lowercase();
|
|
let (t_min, t_max) = if lower.contains("o1") || lower.contains("o3") {
|
|
(1.0, 1.0)
|
|
} else {
|
|
(0.0, 2.0)
|
|
};
|
|
|
|
let active = models::agents::model_parameter_profile::ActiveModel {
|
|
id: Set(Uuid::now_v7().as_u128() as i64),
|
|
model_version_id: Set(version_uuid),
|
|
temperature_min: Set(t_min),
|
|
temperature_max: Set(t_max),
|
|
top_p_min: Set(0.0),
|
|
top_p_max: Set(1.0),
|
|
frequency_penalty_supported: Set(true),
|
|
presence_penalty_supported: Set(true),
|
|
};
|
|
active.insert(db).await.map_err(AppError::from)?;
|
|
Ok(true)
|
|
}
|
|
|
|
/// Sync models directly from the user's AI endpoint when OpenRouter has no matching models.
|
|
/// This handles non-OpenRouter endpoints (e.g. Bailian, MiniMax) gracefully.
|
|
async fn sync_models_direct(
|
|
db: &AppDatabase,
|
|
log: &Logger,
|
|
available_ids: &std::collections::HashSet<String>,
|
|
) -> SyncModelsResponse {
|
|
slog::info!(
|
|
log,
|
|
"{}",
|
|
format!(
|
|
"sync_models_direct: {} models from endpoint (no OpenRouter metadata)",
|
|
available_ids.len()
|
|
)
|
|
);
|
|
|
|
let mut models_created = 0i64;
|
|
let mut models_updated = 0i64;
|
|
let mut versions_created = 0i64;
|
|
let mut pricing_created = 0i64;
|
|
let mut capabilities_created = 0i64;
|
|
let mut profiles_created = 0i64;
|
|
|
|
for model_id in available_ids {
|
|
let provider_slug = extract_provider(model_id);
|
|
let provider = match upsert_provider(db, provider_slug).await {
|
|
Ok(p) => p,
|
|
Err(e) => {
|
|
slog::warn!(
|
|
log,
|
|
"{}",
|
|
format!(
|
|
"sync_models_direct: upsert_provider error provider={} {:?}",
|
|
provider_slug, e
|
|
)
|
|
);
|
|
continue;
|
|
}
|
|
};
|
|
|
|
let (model_record, _is_new) =
|
|
match upsert_model_direct(db, provider.id, model_id, None, None).await {
|
|
Ok((m, n)) => {
|
|
if n {
|
|
models_created += 1;
|
|
} else {
|
|
models_updated += 1;
|
|
}
|
|
(m, n)
|
|
}
|
|
Err(e) => {
|
|
slog::warn!(
|
|
log,
|
|
"{}",
|
|
format!(
|
|
"sync_models_direct: upsert_model_direct error model={} {:?}",
|
|
model_id, e
|
|
)
|
|
);
|
|
continue;
|
|
}
|
|
};
|
|
|
|
let (version_record, version_is_new) =
|
|
match upsert_version(db, model_record.id).await {
|
|
Ok(v) => v,
|
|
Err(e) => {
|
|
slog::warn!(
|
|
log,
|
|
"{}",
|
|
format!(
|
|
"sync_models_direct: upsert_version error model={} {:?}",
|
|
model_id, e
|
|
)
|
|
);
|
|
continue;
|
|
}
|
|
};
|
|
if version_is_new {
|
|
versions_created += 1;
|
|
}
|
|
|
|
if upsert_pricing(db, version_record.id, None).await.unwrap_or(false) {
|
|
pricing_created += 1;
|
|
}
|
|
|
|
capabilities_created +=
|
|
upsert_capabilities(db, version_record.id, None).await.unwrap_or(0);
|
|
|
|
if upsert_parameter_profile(db, version_record.id, model_id)
|
|
.await
|
|
.unwrap_or(false)
|
|
{
|
|
profiles_created += 1;
|
|
}
|
|
}
|
|
|
|
slog::info!(
|
|
log,
|
|
"{}",
|
|
format!(
|
|
"sync_models_direct complete: matched={} created={} updated={} \
|
|
versions={} pricing={} capabilities={} profiles={}",
|
|
available_ids.len(),
|
|
models_created,
|
|
models_updated,
|
|
versions_created,
|
|
pricing_created,
|
|
capabilities_created,
|
|
profiles_created
|
|
)
|
|
);
|
|
|
|
SyncModelsResponse {
|
|
models_created,
|
|
models_updated,
|
|
versions_created,
|
|
pricing_created,
|
|
capabilities_created,
|
|
profiles_created,
|
|
}
|
|
}
|
|
|
|
// HTTP helpers ---------------------------------------------------------------
|
|
|
|
async fn fetch_openrouter_models(
|
|
client: &reqwest::Client,
|
|
log: &Logger,
|
|
) -> Result<OpenRouterResponse, String> {
|
|
const MAX_RETRIES: u32 = 3;
|
|
const BASE_DELAY_MS: u64 = 1_000;
|
|
|
|
let mut attempt = 0;
|
|
loop {
|
|
attempt += 1;
|
|
match client.get(OPENROUTER_URL).send().await {
|
|
Ok(r) => {
|
|
return match r.error_for_status() {
|
|
Ok(resp) => match resp.json::<OpenRouterResponse>().await {
|
|
Ok(root) => Ok(root),
|
|
Err(e) => Err(format!(
|
|
"failed to parse response after {} attempt(s): {}",
|
|
attempt, e
|
|
)),
|
|
},
|
|
Err(e) => Err(format!(
|
|
"HTTP status error after {} attempt(s): url={} status={}",
|
|
attempt,
|
|
e.url()
|
|
.map(|u| u.to_string())
|
|
.unwrap_or_else(|| OPENROUTER_URL.to_string()),
|
|
e
|
|
)),
|
|
};
|
|
}
|
|
Err(e) => {
|
|
let kind = if e.is_timeout() {
|
|
"timeout"
|
|
} else if e.is_connect() {
|
|
"connect"
|
|
} else {
|
|
"request"
|
|
};
|
|
let url = e
|
|
.url()
|
|
.map(|u| u.to_string())
|
|
.unwrap_or_else(|| OPENROUTER_URL.to_string());
|
|
|
|
if attempt >= MAX_RETRIES {
|
|
return Err(format!(
|
|
"OpenRouter connection failed after {} attempt(s): [{}] url={} error={:?}",
|
|
attempt, kind, url, e
|
|
));
|
|
}
|
|
let delay_ms = BASE_DELAY_MS * (1 << (attempt - 1));
|
|
slog::warn!(
|
|
log,
|
|
"{}",
|
|
format!(
|
|
"OpenRouter connection attempt {}/{} failed: [{}] url={} error={:?}. retrying in {}ms",
|
|
attempt, MAX_RETRIES, kind, url, e, delay_ms
|
|
)
|
|
);
|
|
sleep(Duration::from_millis(delay_ms)).await;
|
|
}
|
|
}
|
|
}
|
|
}
|
|
|
|
/// Build an async_openai Client from the AI config.
|
|
fn build_ai_client(config: &config::AppConfig) -> Result<Client<OpenAIConfig>, AppError> {
|
|
let api_key = config
|
|
.ai_api_key()
|
|
.map_err(|e| AppError::InternalServerError(format!("AI API key not configured: {}", e)))?;
|
|
|
|
let base_url = config
|
|
.ai_basic_url()
|
|
.unwrap_or_else(|_| "https://api.openai.com".into());
|
|
|
|
let cfg = OpenAIConfig::new()
|
|
.with_api_key(&api_key)
|
|
.with_api_base(&base_url);
|
|
|
|
Ok(Client::with_config(cfg))
|
|
}
|
|
|
|
impl AppService {
|
|
/// Sync metadata for models that are accessible by the configured AI client.
|
|
///
|
|
/// Steps:
|
|
/// 1. Call `client.models().list()` to get the set of accessible model IDs.
|
|
/// 2. Fetch full model list from OpenRouter's public `/api/v1/models` endpoint.
|
|
/// 3. Keep only models whose ID appears in the accessible set, then upsert.
|
|
pub async fn sync_upstream_models(
|
|
&self,
|
|
_ctx: &Session,
|
|
) -> Result<SyncModelsResponse, AppError> {
|
|
// Step 1: list models the AI client can access.
|
|
let ai_client = build_ai_client(&self.config)?;
|
|
let available_ids: std::collections::HashSet<String> = ai_client
|
|
.models()
|
|
.list()
|
|
.await
|
|
.map_err(|e| {
|
|
AppError::InternalServerError(format!(
|
|
"failed to list available models from AI endpoint: {}",
|
|
e
|
|
))
|
|
})?
|
|
.data
|
|
.into_iter()
|
|
.map(|m: OpenAiModel| m.id)
|
|
.collect();
|
|
|
|
slog::info!(
|
|
self.logs,
|
|
"{}",
|
|
format!(
|
|
"sync_upstream_models: {} accessible models found",
|
|
available_ids.len()
|
|
)
|
|
);
|
|
|
|
// Step 2: fetch OpenRouter metadata.
|
|
let http_client = reqwest::Client::new();
|
|
let or_resp: OpenRouterResponse = fetch_openrouter_models(&http_client, &self.logs)
|
|
.await
|
|
.map_err(AppError::InternalServerError)?;
|
|
|
|
// Step 3: filter to only accessible models.
|
|
let filtered: Vec<&OpenRouterModel> = or_resp
|
|
.data
|
|
.iter()
|
|
.filter(|m| available_ids.contains(&m.id))
|
|
.filter(|m| m.id != "openrouter/auto")
|
|
.collect();
|
|
|
|
let filtered_count = filtered.len();
|
|
|
|
// Fallback: if no OpenRouter metadata matches, sync models directly from
|
|
// the user's endpoint (handles Bailian/MiniMax and other non-OpenRouter providers).
|
|
if filtered_count == 0 && !available_ids.is_empty() {
|
|
slog::info!(
|
|
self.logs,
|
|
"{}",
|
|
format!(
|
|
"sync_upstream_models: no OpenRouter matches, falling back to direct sync for {} models",
|
|
available_ids.len()
|
|
)
|
|
);
|
|
return Ok(sync_models_direct(&self.db, &self.logs, &available_ids).await);
|
|
}
|
|
|
|
let mut models_created = 0i64;
|
|
let mut models_updated = 0i64;
|
|
let mut versions_created = 0i64;
|
|
let mut pricing_created = 0i64;
|
|
let mut capabilities_created = 0i64;
|
|
let mut profiles_created = 0i64;
|
|
|
|
for or_model in filtered {
|
|
let provider_slug = extract_provider(&or_model.id);
|
|
let provider = match upsert_provider(&self.db, provider_slug).await {
|
|
Ok(p) => p,
|
|
Err(e) => {
|
|
slog::warn!(
|
|
self.logs,
|
|
"{}",
|
|
format!(
|
|
"sync_upstream_models: upsert_provider error provider={} {:?}",
|
|
provider_slug, e
|
|
)
|
|
);
|
|
continue;
|
|
}
|
|
};
|
|
|
|
let (model_record, _is_new) =
|
|
match upsert_model(&self.db, provider.id, &or_model.id, or_model).await {
|
|
Ok((m, n)) => {
|
|
if n {
|
|
models_created += 1;
|
|
} else {
|
|
models_updated += 1;
|
|
}
|
|
(m, n)
|
|
}
|
|
Err(e) => {
|
|
slog::warn!(
|
|
self.logs,
|
|
"{}",
|
|
format!(
|
|
"sync_upstream_models: upsert_model error model={} {:?}",
|
|
or_model.id, e
|
|
)
|
|
);
|
|
continue;
|
|
}
|
|
};
|
|
|
|
let (version_record, version_is_new) =
|
|
match upsert_version(&self.db, model_record.id).await {
|
|
Ok(v) => v,
|
|
Err(e) => {
|
|
slog::warn!(
|
|
self.logs,
|
|
"{}",
|
|
format!(
|
|
"sync_upstream_models: upsert_version error model={} {:?}",
|
|
or_model.id, e
|
|
)
|
|
);
|
|
continue;
|
|
}
|
|
};
|
|
if version_is_new {
|
|
versions_created += 1;
|
|
}
|
|
|
|
if let Err(e) =
|
|
upsert_pricing(&self.db, version_record.id, or_model.pricing.as_ref()).await
|
|
{
|
|
slog::warn!(
|
|
self.logs,
|
|
"{}",
|
|
format!(
|
|
"sync_upstream_models: upsert_pricing error model={} {:?}",
|
|
or_model.id, e
|
|
)
|
|
);
|
|
} else {
|
|
pricing_created += 1;
|
|
}
|
|
|
|
capabilities_created +=
|
|
upsert_capabilities(&self.db, version_record.id, or_model.architecture.as_ref())
|
|
.await
|
|
.unwrap_or(0);
|
|
|
|
if upsert_parameter_profile(&self.db, version_record.id, &or_model.id)
|
|
.await
|
|
.unwrap_or(false)
|
|
{
|
|
profiles_created += 1;
|
|
}
|
|
}
|
|
|
|
slog::info!(
|
|
self.logs,
|
|
"{}",
|
|
format!(
|
|
"sync_upstream_models: synced {} accessible models ({}/{} new/updated)",
|
|
filtered_count,
|
|
models_created,
|
|
models_updated
|
|
)
|
|
);
|
|
|
|
Ok(SyncModelsResponse {
|
|
models_created,
|
|
models_updated,
|
|
versions_created,
|
|
pricing_created,
|
|
capabilities_created,
|
|
profiles_created,
|
|
})
|
|
}
|
|
|
|
/// Spawn a background task that syncs model metadata immediately
|
|
/// and then every 10 minutes. Returns the `JoinHandle`.
|
|
///
|
|
/// Failures are logged but do not stop the task — it keeps retrying.
|
|
pub fn start_sync_task(self) -> JoinHandle<()> {
|
|
let db = self.db.clone();
|
|
let log = self.logs.clone();
|
|
let ai_api_key = self.config.ai_api_key().ok();
|
|
let ai_base_url = self.config.ai_basic_url().ok();
|
|
|
|
tokio::spawn(async move {
|
|
// Run once immediately on startup before taking traffic.
|
|
Self::sync_once(&db, &log, ai_api_key.clone(), ai_base_url.clone()).await;
|
|
|
|
let mut tick = interval(Duration::from_secs(60 * 10));
|
|
loop {
|
|
tick.tick().await;
|
|
Self::sync_once(&db, &log, ai_api_key.clone(), ai_base_url.clone()).await;
|
|
}
|
|
})
|
|
}
|
|
|
|
/// Perform a single sync pass. Errors are logged and silently swallowed
|
|
/// so the periodic task never stops.
|
|
async fn sync_once(
|
|
db: &AppDatabase,
|
|
log: &Logger,
|
|
ai_api_key: Option<String>,
|
|
ai_base_url: Option<String>,
|
|
) {
|
|
// Build AI client to list accessible models.
|
|
let ai_client = match build_ai_client_from_parts(ai_api_key, ai_base_url) {
|
|
Ok(c) => c,
|
|
Err(msg) => {
|
|
slog::warn!(log, "{}", format!("OpenRouter model sync: {}", msg));
|
|
return;
|
|
}
|
|
};
|
|
|
|
let available_ids: std::collections::HashSet<String> = match ai_client.models().list().await {
|
|
Ok(resp) => resp.data.into_iter().map(|m: OpenAiModel| m.id).collect(),
|
|
Err(e) => {
|
|
slog::warn!(
|
|
log,
|
|
"{}",
|
|
format!("OpenRouter model sync: failed to list available models: {}", e)
|
|
);
|
|
return;
|
|
}
|
|
};
|
|
|
|
let http_client = reqwest::Client::new();
|
|
let or_resp = match fetch_openrouter_models(&http_client, log).await {
|
|
Ok(r) => r,
|
|
Err(msg) => {
|
|
slog::warn!(log, "{}", format!("OpenRouter model sync: {}", msg));
|
|
return;
|
|
}
|
|
};
|
|
|
|
let filtered: Vec<&OpenRouterModel> = or_resp
|
|
.data
|
|
.iter()
|
|
.filter(|m| available_ids.contains(&m.id))
|
|
.filter(|m| m.id != "openrouter/auto")
|
|
.collect();
|
|
|
|
let filtered_count = filtered.len();
|
|
|
|
// Fallback: if no OpenRouter metadata matches, sync models directly from
|
|
// the user's endpoint (handles Bailian/MiniMax and other non-OpenRouter providers).
|
|
if filtered_count == 0 && !available_ids.is_empty() {
|
|
slog::info!(
|
|
log,
|
|
"{}",
|
|
format!(
|
|
"OpenRouter model sync: no matches, falling back to direct sync for {} models",
|
|
available_ids.len()
|
|
)
|
|
);
|
|
sync_models_direct(db, log, &available_ids).await;
|
|
return;
|
|
}
|
|
|
|
let mut models_created = 0i64;
|
|
let mut models_updated = 0i64;
|
|
let mut versions_created = 0i64;
|
|
let mut pricing_created = 0i64;
|
|
let mut capabilities_created = 0i64;
|
|
let mut profiles_created = 0i64;
|
|
|
|
for or_model in filtered {
|
|
let provider_slug = extract_provider(&or_model.id);
|
|
let provider = match upsert_provider(db, provider_slug).await {
|
|
Ok(p) => p,
|
|
Err(e) => {
|
|
slog::warn!(
|
|
log,
|
|
"{}",
|
|
format!(
|
|
"OpenRouter model sync: upsert_provider error provider={} {:?}",
|
|
provider_slug, e
|
|
)
|
|
);
|
|
continue;
|
|
}
|
|
};
|
|
|
|
let (model_record, _is_new) =
|
|
match upsert_model(db, provider.id, &or_model.id, or_model).await {
|
|
Ok((m, true)) => {
|
|
models_created += 1;
|
|
(m, true)
|
|
}
|
|
Ok((m, false)) => {
|
|
models_updated += 1;
|
|
(m, false)
|
|
}
|
|
Err(e) => {
|
|
slog::warn!(
|
|
log,
|
|
"{}",
|
|
format!(
|
|
"OpenRouter model sync: upsert_model error model={} {:?}",
|
|
or_model.id, e
|
|
)
|
|
);
|
|
continue;
|
|
}
|
|
};
|
|
|
|
let (version_record, version_is_new) =
|
|
match upsert_version(db, model_record.id).await {
|
|
Ok(v) => v,
|
|
Err(e) => {
|
|
slog::warn!(
|
|
log,
|
|
"{}",
|
|
format!(
|
|
"OpenRouter model sync: upsert_version error model={} {:?}",
|
|
or_model.id, e
|
|
)
|
|
);
|
|
continue;
|
|
}
|
|
};
|
|
if version_is_new {
|
|
versions_created += 1;
|
|
}
|
|
|
|
if upsert_pricing(db, version_record.id, or_model.pricing.as_ref())
|
|
.await
|
|
.unwrap_or(false)
|
|
{
|
|
pricing_created += 1;
|
|
}
|
|
|
|
capabilities_created +=
|
|
upsert_capabilities(db, version_record.id, or_model.architecture.as_ref())
|
|
.await
|
|
.unwrap_or(0);
|
|
|
|
if upsert_parameter_profile(db, version_record.id, &or_model.id)
|
|
.await
|
|
.unwrap_or(false)
|
|
{
|
|
profiles_created += 1;
|
|
}
|
|
}
|
|
|
|
slog::info!(
|
|
log,
|
|
"{}",
|
|
format!(
|
|
"OpenRouter model sync complete: matched={} created={} updated={} \
|
|
versions={} pricing={} capabilities={} profiles={}",
|
|
filtered_count,
|
|
models_created,
|
|
models_updated,
|
|
versions_created,
|
|
pricing_created,
|
|
capabilities_created,
|
|
profiles_created
|
|
)
|
|
);
|
|
}
|
|
}
|
|
|
|
/// Build an async_openai Client from raw API key and base URL (for background task).
|
|
fn build_ai_client_from_parts(
|
|
api_key: Option<String>,
|
|
base_url: Option<String>,
|
|
) -> Result<Client<OpenAIConfig>, String> {
|
|
let api_key = api_key.ok_or_else(|| "AI API key not configured".to_string())?;
|
|
|
|
let base_url = base_url.unwrap_or_else(|| "https://api.openai.com".into());
|
|
|
|
let cfg = OpenAIConfig::new()
|
|
.with_api_key(&api_key)
|
|
.with_api_base(&base_url);
|
|
|
|
Ok(Client::with_config(cfg))
|
|
}
|