//! Synchronizes AI models from OpenRouter into the local database.
//!
//! Fetches the full model list via OpenRouter's `/api/v1/models` endpoint
//! (requires `OPENROUTER_API_KEY` in config or falls back to `AI_API_KEY`).
//!
//! OpenRouter returns rich metadata per model including `context_length`,
//! `pricing`, and `architecture.modality` — these are used to populate all
//! five model tables without any hard-coded heuristics.
|
use crate::AppService;
|
|
use crate::error::AppError;
|
|
use chrono::Utc;
|
|
use db::database::AppDatabase;
|
|
use models::agents::model::Entity as ModelEntity;
|
|
use models::agents::model_capability::Entity as CapabilityEntity;
|
|
use models::agents::model_parameter_profile::Entity as ProfileEntity;
|
|
use models::agents::model_pricing::Entity as PricingEntity;
|
|
use models::agents::model_provider::Entity as ProviderEntity;
|
|
use models::agents::model_provider::Model as ProviderModel;
|
|
use models::agents::model_version::Entity as VersionEntity;
|
|
use models::agents::{CapabilityType, ModelCapability, ModelModality, ModelStatus};
|
|
use sea_orm::prelude::*;
|
|
use sea_orm::Set;
|
|
use serde::Deserialize;
|
|
use serde::Serialize;
|
|
use session::Session;
|
|
use utoipa::ToSchema;
|
|
use uuid::Uuid;
|
|
|
|
// OpenRouter API types -------------------------------------------------------
|
|
|
|
/// Top-level envelope of OpenRouter's `GET /api/v1/models` response.
#[derive(Debug, Clone, Deserialize)]
struct OpenRouterResponse {
    // The full model catalogue; one entry per routable model.
    data: Vec<OpenRouterModel>,
}
|
|
|
|
/// One model entry from the OpenRouter catalogue.
///
/// Only the fields this sync actually consumes are deserialized; unknown
/// fields are ignored by serde's default behavior.
#[derive(Debug, Clone, Deserialize)]
struct OpenRouterModel {
    // Fully-qualified model ID, e.g. "anthropic/claude-3.5-sonnet".
    id: String,
    // Human-readable display name; not guaranteed present.
    name: Option<String>,
    #[serde(default)]
    description: Option<String>,
    // Price strings as returned by OpenRouter; absent for some pseudo-models.
    pricing: Option<OpenRouterPricing>,
    // Context window in tokens, when the provider reports it.
    #[serde(default)]
    context_length: Option<u64>,
    // Modality / tokenizer metadata.
    #[serde(default)]
    architecture: Option<OpenRouterArchitecture>,
    // Limits of the primary serving provider for this model.
    #[serde(default)]
    top_provider: Option<OpenRouterTopProvider>,
}
|
|
|
|
/// Pricing block of an OpenRouter model entry.
///
/// All prices arrive as decimal strings in USD.
/// NOTE(review): the unit (per-token vs per-1k vs per-1M tokens) is not
/// established anywhere in this file — confirm against the OpenRouter API
/// docs before doing arithmetic on these values.
#[derive(Debug, Clone, Deserialize)]
struct OpenRouterPricing {
    // Input (prompt) token price.
    prompt: String,
    // Output (completion) token price.
    completion: String,
    #[serde(default)]
    request: Option<String>,
    #[serde(default)]
    image: Option<String>,
    #[serde(default)]
    input_cache_read: Option<String>,
    #[serde(default)]
    input_cache_write: Option<String>,
    #[serde(default)]
    web_search: Option<String>,
    #[serde(default)]
    internal_reasoning: Option<String>,
}
|
|
|
|
/// Architecture metadata of an OpenRouter model entry.
#[derive(Debug, Clone, Deserialize)]
struct OpenRouterArchitecture {
    // Combined modality string (e.g. "text->text"); consumed by
    // `infer_modality` via substring checks.
    #[serde(default)]
    modality: Option<String>,
    #[serde(default)]
    input_modalities: Option<Vec<String>>,
    #[serde(default)]
    output_modalities: Option<Vec<String>>,
    #[serde(default)]
    tokenizer: Option<String>,
    #[serde(default)]
    instruct_type: Option<String>,
}
|
|
|
|
/// Limits reported by the model's primary serving provider.
#[derive(Debug, Clone, Deserialize)]
struct OpenRouterTopProvider {
    #[serde(default)]
    context_length: Option<u64>,
    // Preferred source for a model's max output tokens (see `infer_max_output`).
    #[serde(default)]
    max_completion_tokens: Option<u64>,
    #[serde(default)]
    is_moderated: Option<bool>,
}
|
|
|
|
// Response type --------------------------------------------------------------
|
|
|
|
/// Summary returned by a sync run: how many rows of each kind were written.
///
/// "Created" counters only count genuinely new rows; updates to existing
/// models are reported separately via `models_updated`.
#[derive(Debug, Clone, Serialize, ToSchema)]
pub struct SyncModelsResponse {
    pub models_created: i64,
    pub models_updated: i64,
    pub versions_created: i64,
    pub pricing_created: i64,
    pub capabilities_created: i64,
    pub profiles_created: i64,
}
|
|
|
|
// Inference helpers (fallbacks when OpenRouter data is missing) ---------------
|
|
|
|
fn infer_modality(name: &str, arch_modality: Option<&str>) -> ModelModality {
|
|
if let Some(m) = arch_modality {
|
|
let m = m.to_lowercase();
|
|
if m.contains("text") || m.contains("chat") {
|
|
return ModelModality::Text;
|
|
}
|
|
if m.contains("image") || m.contains("vision") {
|
|
return ModelModality::Multimodal;
|
|
}
|
|
if m.contains("audio") || m.contains("speech") {
|
|
return ModelModality::Audio;
|
|
}
|
|
}
|
|
let lower = name.to_lowercase();
|
|
if lower.contains("vision")
|
|
|| lower.contains("dall-e")
|
|
|| lower.contains("gpt-image")
|
|
|| lower.contains("gpt-4o")
|
|
{
|
|
ModelModality::Multimodal
|
|
} else if lower.contains("embedding") {
|
|
ModelModality::Text
|
|
} else if lower.contains("whisper") || lower.contains("audio") {
|
|
ModelModality::Audio
|
|
} else {
|
|
ModelModality::Text
|
|
}
|
|
}
|
|
|
|
fn infer_capability(name: &str) -> ModelCapability {
|
|
let lower = name.to_lowercase();
|
|
if lower.contains("embedding") {
|
|
ModelCapability::Embedding
|
|
} else if lower.contains("code") {
|
|
ModelCapability::Code
|
|
} else {
|
|
ModelCapability::Chat
|
|
}
|
|
}
|
|
|
|
/// Infer a model's context window (in tokens) from its name.
///
/// Used only when OpenRouter does not report `context_length`. Matches the
/// bare model slug against a prefix table of known models; defaults to
/// 8 192 tokens when nothing matches.
fn infer_context_length(name: &str) -> i64 {
    // BUGFIX: callers pass full OpenRouter IDs like "openai/gpt-4o", but the
    // table below matches with `starts_with` — strip any "provider/" prefix
    // first so qualified IDs actually hit the table instead of always
    // falling through to the default.
    let slug = name.rsplit_once('/').map_or(name, |(_, s)| s);
    let lower = slug.to_lowercase();
    // Hard-coded fallback table for known models. First prefix match wins,
    // so longer/more specific prefixes must precede shorter ones
    // (e.g. "gpt-3.5-turbo-16k" before "gpt-3.5-turbo").
    let fallbacks: &[(&str, i64)] = &[
        ("gpt-4o", 128_000),
        ("chatgpt-4o", 128_000),
        ("o1-preview", 128_000),
        ("o1-mini", 65_536),
        ("o1", 65_536),
        ("o3-mini", 65_536),
        ("gpt-4-turbo", 128_000),
        ("gpt-4-32k", 32_768),
        ("gpt-4", 8_192),
        ("gpt-4o-mini", 128_000),
        ("chatgpt-4o-mini", 128_000),
        ("gpt-3.5-turbo-16k", 16_384),
        ("gpt-3.5-turbo", 16_385),
        ("text-embedding-3-large", 8_191),
        ("text-embedding-3-small", 8_191),
        ("text-embedding-ada", 8_191),
        ("dall-e", 4_096),
        ("whisper", 30_000),
        ("gpt-image-1", 16_384),
    ];
    fallbacks
        .iter()
        .find(|(prefix, _)| lower.starts_with(prefix))
        .map(|&(_, ctx)| ctx)
        // Conservative default for unknown models.
        .unwrap_or(8_192)
}
|
|
|
|
/// Infer a model's maximum completion (output) tokens.
///
/// Prefers OpenRouter's `top_provider.max_completion_tokens`; otherwise
/// matches the bare model slug against a table of known OpenAI-family
/// models. Returns `None` when nothing is known.
fn infer_max_output(name: &str, top_provider_max: Option<u64>) -> Option<i64> {
    if let Some(v) = top_provider_max {
        // Saturate instead of wrapping on the (unrealistic) u64→i64 overflow.
        return Some(i64::try_from(v).unwrap_or(i64::MAX));
    }
    // BUGFIX: callers pass full OpenRouter IDs like "openai/gpt-4o"; strip
    // any "provider/" prefix so the `starts_with` table below can match.
    let slug = name.rsplit_once('/').map_or(name, |(_, s)| s);
    let lower = slug.to_lowercase();
    // First prefix match wins; more specific prefixes come first.
    let fallbacks: &[(&str, i64)] = &[
        ("gpt-4o", 16_384),
        ("chatgpt-4o", 16_384),
        ("o1-preview", 32_768),
        ("o1-mini", 65_536),
        ("o1", 100_000),
        ("o3-mini", 100_000),
        ("gpt-4-turbo", 4_096),
        ("gpt-4-32k", 32_768),
        ("gpt-4", 8_192),
        ("gpt-4o-mini", 16_384),
        ("chatgpt-4o-mini", 16_384),
        ("gpt-3.5-turbo", 4_096),
        ("gpt-image-1", 1_024),
    ];
    for (prefix, max) in fallbacks {
        if lower.starts_with(prefix) {
            return Some(*max);
        }
    }
    // Unknown OpenAI-family models get a conservative default; anything
    // else is left unknown.
    if lower.starts_with("gpt") || lower.starts_with("o1") || lower.starts_with("o3") {
        Some(4_096)
    } else {
        None
    }
}
|
|
|
|
fn infer_capability_list(name: &str) -> Vec<(CapabilityType, bool)> {
|
|
let lower = name.to_lowercase();
|
|
let mut caps = Vec::new();
|
|
caps.push((CapabilityType::FunctionCall, true));
|
|
|
|
if lower.contains("gpt-") || lower.contains("o1") || lower.contains("o3") {
|
|
caps.push((CapabilityType::ToolUse, true));
|
|
}
|
|
|
|
if lower.contains("vision")
|
|
|| lower.contains("gpt-4o")
|
|
|| lower.contains("gpt-image")
|
|
|| lower.contains("dall-e")
|
|
{
|
|
caps.push((CapabilityType::Vision, true));
|
|
}
|
|
|
|
if lower.contains("o1") || lower.contains("o3") {
|
|
caps.push((CapabilityType::Reasoning, true));
|
|
}
|
|
|
|
caps
|
|
}
|
|
|
|
/// Name-based pricing fallback for when OpenRouter supplies no pricing.
///
/// Returns `(input_price, output_price)` as decimal strings, or `None` for
/// unrecognized models. First matching rule wins, so more specific names
/// are checked before their shorter prefixes.
fn infer_pricing_fallback(name: &str) -> Option<(String, String)> {
    let lower = name.to_lowercase();
    let has = |needle: &str| lower.contains(needle);

    let (input, output) = if has("gpt-4o-mini") || has("chatgpt-4o-mini") {
        ("0.075", "0.30")
    } else if has("gpt-4o") || has("chatgpt-4o") {
        ("2.50", "10.00")
    } else if has("gpt-4-turbo") {
        ("10.00", "30.00")
    } else if has("gpt-4") && !has("4o") {
        ("15.00", "60.00")
    } else if has("gpt-3.5-turbo") {
        ("0.50", "1.50")
    } else if has("o1-preview") {
        ("15.00", "60.00")
    } else if has("o1-mini") {
        ("3.00", "12.00")
    } else if has("o1") {
        ("15.00", "60.00")
    } else if has("o3-mini") {
        ("1.50", "6.00")
    } else if has("embedding-3-small") {
        ("0.02", "0.00")
    } else if has("embedding-3-large") {
        ("0.13", "0.00")
    } else if has("embedding-ada") {
        ("0.10", "0.00")
    } else if has("embedding") {
        ("0.10", "0.00")
    } else if has("dall-e") {
        ("0.00", "4.00")
    } else if has("whisper") {
        ("0.00", "0.006")
    } else {
        return None;
    };
    Some((input.to_string(), output.to_string()))
}
|
|
|
|
// Provider helpers -----------------------------------------------------------
|
|
|
|
/// Extract provider slug from OpenRouter model ID (e.g. "anthropic/claude-3.5-sonnet" → "anthropic").
///
/// Returns `"unknown"` for IDs without a `provider/` prefix. (The previous
/// `split('/').next()` form could never reach its `"unknown"` fallback,
/// because `split` always yields at least one item — a slashless ID was
/// silently treated as its own provider.)
fn extract_provider(model_id: &str) -> &str {
    model_id.split_once('/').map_or("unknown", |(provider, _)| provider)
}
|
|
|
|
/// Normalize a provider slug to a short canonical name.
///
/// Known slugs map to fixed `&'static str` names. Unknown slugs are interned
/// into a process-wide cache: the previous implementation called
/// `Box::leak` on every invocation, leaking a fresh allocation per model per
/// sync run; interning bounds the leak to one allocation per unique slug.
fn normalize_provider_name(slug: &str) -> &'static str {
    match slug {
        "openai" => "openai",
        "anthropic" => "anthropic",
        "google" | "google-ai" => "google",
        "mistralai" => "mistral",
        "meta-llama" | "meta" => "meta",
        "deepseek" => "deepseek",
        "azure" | "azure-openai" => "azure",
        "x-ai" | "xai" => "xai",
        other => {
            use std::collections::HashMap;
            use std::sync::{Mutex, OnceLock};
            // Interning table: unknown slug → leaked &'static str.
            static INTERNED: OnceLock<Mutex<HashMap<String, &'static str>>> = OnceLock::new();
            let mut cache = INTERNED
                .get_or_init(|| Mutex::new(HashMap::new()))
                .lock()
                .expect("provider-name intern cache poisoned");
            if let Some(&cached) = cache.get(other) {
                return cached;
            }
            // Leak exactly once per unique unknown slug.
            let leaked: &'static str = Box::leak(other.to_string().into_boxed_str());
            cache.insert(other.to_string(), leaked);
            leaked
        }
    }
}
|
|
|
|
/// Map a canonical provider name to its human-readable display name.
///
/// Unrecognized names pass through unchanged.
fn provider_display_name(name: &str) -> String {
    let display = match name {
        "openai" => "OpenAI",
        "anthropic" => "Anthropic",
        "google" => "Google DeepMind",
        "mistral" => "Mistral AI",
        "meta" => "Meta",
        "deepseek" => "DeepSeek",
        "azure" => "Microsoft Azure",
        "xai" => "xAI",
        other => other,
    };
    display.to_string()
}
|
|
|
|
// Upsert helpers -------------------------------------------------------------
|
|
|
|
async fn upsert_provider(
|
|
db: &AppDatabase,
|
|
slug: &str,
|
|
) -> Result<ProviderModel, AppError> {
|
|
let name = normalize_provider_name(slug);
|
|
let display = provider_display_name(name);
|
|
let now = Utc::now();
|
|
|
|
use models::agents::model_provider::Column as PCol;
|
|
if let Some(existing) = ProviderEntity::find()
|
|
.filter(PCol::Name.eq(name))
|
|
.one(db)
|
|
.await?
|
|
{
|
|
let mut active: models::agents::model_provider::ActiveModel = existing.into();
|
|
active.updated_at = Set(now);
|
|
active.update(db).await?;
|
|
Ok(ProviderEntity::find()
|
|
.filter(PCol::Name.eq(name))
|
|
.one(db)
|
|
.await?
|
|
.unwrap())
|
|
} else {
|
|
let active = models::agents::model_provider::ActiveModel {
|
|
id: Set(Uuid::now_v7()),
|
|
name: Set(name.to_string()),
|
|
display_name: Set(display.to_string()),
|
|
website: Set(None),
|
|
status: Set(ModelStatus::Active.to_string()),
|
|
created_at: Set(now),
|
|
updated_at: Set(now),
|
|
};
|
|
active.insert(db).await.map_err(AppError::from)
|
|
}
|
|
}
|
|
|
|
/// Upsert a model record and return (model, is_new).
|
|
async fn upsert_model(
|
|
db: &AppDatabase,
|
|
provider_id: Uuid,
|
|
model_id_str: &str,
|
|
or_model: &OpenRouterModel,
|
|
) -> Result<(models::agents::model::Model, bool), AppError> {
|
|
let now = Utc::now();
|
|
let modality_str = or_model
|
|
.architecture
|
|
.as_ref()
|
|
.and_then(|a| a.modality.as_deref());
|
|
let modality = infer_modality(model_id_str, modality_str);
|
|
let capability = infer_capability(model_id_str);
|
|
|
|
// OpenRouter context_length takes priority; fall back to inference
|
|
let context_length = or_model
|
|
.context_length
|
|
.map(|c| c as i64)
|
|
.unwrap_or_else(|| infer_context_length(model_id_str));
|
|
|
|
let max_output =
|
|
infer_max_output(model_id_str, or_model.top_provider.as_ref().and_then(|p| p.max_completion_tokens));
|
|
|
|
use models::agents::model::Column as MCol;
|
|
if let Some(existing) = ModelEntity::find()
|
|
.filter(MCol::ProviderId.eq(provider_id))
|
|
.filter(MCol::Name.eq(model_id_str))
|
|
.one(db)
|
|
.await?
|
|
{
|
|
let mut active: models::agents::model::ActiveModel = existing.clone().into();
|
|
active.context_length = Set(context_length);
|
|
active.max_output_tokens = Set(max_output);
|
|
active.status = Set(ModelStatus::Active.to_string());
|
|
active.updated_at = Set(now);
|
|
active.update(db).await?;
|
|
Ok((ModelEntity::find_by_id(existing.id).one(db).await?.unwrap(), false))
|
|
} else {
|
|
let active = models::agents::model::ActiveModel {
|
|
id: Set(Uuid::now_v7()),
|
|
provider_id: Set(provider_id),
|
|
name: Set(model_id_str.to_string()),
|
|
modality: Set(modality.to_string()),
|
|
capability: Set(capability.to_string()),
|
|
context_length: Set(context_length),
|
|
max_output_tokens: Set(max_output),
|
|
training_cutoff: Set(None),
|
|
is_open_source: Set(false),
|
|
status: Set(ModelStatus::Active.to_string()),
|
|
created_at: Set(now),
|
|
updated_at: Set(now),
|
|
..Default::default()
|
|
};
|
|
let inserted = active.insert(db).await.map_err(AppError::from)?;
|
|
Ok((inserted, true))
|
|
}
|
|
}
|
|
|
|
/// Upsert default version for a model.
|
|
async fn upsert_version(
|
|
db: &AppDatabase,
|
|
model_uuid: Uuid,
|
|
) -> Result<(models::agents::model_version::Model, bool), AppError> {
|
|
use models::agents::model_version::Column as VCol;
|
|
let now = Utc::now();
|
|
if let Some(existing) = VersionEntity::find()
|
|
.filter(VCol::ModelId.eq(model_uuid))
|
|
.filter(VCol::IsDefault.eq(true))
|
|
.one(db)
|
|
.await?
|
|
{
|
|
Ok((existing, false))
|
|
} else {
|
|
let active = models::agents::model_version::ActiveModel {
|
|
id: Set(Uuid::now_v7()),
|
|
model_id: Set(model_uuid),
|
|
version: Set("1".to_string()),
|
|
release_date: Set(None),
|
|
change_log: Set(None),
|
|
is_default: Set(true),
|
|
status: Set(ModelStatus::Active.to_string()),
|
|
created_at: Set(now),
|
|
};
|
|
let inserted = active.insert(db).await.map_err(AppError::from)?;
|
|
Ok((inserted, true))
|
|
}
|
|
}
|
|
|
|
/// Upsert pricing for a model version. Returns true if created.
///
/// Pricing is write-once per version: if any pricing row already exists the
/// function is a no-op (upstream price changes are NOT propagated). Prefers
/// OpenRouter-supplied prices, then the name-based fallback table, then
/// "0.00"/"0.00".
async fn upsert_pricing(
    db: &AppDatabase,
    version_uuid: Uuid,
    pricing: Option<&OpenRouterPricing>,
    model_name: &str,
) -> Result<bool, AppError> {
    use models::agents::model_pricing::Column as PCol;
    let existing = PricingEntity::find()
        .filter(PCol::ModelVersionId.eq(version_uuid))
        .one(db)
        .await?;
    if existing.is_some() {
        return Ok(false);
    }

    let (input_str, output_str) = if let Some(p) = pricing {
        // OpenRouter prices are per-million-tokens strings
        // NOTE(review): the comment above conflicts with the
        // `*_per_1k_tokens` column names below, and the fallback table's
        // values look like per-1M-token USD prices — the units are
        // inconsistent somewhere; confirm against the OpenRouter API docs
        // and the schema before doing arithmetic on these values.
        (p.prompt.clone(), p.completion.clone())
    } else if let Some((i, o)) = infer_pricing_fallback(model_name) {
        (i, o)
    } else {
        // Unknown model with no upstream pricing: record explicit zeros.
        ("0.00".to_string(), "0.00".to_string())
    };

    let active = models::agents::model_pricing::ActiveModel {
        // NOTE(review): truncating a v7 UUID's u128 to i64 risks collisions
        // and negative ids — presumably the table uses an i64 key; verify.
        id: Set(Uuid::now_v7().as_u128() as i64),
        model_version_id: Set(version_uuid),
        input_price_per_1k_tokens: Set(input_str),
        output_price_per_1k_tokens: Set(output_str),
        currency: Set("USD".to_string()),
        effective_from: Set(Utc::now()),
    };
    active.insert(db).await.map_err(AppError::from)?;
    Ok(true)
}
|
|
|
|
/// Upsert capability records for a model version. Returns count of new records.
|
|
async fn upsert_capabilities(
|
|
db: &AppDatabase,
|
|
version_uuid: Uuid,
|
|
model_name: &str,
|
|
) -> Result<i64, AppError> {
|
|
use models::agents::model_capability::Column as CCol;
|
|
let caps = infer_capability_list(model_name);
|
|
let now = Utc::now();
|
|
let mut created = 0i64;
|
|
|
|
for (cap_type, supported) in caps {
|
|
let exists = CapabilityEntity::find()
|
|
.filter(CCol::ModelVersionId.eq(version_uuid))
|
|
.filter(CCol::Capability.eq(cap_type.to_string()))
|
|
.one(db)
|
|
.await?;
|
|
if exists.is_some() {
|
|
continue;
|
|
}
|
|
let active = models::agents::model_capability::ActiveModel {
|
|
id: Set(Uuid::now_v7().as_u128() as i64),
|
|
model_version_id: Set(version_uuid.as_u128() as i64),
|
|
capability: Set(cap_type.to_string()),
|
|
is_supported: Set(supported),
|
|
created_at: Set(now),
|
|
};
|
|
active.insert(db).await.map_err(AppError::from)?;
|
|
created += 1;
|
|
}
|
|
Ok(created)
|
|
}
|
|
|
|
/// Upsert default parameter profile for a model version. Returns true if created.
|
|
async fn upsert_parameter_profile(
|
|
db: &AppDatabase,
|
|
version_uuid: Uuid,
|
|
model_name: &str,
|
|
) -> Result<bool, AppError> {
|
|
use models::agents::model_parameter_profile::Column as PCol;
|
|
let existing = ProfileEntity::find()
|
|
.filter(PCol::ModelVersionId.eq(version_uuid))
|
|
.one(db)
|
|
.await?;
|
|
if existing.is_some() {
|
|
return Ok(false);
|
|
}
|
|
|
|
let lower = model_name.to_lowercase();
|
|
let (t_min, t_max) = if lower.contains("o1") || lower.contains("o3") {
|
|
(1.0, 1.0)
|
|
} else {
|
|
(0.0, 2.0)
|
|
};
|
|
|
|
let active = models::agents::model_parameter_profile::ActiveModel {
|
|
id: Set(Uuid::now_v7().as_u128() as i64),
|
|
model_version_id: Set(version_uuid),
|
|
temperature_min: Set(t_min),
|
|
temperature_max: Set(t_max),
|
|
top_p_min: Set(0.0),
|
|
top_p_max: Set(1.0),
|
|
frequency_penalty_supported: Set(true),
|
|
presence_penalty_supported: Set(true),
|
|
};
|
|
active.insert(db).await.map_err(AppError::from)?;
|
|
Ok(true)
|
|
}
|
|
|
|
impl AppService {
    /// Sync models from OpenRouter into the local database.
    ///
    /// Calls OpenRouter's `GET /api/v1/models` using `OPENROUTER_API_KEY`
    /// (falls back to `AI_API_KEY` if not set), then upserts provider /
    /// model / version / pricing / capability / parameter-profile records.
    ///
    /// OpenRouter returns `context_length`, `pricing`, and `architecture.modality`
    /// per model — these drive all inference-free field population.
    /// Capabilities are still inferred from model name patterns.
    ///
    /// Each model is processed with independent upserts (no transaction):
    /// a mid-run failure leaves earlier models synced. Returns counters of
    /// rows created/updated per table.
    pub async fn sync_upstream_models(
        &self,
        _ctx: &Session,
    ) -> Result<SyncModelsResponse, AppError> {
        // Resolve API key: prefer OPENROUTER_API_KEY env var, fall back to AI_API_KEY.
        let api_key = std::env::var("OPENROUTER_API_KEY")
            .ok()
            .or_else(|| self.config.ai_api_key().ok())
            .ok_or_else(|| {
                AppError::InternalServerError(
                    "OPENROUTER_API_KEY or AI_API_KEY must be configured to sync models".into(),
                )
            })?;

        // One-shot client: a sync is a single GET, so no pooling/reuse needed.
        let client = reqwest::Client::new();
        let resp: OpenRouterResponse = client
            .get("https://openrouter.ai/api/v1/models")
            .header("Authorization", format!("Bearer {api_key}"))
            .send()
            .await
            .map_err(|e| AppError::InternalServerError(format!("OpenRouter API request failed: {}", e)))?
            // Turn non-2xx statuses into errors instead of trying to parse their bodies.
            .error_for_status()
            .map_err(|e| AppError::InternalServerError(format!("OpenRouter API error: {}", e)))?
            .json()
            .await
            .map_err(|e| AppError::InternalServerError(format!("Failed to parse OpenRouter response: {}", e)))?;

        // Counters reported back to the caller in SyncModelsResponse.
        let mut models_created = 0i64;
        let mut models_updated = 0i64;
        let mut versions_created = 0i64;
        let mut pricing_created = 0i64;
        let mut capabilities_created = 0i64;
        let mut profiles_created = 0i64;

        for or_model in resp.data {
            // Filter out openrouter/auto which has negative pricing
            if or_model.id == "openrouter/auto" {
                continue;
            }

            // Provider row first — the model row references its id.
            let provider_slug = extract_provider(&or_model.id);
            let provider = upsert_provider(&self.db, provider_slug).await?;

            let (model_record, is_new) =
                upsert_model(&self.db, provider.id, &or_model.id, &or_model).await?;

            if is_new {
                models_created += 1;
            } else {
                models_updated += 1;
            }

            // Every model gets exactly one default version.
            let (version_record, version_is_new) =
                upsert_version(&self.db, model_record.id).await?;
            if version_is_new {
                versions_created += 1;
            }

            // Pricing / capabilities / profile all hang off the version row.
            if upsert_pricing(
                &self.db,
                version_record.id,
                or_model.pricing.as_ref(),
                &or_model.id,
            )
            .await?
            {
                pricing_created += 1;
            }

            capabilities_created +=
                upsert_capabilities(&self.db, version_record.id, &or_model.id).await?;

            if upsert_parameter_profile(&self.db, version_record.id, &or_model.id).await? {
                profiles_created += 1;
            }
        }

        Ok(SyncModelsResponse {
            models_created,
            models_updated,
            versions_created,
            pricing_created,
            capabilities_created,
            profiles_created,
        })
    }
}
|