gitdataai/libs/room/src/ai.rs
2026-04-15 09:08:09 +08:00

114 lines
3.7 KiB
Rust

use crate::error::RoomError;
use crate::service::RoomService;
use crate::ws_context::WsUserContext;
use chrono::Utc;
use models::rooms::room_ai;
use sea_orm::*;
use uuid::Uuid;
impl RoomService {
    /// Lists every AI model configuration attached to `room_id`.
    ///
    /// Requires the caller (`ctx.user_id`) to be a member of the room.
    ///
    /// # Errors
    /// Returns `RoomError` if the membership check fails or the query errors.
    pub async fn room_ai_list(
        &self,
        room_id: Uuid,
        ctx: &WsUserContext,
    ) -> Result<Vec<super::RoomAiResponse>, RoomError> {
        self.require_room_member(room_id, ctx.user_id).await?;
        let models = room_ai::Entity::find()
            .filter(room_ai::Column::Room.eq(room_id))
            .all(&self.db)
            .await?;
        Ok(models
            .into_iter()
            .map(super::RoomAiResponse::from)
            .collect())
    }

    /// Creates or updates the AI configuration keyed by `(room_id, request.model)`.
    ///
    /// On update, only fields present (`Some`) in `request` overwrite the
    /// stored row; absent fields are left untouched. On insert, absent
    /// boolean flags default to `false` and `call_count` starts at zero.
    ///
    /// Requires the caller to be a room admin.
    ///
    /// NOTE(review): this is a read-then-write upsert, so two concurrent
    /// calls for the same key can race (the loser hits a duplicate-key
    /// error on insert). A database-level `ON CONFLICT` upsert would close
    /// the window — confirm whether that matters for this workload.
    ///
    /// # Errors
    /// Returns `RoomError` if the admin check fails or a DB operation errors.
    pub async fn room_ai_upsert(
        &self,
        room_id: Uuid,
        request: super::RoomAiUpsertRequest,
        ctx: &WsUserContext,
    ) -> Result<super::RoomAiResponse, RoomError> {
        self.require_room_admin(room_id, ctx.user_id).await?;
        let now = Utc::now();
        let existing = room_ai::Entity::find_by_id((room_id, request.model))
            .one(&self.db)
            .await?;
        let model = if let Some(existing) = existing {
            let mut active: room_ai::ActiveModel = existing.into();
            // Optional columns: only overwrite when the request carries a value.
            if request.version.is_some() {
                active.version = Set(request.version);
            }
            if request.history_limit.is_some() {
                active.history_limit = Set(request.history_limit);
            }
            if request.system_prompt.is_some() {
                active.system_prompt = Set(request.system_prompt);
            }
            if request.temperature.is_some() {
                active.temperature = Set(request.temperature);
            }
            if request.max_tokens.is_some() {
                active.max_tokens = Set(request.max_tokens);
            }
            if request.min_score.is_some() {
                active.min_score = Set(request.min_score);
            }
            // Boolean flags: `if let` replaces the original
            // `is_some()` + `unwrap_or(false)` pairing, whose fallback was
            // dead code (the guard guaranteed `Some`).
            if let Some(use_exact) = request.use_exact {
                active.use_exact = Set(use_exact);
            }
            if let Some(think) = request.think {
                active.think = Set(think);
            }
            if let Some(stream) = request.stream {
                active.stream = Set(stream);
            }
            active.updated_at = Set(now);
            active.update(&self.db).await?
        } else {
            room_ai::ActiveModel {
                room: Set(room_id),
                model: Set(request.model),
                version: Set(request.version),
                call_count: Set(0),
                last_call_at: Set(None),
                history_limit: Set(request.history_limit),
                system_prompt: Set(request.system_prompt),
                temperature: Set(request.temperature),
                max_tokens: Set(request.max_tokens),
                // On first insert, omitted flags default to `false`.
                use_exact: Set(request.use_exact.unwrap_or(false)),
                think: Set(request.think.unwrap_or(false)),
                stream: Set(request.stream.unwrap_or(false)),
                min_score: Set(request.min_score),
                created_at: Set(now),
                updated_at: Set(now),
            }
            .insert(&self.db)
            .await?
        };
        Ok(super::RoomAiResponse::from(model))
    }

    /// Detaches the AI configuration `(room_id, model_id)` from a room.
    ///
    /// Idempotent: deleting a non-existent pairing succeeds as a no-op —
    /// the affected-row count is intentionally ignored.
    ///
    /// Requires the caller to be a room admin.
    ///
    /// # Errors
    /// Returns `RoomError` if the admin check fails or the delete errors.
    pub async fn room_ai_delete(
        &self,
        room_id: Uuid,
        model_id: Uuid,
        ctx: &WsUserContext,
    ) -> Result<(), RoomError> {
        self.require_room_admin(room_id, ctx.user_id).await?;
        room_ai::Entity::delete_by_id((room_id, model_id))
            .exec(&self.db)
            .await?;
        Ok(())
    }
}