From 42f0a3b91b1b3bd34c0d9411b56c51b45666b684 Mon Sep 17 00:00:00 2001 From: ZhenYi <434836402@qq.com> Date: Tue, 14 Apr 2026 19:02:01 +0800 Subject: [PATCH] commit: no msg --- .agents/agents/code-reviewer.md | 42 + .claude/work.yaml | 4 + .dockerignore | 9 + .env.example | 109 + .gitignore | 15 + .idea/.gitignore | 10 + .idea/code.iml | 24 + .idea/modules.xml | 8 + .idea/vcs.xml | 6 + AGENT.md | 182 + Cargo.lock | 8862 ++++ Cargo.toml | 185 + README.md | 263 + apps/app/Cargo.toml | 34 + apps/app/src/args.rs | 12 + apps/app/src/logging.rs | 126 + apps/app/src/main.rs | 210 + apps/email/Cargo.toml | 30 + apps/email/src/main.rs | 84 + apps/git-hook/Cargo.toml | 27 + apps/git-hook/src/args.rs | 10 + apps/git-hook/src/main.rs | 142 + apps/gitserver/Cargo.toml | 30 + apps/gitserver/src/main.rs | 94 + apps/migrate/Cargo.toml | 13 + apps/migrate/src/main.rs | 102 + apps/operator/Cargo.toml | 30 + apps/operator/src/context.rs | 44 + apps/operator/src/controller/app.rs | 221 + apps/operator/src/controller/email_worker.rs | 68 + apps/operator/src/controller/git_hook.rs | 137 + apps/operator/src/controller/gitserver.rs | 164 + apps/operator/src/controller/helpers.rs | 96 + apps/operator/src/controller/migrate.rs | 171 + apps/operator/src/controller/mod.rs | 188 + apps/operator/src/crd.rs | 581 + apps/operator/src/lib.rs | 3 + apps/operator/src/main.rs | 100 + components.json | 25 + deploy/Chart.yaml | 13 + deploy/templates/NOTES.txt | 35 + deploy/templates/_helpers.tpl | 44 + deploy/templates/app-deployment.yaml | 111 + deploy/templates/configmap.yaml | 15 + deploy/templates/email-worker-deployment.yaml | 58 + deploy/templates/git-hook-deployment.yaml | 64 + deploy/templates/gitserver-deployment.yaml | 162 + deploy/templates/ingress.yaml | 46 + deploy/templates/migrate-job.yaml | 42 + deploy/templates/operator-deployment.yaml | 52 + deploy/templates/secret.yaml | 17 + deploy/values.yaml | 262 + docker/app.Dockerfile | 41 + docker/build.md | 171 + docker/build.sh | 52 + 
docker/crd/app-crd.yaml | 127 + docker/crd/email-worker-crd.yaml | 94 + docker/crd/git-hook-crd.yaml | 96 + docker/crd/gitserver-crd.yaml | 108 + docker/crd/migrate-crd.yaml | 87 + docker/email-worker.Dockerfile | 36 + docker/git-hook.Dockerfile | 36 + docker/gitserver.Dockerfile | 41 + docker/migrate.Dockerfile | 36 + docker/operator.Dockerfile | 39 + docker/operator/deployment.yaml | 128 + docker/operator/example/code-system.yaml | 280 + docs/ARCHITECTURE-LAYERS.md | 903 + eslint.config.js | 27 + index.html | 13 + libs/agent-tool-derive/Cargo.toml | 26 + libs/agent-tool-derive/src/lib.rs | 373 + libs/agent/Cargo.toml | 37 + libs/agent/chat/context.rs | 200 + libs/agent/chat/mod.rs | 61 + libs/agent/chat/service.rs | 655 + libs/agent/client.rs | 279 + libs/agent/compact/helpers.rs | 45 + libs/agent/compact/mod.rs | 8 + libs/agent/compact/service.rs | 467 + libs/agent/compact/types.rs | 130 + libs/agent/embed/client.rs | 209 + libs/agent/embed/mod.rs | 30 + libs/agent/embed/qdrant.rs | 312 + libs/agent/embed/service.rs | 232 + libs/agent/error.rs | 31 + libs/agent/lib.rs | 36 + libs/agent/perception/active.rs | 167 + libs/agent/perception/auto.rs | 178 + libs/agent/perception/mod.rs | 131 + libs/agent/perception/passive.rs | 144 + libs/agent/perception/vector.rs | 163 + libs/agent/react/hooks.rs | 130 + libs/agent/react/loop_core.rs | 439 + libs/agent/react/mod.rs | 13 + libs/agent/react/types.rs | 94 + libs/agent/task/mod.rs | 22 + libs/agent/task/service.rs | 209 + libs/agent/tokent.rs | 199 + libs/agent/tool/call.rs | 108 + libs/agent/tool/context.rs | 133 + libs/agent/tool/definition.rs | 89 + libs/agent/tool/examples.rs | 113 + libs/agent/tool/executor.rs | 142 + libs/agent/tool/mod.rs | 27 + libs/agent/tool/registry.rs | 95 + libs/api/Cargo.toml | 47 + libs/api/agent/code_review.rs | 39 + libs/api/agent/mod.rs | 120 + libs/api/agent/model.rs | 132 + libs/api/agent/model_capability.rs | 120 + libs/api/agent/model_parameter_profile.rs | 126 + 
libs/api/agent/model_pricing.rs | 120 + libs/api/agent/model_version.rs | 132 + libs/api/agent/pr_summary.rs | 33 + libs/api/agent/provider.rs | 117 + libs/api/auth/captcha.rs | 26 + libs/api/auth/email.rs | 62 + libs/api/auth/login.rs | 28 + libs/api/auth/logout.rs | 24 + libs/api/auth/me.rs | 24 + libs/api/auth/mod.rs | 68 + libs/api/auth/password.rs | 53 + libs/api/auth/register.rs | 48 + libs/api/auth/totp.rs | 94 + libs/api/auth/ws_token.rs | 46 + libs/api/error.rs | 113 + libs/api/gen_api.rs | 11 + libs/api/git/archive.rs | 195 + libs/api/git/blame.rs | 37 + libs/api/git/blob.rs | 213 + libs/api/git/branch.rs | 579 + libs/api/git/branch_protection.rs | 177 + libs/api/git/commit.rs | 1002 + libs/api/git/contributors.rs | 33 + libs/api/git/diff.rs | 232 + libs/api/git/init.rs | 113 + libs/api/git/mod.rs | 417 + libs/api/git/refs.rs | 226 + libs/api/git/repo.rs | 542 + libs/api/git/star.rs | 149 + libs/api/git/tag.rs | 434 + libs/api/git/tree.rs | 278 + libs/api/git/watch.rs | 153 + libs/api/git/webhook.rs | 153 + libs/api/issue/assignee.rs | 89 + libs/api/issue/comment.rs | 159 + libs/api/issue/comment_reaction.rs | 96 + libs/api/issue/issue_label.rs | 87 + libs/api/issue/label.rs | 76 + libs/api/issue/mod.rs | 359 + libs/api/issue/pull_request.rs | 90 + libs/api/issue/reaction.rs | 87 + libs/api/issue/repo.rs | 89 + libs/api/issue/subscriber.rs | 84 + libs/api/lib.rs | 16 + libs/api/openapi.rs | 714 + libs/api/project/activity.rs | 88 + libs/api/project/audit.rs | 82 + libs/api/project/billing.rs | 53 + libs/api/project/board.rs | 324 + libs/api/project/info.rs | 25 + libs/api/project/init.rs | 24 + libs/api/project/invitation.rs | 168 + libs/api/project/join_answers.rs | 59 + libs/api/project/join_request.rs | 142 + libs/api/project/join_settings.rs | 53 + libs/api/project/labels.rs | 129 + libs/api/project/like.rs | 116 + libs/api/project/members.rs | 88 + libs/api/project/mod.rs | 281 + libs/api/project/repo.rs | 57 + libs/api/project/settings.rs | 82 + 
libs/api/project/transfer_repo.rs | 33 + libs/api/project/watch.rs | 116 + libs/api/pull_request/merge.rs | 144 + libs/api/pull_request/mod.rs | 136 + libs/api/pull_request/pull_request.rs | 682 + libs/api/pull_request/review.rs | 122 + libs/api/pull_request/review_comment.rs | 229 + libs/api/pull_request/review_request.rs | 121 + libs/api/room/ai.rs | 104 + libs/api/room/category.rs | 137 + libs/api/room/draft_and_history.rs | 97 + libs/api/room/member.rs | 208 + libs/api/room/message.rs | 189 + libs/api/room/mod.rs | 173 + libs/api/room/notification.rs | 132 + libs/api/room/pin.rs | 103 + libs/api/room/reaction.rs | 155 + libs/api/room/room.rs | 176 + libs/api/room/thread.rs | 121 + libs/api/room/ws.rs | 705 + libs/api/room/ws_handler.rs | 729 + libs/api/room/ws_types.rs | 642 + libs/api/room/ws_universal.rs | 442 + libs/api/route.rs | 29 + libs/api/search/mod.rs | 7 + libs/api/search/service.rs | 31 + libs/api/skill.rs | 244 + libs/api/user/access_key.rs | 63 + libs/api/user/chpc.rs | 48 + libs/api/user/mod.rs | 118 + libs/api/user/notification.rs | 42 + libs/api/user/preferences.rs | 42 + libs/api/user/profile.rs | 62 + libs/api/user/projects.rs | 48 + libs/api/user/repository.rs | 48 + libs/api/user/ssh_key.rs | 110 + libs/api/user/subscribe.rs | 133 + libs/api/user/user_info.rs | 25 + libs/api/workspace/billing.rs | 84 + libs/api/workspace/info.rs | 42 + libs/api/workspace/init.rs | 26 + libs/api/workspace/members.rs | 205 + libs/api/workspace/mod.rs | 68 + libs/api/workspace/projects.rs | 32 + libs/api/workspace/settings.rs | 55 + libs/api/workspace/stats.rs | 29 + libs/avatar/Cargo.toml | 23 + libs/avatar/lib.rs | 45 + libs/config/Cargo.toml | 24 + libs/config/ai.rs | 16 + libs/config/app.rs | 23 + libs/config/avatar.rs | 17 + libs/config/database.rs | 70 + libs/config/domain.rs | 29 + libs/config/embed.rs | 37 + libs/config/hook.rs | 88 + libs/config/lib.rs | 49 + libs/config/logs.rs | 94 + libs/config/qdrant.rs | 17 + libs/config/redis.rs | 34 + 
libs/config/smtp.rs | 52 + libs/config/ssh.rs | 38 + libs/db/Cargo.toml | 26 + libs/db/cache.rs | 37 + libs/db/database.rs | 194 + libs/db/lib.rs | 2 + libs/email/Cargo.toml | 25 + libs/email/lib.rs | 77 + libs/git/Cargo.toml | 54 + libs/git/archive/mod.rs | 3 + libs/git/archive/ops.rs | 552 + libs/git/archive/types.rs | 78 + libs/git/blame/mod.rs | 2 + libs/git/blame/ops.rs | 262 + libs/git/blob/mod.rs | 3 + libs/git/blob/ops.rs | 80 + libs/git/blob/types.rs | 41 + libs/git/branch/merge.rs | 150 + libs/git/branch/mod.rs | 5 + libs/git/branch/ops.rs | 198 + libs/git/branch/query.rs | 265 + libs/git/branch/types.rs | 29 + libs/git/commit/cherry_pick.rs | 105 + libs/git/commit/create.rs | 314 + libs/git/commit/graph.rs | 218 + libs/git/commit/meta.rs | 217 + libs/git/commit/mod.rs | 10 + libs/git/commit/query.rs | 231 + libs/git/commit/rebase.rs | 136 + libs/git/commit/revert.rs | 156 + libs/git/commit/traverse.rs | 218 + libs/git/commit/types.rs | 163 + libs/git/config/mod.rs | 3 + libs/git/config/ops.rs | 93 + libs/git/config/types.rs | 16 + libs/git/description/mod.rs | 39 + libs/git/diff/mod.rs | 3 + libs/git/diff/ops.rs | 510 + libs/git/diff/types.rs | 313 + libs/git/domain.rs | 58 + libs/git/error.rs | 98 + libs/git/hook/event.rs | 7 + libs/git/hook/mod.rs | 70 + libs/git/hook/pool/log.rs | 103 + libs/git/hook/pool/metrics.rs | 42 + libs/git/hook/pool/mod.rs | 481 + libs/git/hook/pool/redis.rs | 165 + libs/git/hook/pool/types.rs | 40 + libs/git/hook/sync/branch.rs | 141 + libs/git/hook/sync/commit.rs | 245 + libs/git/hook/sync/fsck.rs | 140 + libs/git/hook/sync/gc.rs | 29 + libs/git/hook/sync/lfs.rs | 89 + libs/git/hook/sync/lock.rs | 63 + libs/git/hook/sync/mod.rs | 364 + libs/git/hook/sync/remote.rs | 0 libs/git/hook/sync/status.rs | 98 + libs/git/hook/sync/tag.rs | 99 + libs/git/hook/webhook_dispatch.rs | 410 + libs/git/http/auth.rs | 66 + libs/git/http/handler.rs | 321 + libs/git/http/lfs.rs | 458 + libs/git/http/lfs_routes.rs | 222 + libs/git/http/mod.rs | 
126 + libs/git/http/rate_limit.rs | 142 + libs/git/http/routes.rs | 107 + libs/git/http/utils.rs | 80 + libs/git/lfs/mod.rs | 4 + libs/git/lfs/ops.rs | 320 + libs/git/lfs/types.rs | 151 + libs/git/lib.rs | 45 + libs/git/merge/mod.rs | 3 + libs/git/merge/ops.rs | 345 + libs/git/merge/types.rs | 112 + libs/git/ref_utils.rs | 35 + libs/git/reference/mod.rs | 3 + libs/git/reference/ops.rs | 257 + libs/git/reference/types.rs | 18 + libs/git/ssh/authz.rs | 307 + libs/git/ssh/handle.rs | 889 + libs/git/ssh/mod.rs | 288 + libs/git/ssh/rate_limit.rs | 134 + libs/git/ssh/server.rs | 109 + libs/git/tags/mod.rs | 4 + libs/git/tags/ops.rs | 228 + libs/git/tags/query.rs | 201 + libs/git/tags/types.rs | 21 + libs/git/tree/mod.rs | 3 + libs/git/tree/query.rs | 133 + libs/git/tree/types.rs | 54 + libs/migrate/Cargo.toml | 27 + libs/migrate/lib.rs | 248 + ...250628_000001_create_room_notifications.rs | 37 + libs/migrate/m20250628_000002_create_user.rs | 30 + .../m20250628_000003_create_user_2fa.rs | 30 + ...0250628_000004_create_user_activity_log.rs | 30 + .../m20250628_000005_create_user_email.rs | 30 + ...0250628_000006_create_user_notification.rs | 30 + .../m20250628_000007_create_user_password.rs | 30 + ...50628_000008_create_user_password_reset.rs | 30 + ...20250628_000009_create_user_preferences.rs | 30 + .../m20250628_000010_create_user_relation.rs | 30 + .../m20250628_000011_create_user_ssh_key.rs | 30 + .../m20250628_000012_create_user_token.rs | 30 + .../m20250628_000013_create_project.rs | 30 + ...250628_000014_create_project_access_log.rs | 30 + ...0250628_000015_create_project_audit_log.rs | 30 + ...m20250628_000016_create_project_billing.rs | 30 + ...8_000017_create_project_billing_history.rs | 30 + .../m20250628_000018_create_project_follow.rs | 30 + ...0628_000019_create_project_history_name.rs | 30 + .../m20250628_000020_create_project_label.rs | 30 + .../m20250628_000021_create_project_like.rs | 30 + ...00022_create_project_member_invitations.rs | 30 + 
...0023_create_project_member_join_answers.rs | 30 + ...0024_create_project_member_join_request.rs | 30 + ...025_create_project_member_join_settings.rs | 30 + ...m20250628_000026_create_project_members.rs | 30 + .../m20250628_000027_create_project_watch.rs | 30 + libs/migrate/m20250628_000028_create_repo.rs | 30 + .../m20250628_000029_create_repo_branch.rs | 30 + ...50628_000030_create_repo_branch_protect.rs | 30 + ...0250628_000031_create_repo_collaborator.rs | 30 + .../m20250628_000032_create_repo_commit.rs | 30 + .../m20250628_000033_create_repo_fork.rs | 30 + ...0250628_000034_create_repo_history_name.rs | 30 + .../m20250628_000035_create_repo_hook.rs | 30 + .../m20250628_000036_create_repo_lfs_lock.rs | 30 + ...m20250628_000037_create_repo_lfs_object.rs | 30 + .../m20250628_000038_create_repo_lock.rs | 30 + .../m20250628_000039_create_repo_star.rs | 30 + .../m20250628_000040_create_repo_tag.rs | 30 + .../m20250628_000041_create_repo_upstream.rs | 30 + .../m20250628_000042_create_repo_watch.rs | 30 + .../m20250628_000043_create_repo_webhook.rs | 30 + libs/migrate/m20250628_000044_create_issue.rs | 30 + .../m20250628_000045_create_issue_assignee.rs | 30 + .../m20250628_000046_create_issue_comment.rs | 30 + ...28_000047_create_issue_comment_reaction.rs | 30 + .../m20250628_000048_create_issue_label.rs | 30 + ...250628_000049_create_issue_pull_request.rs | 30 + .../m20250628_000050_create_issue_reaction.rs | 30 + .../m20250628_000051_create_issue_repo.rs | 30 + ...20250628_000052_create_issue_subscriber.rs | 30 + .../m20250628_000053_create_pull_request.rs | 30 + ...50628_000054_create_pull_request_commit.rs | 30 + ...50628_000055_create_pull_request_review.rs | 30 + ...0056_create_pull_request_review_comment.rs | 30 + .../m20250628_000057_create_room_category.rs | 30 + libs/migrate/m20250628_000058_create_room.rs | 30 + .../m20250628_000059_create_room_ai.rs | 30 + .../m20250628_000060_create_room_member.rs | 30 + .../m20250628_000061_create_room_message.rs | 30 
+ .../m20250628_000062_create_room_pin.rs | 30 + .../m20250628_000063_create_room_thread.rs | 30 + .../m20250628_000064_create_ai_model.rs | 30 + ...50628_000065_create_ai_model_capability.rs | 30 + ...00066_create_ai_model_parameter_profile.rs | 30 + ...20250628_000067_create_ai_model_pricing.rs | 30 + ...0250628_000068_create_ai_model_provider.rs | 30 + ...20250628_000069_create_ai_model_version.rs | 30 + .../m20250628_000070_create_ai_session.rs | 30 + .../m20250628_000071_create_ai_tool_auth.rs | 30 + .../m20250628_000072_create_ai_tool_call.rs | 30 + libs/migrate/m20250628_000073_create_label.rs | 30 + .../migrate/m20250628_000074_create_notify.rs | 30 + ...50628_000075_fix_column_types_and_names.rs | 27 + ...0250628_000076_create_user_email_change.rs | 30 + ...20250628_000077_create_project_activity.rs | 28 + ...8_000078_add_room_member_do_not_disturb.rs | 31 + ...628_000079_add_room_message_in_reply_to.rs | 30 + ...000080_add_message_reactions_and_search.rs | 28 + ...0250628_000081_add_message_edit_history.rs | 22 + ...28_000082_add_pr_review_comment_resolve.rs | 32 + .../m20250628_000083_add_pr_review_request.rs | 30 + ...60407_000001_extend_repo_branch_protect.rs | 30 + .../m20260407_000002_create_project_board.rs | 30 + ...20260407_000003_add_repo_ai_code_review.rs | 30 + .../m20260411_000001_create_workspace.rs | 30 + ...0411_000002_create_workspace_membership.rs | 30 + ...0411_000003_add_workspace_id_to_project.rs | 30 + ...dd_invite_token_to_workspace_membership.rs | 32 + ...0260412_000001_create_workspace_billing.rs | 37 + ...000002_create_workspace_billing_history.rs | 30 + .../m20260412_000003_create_project_skill.rs | 23 + .../m20260413_000001_add_skill_commit_blob.rs | 23 + .../m20260414_000001_create_agent_task.rs | 23 + ...50628_000001_create_room_notifications.sql | 31 + .../sql/m20250628_000002_create_user.sql | 13 + .../sql/m20250628_000003_create_user_2fa.sql | 9 + ...250628_000004_create_user_activity_log.sql | 12 + 
.../m20250628_000005_create_user_email.sql | 7 + ...250628_000006_create_user_notification.sql | 15 + .../m20250628_000007_create_user_password.sql | 8 + ...0628_000008_create_user_password_reset.sql | 9 + ...0250628_000009_create_user_preferences.sql | 10 + .../m20250628_000010_create_user_relation.sql | 10 + .../m20250628_000011_create_user_ssh_key.sql | 17 + .../m20250628_000012_create_user_token.sql | 13 + .../sql/m20250628_000013_create_project.sql | 14 + ...50628_000014_create_project_access_log.sql | 12 + ...250628_000015_create_project_audit_log.sql | 13 + ...20250628_000016_create_project_billing.sql | 8 + ..._000017_create_project_billing_history.sql | 12 + ...m20250628_000018_create_project_follow.sql | 9 + ...628_000019_create_project_history_name.sql | 8 + .../m20250628_000020_create_project_label.sql | 8 + .../m20250628_000021_create_project_like.sql | 6 + ...0022_create_project_member_invitations.sql | 14 + ...023_create_project_member_join_answers.sql | 11 + ...024_create_project_member_join_request.sql | 15 + ...25_create_project_member_join_settings.sql | 9 + ...20250628_000026_create_project_members.sql | 10 + .../m20250628_000027_create_project_watch.sql | 11 + .../sql/m20250628_000028_create_repo.sql | 15 + .../m20250628_000029_create_repo_branch.sql | 12 + ...0628_000030_create_repo_branch_protect.sql | 14 + ...250628_000031_create_repo_collaborator.sql | 7 + .../m20250628_000032_create_repo_commit.sql | 17 + .../sql/m20250628_000033_create_repo_fork.sql | 10 + ...250628_000034_create_repo_history_name.sql | 9 + .../sql/m20250628_000035_create_repo_hook.sql | 9 + .../m20250628_000036_create_repo_lfs_lock.sql | 9 + ...20250628_000037_create_repo_lfs_object.sql | 11 + .../sql/m20250628_000038_create_repo_lock.sql | 9 + .../sql/m20250628_000039_create_repo_star.sql | 9 + .../sql/m20250628_000040_create_repo_tag.sql | 12 + .../m20250628_000041_create_repo_upstream.sql | 15 + .../m20250628_000042_create_repo_watch.sql | 12 + 
.../m20250628_000043_create_repo_webhook.sql | 13 + .../sql/m20250628_000044_create_issue.sql | 18 + ...m20250628_000045_create_issue_assignee.sql | 6 + .../m20250628_000046_create_issue_comment.sql | 10 + ...8_000047_create_issue_comment_reaction.sql | 7 + .../m20250628_000048_create_issue_label.sql | 6 + ...50628_000049_create_issue_pull_request.sql | 7 + ...m20250628_000050_create_issue_reaction.sql | 7 + .../m20250628_000051_create_issue_repo.sql | 6 + ...0250628_000052_create_issue_subscriber.sql | 7 + .../m20250628_000053_create_pull_request.sql | 21 + ...0628_000054_create_pull_request_commit.sql | 14 + ...0628_000055_create_pull_request_review.sql | 11 + ...056_create_pull_request_review_comment.sql | 15 + .../m20250628_000057_create_room_category.sql | 10 + .../sql/m20250628_000058_create_room.sql | 13 + .../sql/m20250628_000059_create_room_ai.sql | 17 + .../m20250628_000060_create_room_member.sql | 9 + .../m20250628_000061_create_room_message.sql | 18 + .../sql/m20250628_000062_create_room_pin.sql | 7 + .../m20250628_000063_create_room_thread.sql | 13 + .../sql/m20250628_000064_create_ai_model.sql | 16 + ...0628_000065_create_ai_model_capability.sql | 9 + ...0066_create_ai_model_parameter_profile.sql | 13 + ...0250628_000067_create_ai_model_pricing.sql | 10 + ...250628_000068_create_ai_model_provider.sql | 9 + ...0250628_000069_create_ai_model_version.sql | 12 + .../m20250628_000070_create_ai_session.sql | 16 + .../m20250628_000071_create_ai_tool_auth.sql | 16 + .../m20250628_000072_create_ai_tool_call.sql | 20 + .../sql/m20250628_000073_create_label.sql | 8 + .../sql/m20250628_000074_create_notify.sql | 15 + ...0628_000075_fix_column_types_and_names.sql | 1 + ...250628_000076_create_user_email_change.sql | 10 + ...0250628_000077_create_project_activity.sql | 18 + ..._000078_add_room_member_do_not_disturb.sql | 9 + ...28_000079_add_room_message_in_reply_to.sql | 2 + ...00080_add_message_reactions_and_search.sql | 48 + 
...250628_000081_add_message_edit_history.sql | 12 + ...8_000082_add_pr_review_comment_resolve.sql | 6 + ...m20250628_000083_add_pr_review_request.sql | 11 + ...0407_000001_extend_repo_branch_protect.sql | 5 + .../m20260407_000002_create_project_board.sql | 44 + ...0260407_000003_add_repo_ai_code_review.sql | 1 + .../sql/m20260411_000001_create_workspace.sql | 18 + ...411_000002_create_workspace_membership.sql | 13 + ...411_000003_add_workspace_id_to_project.sql | 4 + ...d_invite_token_to_workspace_membership.sql | 5 + ...260412_000001_create_workspace_billing.sql | 9 + ...00002_create_workspace_billing_history.sql | 14 + .../m20260412_000003_create_project_skill.sql | 21 + ...m20260413_000001_add_skill_commit_blob.sql | 7 + .../m20260414_000001_create_agent_task.sql | 34 + libs/models/Cargo.toml | 28 + libs/models/agent_task/mod.rs | 154 + libs/models/agents/mod.rs | 171 + libs/models/agents/model.rs | 42 + libs/models/agents/model_capability.rs | 27 + libs/models/agents/model_parameter_profile.rs | 22 + libs/models/agents/model_pricing.rs | 28 + libs/models/agents/model_provider.rs | 29 + libs/models/agents/model_version.rs | 30 + libs/models/ai/ai_session.rs | 26 + libs/models/ai/ai_tool_auth.rs | 28 + libs/models/ai/ai_tool_call.rs | 37 + libs/models/ai/mod.rs | 46 + libs/models/issues/issue.rs | 39 + libs/models/issues/issue_assignee.rs | 18 + libs/models/issues/issue_comment.rs | 20 + libs/models/issues/issue_comment_reaction.rs | 28 + libs/models/issues/issue_label.rs | 18 + libs/models/issues/issue_pull_request.rs | 20 + libs/models/issues/issue_reaction.rs | 28 + libs/models/issues/issue_repo.rs | 18 + libs/models/issues/issue_subscriber.rs | 19 + libs/models/issues/mod.rs | 81 + libs/models/lib.rs | 37 + libs/models/projects/mod.rs | 73 + libs/models/projects/project.rs | 24 + libs/models/projects/project_access_log.rs | 63 + libs/models/projects/project_activity.rs | 195 + libs/models/projects/project_audit_log.rs | 63 + 
libs/models/projects/project_billing.rs | 25 + .../projects/project_billing_history.rs | 27 + libs/models/projects/project_board.rs | 22 + libs/models/projects/project_board_card.rs | 28 + libs/models/projects/project_board_column.rs | 20 + libs/models/projects/project_follow.rs | 18 + libs/models/projects/project_history_name.rs | 18 + libs/models/projects/project_label.rs | 20 + libs/models/projects/project_like.rs | 16 + .../projects/project_member_invitations.rs | 32 + .../projects/project_member_join_answers.rs | 21 + .../projects/project_member_join_request.rs | 57 + .../projects/project_member_join_settings.rs | 22 + libs/models/projects/project_members.rs | 29 + libs/models/projects/project_skill.rs | 115 + libs/models/projects/project_watch.rs | 20 + libs/models/pull_request/mod.rs | 93 + libs/models/pull_request/pull_request.rs | 37 + .../pull_request/pull_request_commit.rs | 27 + .../pull_request/pull_request_review.rs | 32 + .../pull_request_review_comment.rs | 37 + .../pull_request_review_request.rs | 27 + libs/models/repos/mod.rs | 155 + libs/models/repos/repo.rs | 26 + libs/models/repos/repo_branch.rs | 22 + libs/models/repos/repo_branch_protect.rs | 28 + libs/models/repos/repo_collaborator.rs | 27 + libs/models/repos/repo_commit.rs | 27 + libs/models/repos/repo_fork.rs | 19 + libs/models/repos/repo_history_name.rs | 21 + libs/models/repos/repo_hook.rs | 21 + libs/models/repos/repo_lfs_lock.rs | 31 + libs/models/repos/repo_lfs_object.rs | 23 + libs/models/repos/repo_lock.rs | 31 + libs/models/repos/repo_star.rs | 20 + libs/models/repos/repo_tag.rs | 26 + libs/models/repos/repo_upstream.rs | 34 + libs/models/repos/repo_watch.rs | 23 + libs/models/repos/repo_webhook.rs | 25 + libs/models/rooms/mod.rs | 135 + libs/models/rooms/room.rs | 76 + libs/models/rooms/room_ai.rs | 55 + libs/models/rooms/room_category.rs | 30 + libs/models/rooms/room_member.rs | 49 + libs/models/rooms/room_message.rs | 68 + .../models/rooms/room_message_edit_history.rs | 35 + 
libs/models/rooms/room_message_reaction.rs | 74 + libs/models/rooms/room_notifications.rs | 85 + libs/models/rooms/room_pin.rs | 44 + libs/models/rooms/room_thread.rs | 44 + libs/models/system/label.rs | 19 + libs/models/system/mod.rs | 5 + libs/models/system/notify.rs | 25 + libs/models/users/mod.rs | 25 + libs/models/users/user.rs | 23 + libs/models/users/user_2fa.rs | 49 + libs/models/users/user_activity_log.rs | 69 + libs/models/users/user_email.rs | 17 + libs/models/users/user_email_change.rs | 20 + libs/models/users/user_notification.rs | 57 + libs/models/users/user_password.rs | 20 + libs/models/users/user_password_reset.rs | 19 + libs/models/users/user_preferences.rs | 22 + libs/models/users/user_relation.rs | 47 + libs/models/users/user_ssh_key.rs | 57 + libs/models/users/user_token.rs | 24 + libs/models/workspaces/mod.rs | 9 + libs/models/workspaces/workspace.rs | 27 + libs/models/workspaces/workspace_billing.rs | 24 + .../workspaces/workspace_billing_history.rs | 27 + .../models/workspaces/workspace_membership.rs | 61 + libs/queue/Cargo.toml | 34 + libs/queue/lib.rs | 15 + libs/queue/producer.rs | 228 + libs/queue/types.rs | 117 + libs/queue/worker.rs | 294 + libs/room/Cargo.toml | 48 + libs/room/src/ai.rs | 113 + libs/room/src/category.rs | 168 + libs/room/src/connection.rs | 998 + libs/room/src/draft_and_history.rs | 198 + libs/room/src/error.rs | 34 + libs/room/src/helpers.rs | 451 + libs/room/src/lib.rs | 34 + libs/room/src/member.rs | 370 + libs/room/src/message.rs | 376 + libs/room/src/metrics.rs | 193 + libs/room/src/notification.rs | 322 + libs/room/src/pin.rs | 98 + libs/room/src/reaction.rs | 322 + libs/room/src/room.rs | 300 + libs/room/src/room_ai_queue.rs | 225 + libs/room/src/search.rs | 284 + libs/room/src/service.rs | 1174 + libs/room/src/thread.rs | 105 + libs/room/src/types.rs | 345 + libs/room/src/ws_context.rs | 11 + libs/rpc/Cargo.toml | 20 + libs/rpc/lib.rs | 14 + libs/service/Cargo.toml | 59 + libs/service/agent/billing.rs | 197 + 
libs/service/agent/code_review.rs | 544 + libs/service/agent/mod.rs | 11 + libs/service/agent/model.rs | 197 + libs/service/agent/model_capability.rs | 137 + libs/service/agent/model_parameter_profile.rs | 163 + libs/service/agent/model_pricing.rs | 148 + libs/service/agent/model_version.rs | 158 + libs/service/agent/pr_summary.rs | 374 + libs/service/agent/provider.rs | 138 + libs/service/agent/sync.rs | 638 + libs/service/auth/captcha.rs | 67 + libs/service/auth/email.rs | 185 + libs/service/auth/login.rs | 122 + libs/service/auth/logout.rs | 28 + libs/service/auth/me.rs | 29 + libs/service/auth/mod.rs | 9 + libs/service/auth/password.rs | 188 + libs/service/auth/register.rs | 171 + libs/service/auth/rsa.rs | 66 + libs/service/auth/totp.rs | 431 + libs/service/error.rs | 264 + libs/service/git/archive.rs | 303 + libs/service/git/blame.rs | 245 + libs/service/git/blob.rs | 485 + libs/service/git/blocking.rs | 15 + libs/service/git/branch.rs | 915 + libs/service/git/branch_protection.rs | 349 + libs/service/git/commit.rs | 1361 + libs/service/git/contributors.rs | 132 + libs/service/git/diff.rs | 632 + libs/service/git/init.rs | 66 + libs/service/git/mod.rs | 20 + libs/service/git/refs.rs | 356 + libs/service/git/repo.rs | 463 + libs/service/git/star.rs | 212 + libs/service/git/tag.rs | 648 + libs/service/git/tree.rs | 351 + libs/service/git/watch.rs | 219 + libs/service/git/webhook.rs | 298 + libs/service/issue/assignee.rs | 247 + libs/service/issue/comment.rs | 373 + libs/service/issue/issue.rs | 587 + libs/service/issue/label.rs | 353 + libs/service/issue/mod.rs | 23 + libs/service/issue/pull_request.rs | 208 + libs/service/issue/reaction.rs | 343 + libs/service/issue/repo.rs | 150 + libs/service/issue/subscriber.rs | 227 + libs/service/lib.rs | 217 + libs/service/project/activity.rs | 445 + libs/service/project/audit.rs | 183 + libs/service/project/avatar.rs | 59 + libs/service/project/billing.rs | 182 + libs/service/project/board.rs | 496 + 
libs/service/project/can_use.rs | 42 + libs/service/project/info.rs | 143 + libs/service/project/init.rs | 140 + libs/service/project/invitation.rs | 503 + libs/service/project/join_answers.rs | 134 + libs/service/project/join_request.rs | 506 + libs/service/project/join_settings.rs | 139 + libs/service/project/labels.rs | 354 + libs/service/project/like.rs | 199 + libs/service/project/members.rs | 318 + libs/service/project/mod.rs | 20 + libs/service/project/repo.rs | 337 + libs/service/project/repo_permission.rs | 163 + libs/service/project/settings.rs | 197 + libs/service/project/standard.rs | 61 + libs/service/project/transfer_repo.rs | 193 + libs/service/project/watch.rs | 233 + libs/service/pull_request/merge.rs | 454 + libs/service/pull_request/mod.rs | 31 + libs/service/pull_request/pull_request.rs | 841 + libs/service/pull_request/review.rs | 328 + libs/service/pull_request/review_comment.rs | 561 + libs/service/pull_request/review_request.rs | 325 + libs/service/search/mod.rs | 3 + libs/service/search/service.rs | 468 + libs/service/skill/info.rs | 104 + libs/service/skill/manage.rs | 174 + libs/service/skill/mod.rs | 9 + libs/service/skill/scan.rs | 53 + libs/service/skill/scanner.rs | 238 + libs/service/user/access_key.rs | 241 + libs/service/user/avatar.rs | 59 + libs/service/user/chpc.rs | 223 + libs/service/user/mod.rs | 12 + libs/service/user/notification.rs | 200 + libs/service/user/notify.rs | 1 + libs/service/user/preferences.rs | 151 + libs/service/user/profile.rs | 118 + libs/service/user/projects.rs | 142 + libs/service/user/repository.rs | 117 + libs/service/user/ssh_key.rs | 396 + libs/service/user/subscribe.rs | 157 + libs/service/user/user_info.rs | 109 + libs/service/utils/mod.rs | 4 + libs/service/utils/project.rs | 131 + libs/service/utils/repo.rs | 83 + libs/service/utils/user.rs | 44 + libs/service/utils/workspace.rs | 74 + libs/service/webhook_dispatch.rs | 140 + libs/service/workspace/billing.rs | 265 + 
libs/service/workspace/info.rs | 329 + libs/service/workspace/init.rs | 97 + libs/service/workspace/members.rs | 481 + libs/service/workspace/mod.rs | 5 + libs/service/workspace/settings.rs | 82 + libs/service/ws_token.rs | 103 + libs/session/Cargo.toml | 32 + libs/session/config.rs | 213 + libs/session/lib.rs | 15 + libs/session/middleware.rs | 319 + libs/session/session.rs | 403 + libs/session/session_ext.rs | 35 + libs/session/storage/format.rs | 75 + libs/session/storage/interface.rs | 91 + libs/session/storage/mod.rs | 14 + libs/session/storage/redis_cluster.rs | 180 + libs/session/storage/session_key.rs | 48 + libs/session/storage/utils.rs | 10 + libs/transport/Cargo.toml | 20 + libs/transport/lib.rs | 14 + libs/webhook/Cargo.toml | 20 + libs/webhook/lib.rs | 14 + openapi-ts.config.ts | 8 + openapi.json | 41489 ++++++++++++++++ openspec/config.yaml | 20 + package.json | 73 + pnpm-lock.yaml | 7114 +++ public/logo.png | Bin 0 -> 95527 bytes scripts/fix-openapi-tags.js | 28 + scripts/gen-client.js | 55 + src/App.css | 184 + src/App.tsx | 176 + src/app/auth/accept-workspace-invite-page.tsx | 182 + src/app/auth/index.ts | 3 + src/app/auth/login-page.tsx | 268 + src/app/auth/password-reset-page.tsx | 199 + src/app/auth/register-page.tsx | 292 + src/app/auth/verify-email-page.tsx | 161 + src/app/init/project.tsx | 263 + src/app/init/repository.tsx | 365 + src/app/init/workspace.tsx | 206 + src/app/notify/layout.tsx | 80 + src/app/notify/page.tsx | 290 + src/app/page.tsx | 40 + src/app/project/activity.tsx | 324 + src/app/project/articles.tsx | 36 + src/app/project/boards.tsx | 179 + src/app/project/boards/[boardId].tsx | 292 + src/app/project/issue-detail.tsx | 525 + src/app/project/issue-edit.tsx | 280 + src/app/project/issue-new.tsx | 600 + src/app/project/issues.tsx | 233 + src/app/project/issues/board-card.tsx | 59 + src/app/project/issues/board-column.tsx | 88 + src/app/project/issues/config.ts | 117 + src/app/project/issues/list-row.tsx | 62 + 
src/app/project/issues/view-store.ts | 24 + src/app/project/layout.tsx | 22 + src/app/project/member.tsx | 482 + src/app/project/overview.tsx | 619 + src/app/project/repo/branches.tsx | 29 + src/app/project/repo/commits.tsx | 29 + src/app/project/repo/contributors.tsx | 29 + src/app/project/repo/files.tsx | 29 + src/app/project/repo/layout.tsx | 100 + src/app/project/repo/overview.tsx | 36 + src/app/project/repo/pull-request-detail.tsx | 29 + src/app/project/repo/pull-request-new.tsx | 29 + src/app/project/repo/pull-requests.tsx | 32 + src/app/project/repo/settings.tsx | 38 + src/app/project/repo/tags.tsx | 29 + src/app/project/repositories.tsx | 448 + src/app/project/resources.tsx | 36 + src/app/project/room.tsx | 177 + src/app/project/settings.tsx | 92 + src/app/project/settings/billing.tsx | 234 + src/app/project/settings/general.tsx | 154 + src/app/project/settings/labels.tsx | 328 + src/app/project/settings/members.tsx | 129 + src/app/project/settings/oauth.tsx | 27 + src/app/project/settings/skills.tsx | 477 + src/app/project/settings/webhook.tsx | 27 + src/app/project/skills-init.tsx | 183 + src/app/project/types.ts | 48 + src/app/repository/branches.tsx | 578 + src/app/repository/commit-diff.tsx | 564 + src/app/repository/commits.tsx | 781 + src/app/repository/contributors.tsx | 109 + src/app/repository/files.tsx | 112 + src/app/repository/layout.tsx | 28 + src/app/repository/overview.tsx | 275 + src/app/repository/pull-request-detail.tsx | 283 + src/app/repository/pull-request-new.tsx | 265 + src/app/repository/pull-requests.tsx | 241 + src/app/repository/settings.tsx | 6 + src/app/repository/settings/archive.tsx | 109 + src/app/repository/settings/branches.tsx | 193 + src/app/repository/settings/general.tsx | 285 + src/app/repository/settings/layout.tsx | 109 + src/app/repository/settings/members.tsx | 188 + src/app/repository/settings/tags.tsx | 167 + src/app/repository/settings/webhooks.tsx | 216 + src/app/repository/tags.tsx | 309 + 
src/app/search/page.tsx | 364 + src/app/settings/account.tsx | 517 + src/app/settings/activity.tsx | 239 + src/app/settings/layout.tsx | 156 + src/app/settings/preferences.tsx | 309 + src/app/settings/profile.tsx | 303 + src/app/settings/security.tsx | 69 + src/app/settings/ssh-keys.tsx | 298 + src/app/settings/tokens.tsx | 413 + src/app/user/user.tsx | 637 + src/app/workspace/billing.tsx | 166 + src/app/workspace/layout.tsx | 46 + src/app/workspace/members.tsx | 469 + src/app/workspace/overview.tsx | 161 + src/app/workspace/projects.tsx | 91 + src/app/workspace/redirect.tsx | 33 + src/app/workspace/settings.tsx | 186 + src/assets/hero.png | Bin 0 -> 44919 bytes src/assets/react.svg | 1 + src/assets/vite.svg | 1 + src/client/client.gen.ts | 23 + src/client/client/client.gen.ts | 159 + src/client/client/index.ts | 23 + src/client/client/types.gen.ts | 161 + src/client/client/utils.gen.ts | 208 + src/client/core/auth.gen.ts | 41 + src/client/core/bodySerializer.gen.ts | 82 + src/client/core/params.gen.ts | 169 + src/client/core/pathSerializer.gen.ts | 171 + src/client/core/queryKeySerializer.gen.ts | 117 + src/client/core/serverSentEvents.gen.ts | 243 + src/client/core/types.gen.ts | 104 + src/client/core/utils.gen.ts | 140 + src/client/index.ts | 4 + src/client/sdk.gen.ts | 2689 + src/client/types.gen.ts | 18720 +++++++ src/components/auth/auth-layout.tsx | 29 + src/components/auth/captcha-image.tsx | 34 + src/components/auth/index.ts | 2 + src/components/auth/protected-route.tsx | 22 + src/components/init-layout.tsx | 27 + src/components/landing/index.ts | 3 + src/components/landing/landing-footer.tsx | 67 + src/components/landing/landing-nav.tsx | 44 + src/components/landing/landing-sections.tsx | 288 + src/components/layout/sidebar-system.tsx | 174 + src/components/layout/sidebar-user.tsx | 92 + src/components/layout/workspace-sidebar.tsx | 178 + src/components/project/KanbanBoard.tsx | 165 + src/components/project/KanbanCard.tsx | 122 + 
src/components/project/KanbanColumn.tsx | 448 + src/components/project/sidebar.tsx | 199 + src/components/repository/PRCommentInput.tsx | 130 + src/components/repository/PRCommitList.tsx | 79 + src/components/repository/PRConversation.tsx | 425 + src/components/repository/PRDiffViewer.tsx | 352 + src/components/repository/PRInlineComment.tsx | 320 + src/components/repository/PRMergeBox.tsx | 242 + src/components/repository/file-browser.tsx | 366 + src/components/repository/header.tsx | 180 + src/components/repository/sidebar.tsx | 208 + src/components/room/CreateRoomDialog.tsx | 80 + src/components/room/DeleteRoomAlert.tsx | 60 + src/components/room/EditRoomDialog.tsx | 75 + src/components/room/FunctionCallBadge.tsx | 56 + src/components/room/MentionPopover.tsx | 381 + src/components/room/MessageMentions.tsx | 320 + src/components/room/RoomAiAuthBanner.tsx | 125 + src/components/room/RoomAiTasksPanel.tsx | 268 + src/components/room/RoomChatInterface.tsx | 65 + src/components/room/RoomChatPanel.tsx | 599 + src/components/room/RoomList.tsx | 97 + src/components/room/RoomMentionPanel.tsx | 133 + src/components/room/RoomMessageActions.tsx | 98 + src/components/room/RoomMessageBubble.tsx | 624 + src/components/room/RoomMessageEditDialog.tsx | 79 + .../room/RoomMessageEditHistoryDialog.tsx | 116 + src/components/room/RoomMessageList.tsx | 409 + src/components/room/RoomMessageReactions.tsx | 125 + src/components/room/RoomMessageSearch.tsx | 164 + src/components/room/RoomParticipantsPanel.tsx | 128 + .../room/RoomPerformanceMonitor.tsx | 153 + src/components/room/RoomPinBar.tsx | 186 + src/components/room/RoomSettingsPanel.tsx | 378 + src/components/room/RoomThreadPanel.tsx | 209 + src/components/room/chatbotKitAdapter.ts | 21 + src/components/room/icon-match.tsx | 26 + src/components/room/index.ts | 14 + src/components/room/sender.ts | 35 + src/components/site-footer.tsx | 132 + src/components/ui/accordion.tsx | 72 + src/components/ui/alert-dialog.tsx | 185 + 
src/components/ui/alert.tsx | 76 + src/components/ui/aspect-ratio.tsx | 22 + src/components/ui/audio-visualizer.tsx | 194 + src/components/ui/avatar.tsx | 109 + src/components/ui/badge.tsx | 52 + src/components/ui/breadcrumb.tsx | 125 + src/components/ui/button-group.tsx | 87 + src/components/ui/button.tsx | 58 + src/components/ui/calendar.tsx | 221 + src/components/ui/card.tsx | 103 + src/components/ui/carousel.tsx | 240 + src/components/ui/chart.tsx | 373 + src/components/ui/chat-message.tsx | 398 + src/components/ui/chat.tsx | 337 + src/components/ui/checkbox.tsx | 29 + src/components/ui/collapsible.tsx | 21 + src/components/ui/combobox.tsx | 297 + src/components/ui/command.tsx | 194 + src/components/ui/context-menu.tsx | 271 + src/components/ui/copy-button.tsx | 44 + src/components/ui/dialog.tsx | 160 + src/components/ui/direction.tsx | 4 + src/components/ui/drawer.tsx | 134 + src/components/ui/dropdown-menu.tsx | 266 + src/components/ui/empty.tsx | 104 + src/components/ui/field.tsx | 238 + src/components/ui/file-preview.tsx | 151 + src/components/ui/hover-card.tsx | 51 + src/components/ui/input-group.tsx | 156 + src/components/ui/input-otp.tsx | 85 + src/components/ui/input.tsx | 20 + src/components/ui/interrupt-prompt.tsx | 41 + src/components/ui/item.tsx | 201 + src/components/ui/kbd.tsx | 26 + src/components/ui/label.tsx | 20 + src/components/ui/markdown-renderer.tsx | 193 + src/components/ui/menubar.tsx | 278 + src/components/ui/message-input.tsx | 462 + src/components/ui/message-list.tsx | 45 + src/components/ui/native-select.tsx | 52 + src/components/ui/navigation-menu.tsx | 168 + src/components/ui/pagination.tsx | 130 + src/components/ui/popover.tsx | 88 + src/components/ui/progress.tsx | 83 + src/components/ui/prompt-suggestions.tsx | 28 + src/components/ui/radio-group.tsx | 36 + src/components/ui/resizable.tsx | 50 + src/components/ui/scroll-area.tsx | 52 + src/components/ui/select.tsx | 201 + src/components/ui/separator.tsx | 23 + 
src/components/ui/sheet.tsx | 136 + src/components/ui/sidebar.tsx | 721 + src/components/ui/skeleton.tsx | 13 + src/components/ui/slider.tsx | 59 + src/components/ui/sonner.tsx | 47 + src/components/ui/spinner.tsx | 10 + src/components/ui/switch.tsx | 32 + src/components/ui/table.tsx | 114 + src/components/ui/tabs.tsx | 82 + src/components/ui/textarea.tsx | 18 + src/components/ui/toggle-group.tsx | 89 + src/components/ui/toggle.tsx | 42 + src/components/ui/tooltip.tsx | 66 + src/components/ui/typing-indicator.tsx | 15 + src/contexts/index.ts | 5 + src/contexts/project-context.tsx | 83 + src/contexts/repo-context.tsx | 44 + src/contexts/repository-context.tsx | 114 + src/contexts/room-context.tsx | 1107 + src/contexts/theme-context.tsx | 62 + src/contexts/user-context.tsx | 108 + src/contexts/workspace-context.tsx | 104 + src/hooks/use-audio-recording.ts | 93 + src/hooks/use-auto-scroll.ts | 73 + src/hooks/use-autosize-textarea.ts | 39 + src/hooks/use-copy-to-clipboard.ts | 36 + src/hooks/use-mobile.ts | 19 + src/hooks/use-sidebar-collapse.ts | 13 + src/hooks/useHead.ts | 79 + src/hooks/useRoomDraft.ts | 126 + src/hooks/useRoomWs.ts | 549 + src/index.css | 129 + src/lib/api-error.ts | 45 + src/lib/audio-utils.ts | 50 + src/lib/diffUtils.ts | 70 + src/lib/functionCallParser.ts | 41 + src/lib/room-ws-client.ts | 959 + src/lib/room.ts | 81 + src/lib/rsa.ts | 15 + src/lib/seo.ts | 30 + src/lib/storage/indexed-db.ts | 194 + src/lib/timezone.ts | 33 + src/lib/universal-ws.ts | 203 + src/lib/utils.ts | 6 + src/lib/validation.ts | 72 + src/lib/ws-protocol.ts | 398 + src/lib/ws-token.ts | 45 + src/main.tsx | 26 + tsconfig.app.json | 37 + tsconfig.json | 13 + tsconfig.node.json | 35 + vite.config.ts | 60 + 1046 files changed, 209174 insertions(+) create mode 100644 .agents/agents/code-reviewer.md create mode 100644 .claude/work.yaml create mode 100644 .dockerignore create mode 100644 .env.example create mode 100644 .gitignore create mode 100644 .idea/.gitignore create mode 
100644 .idea/code.iml create mode 100644 .idea/modules.xml create mode 100644 .idea/vcs.xml create mode 100644 AGENT.md create mode 100644 Cargo.lock create mode 100644 Cargo.toml create mode 100644 README.md create mode 100644 apps/app/Cargo.toml create mode 100644 apps/app/src/args.rs create mode 100644 apps/app/src/logging.rs create mode 100644 apps/app/src/main.rs create mode 100644 apps/email/Cargo.toml create mode 100644 apps/email/src/main.rs create mode 100644 apps/git-hook/Cargo.toml create mode 100644 apps/git-hook/src/args.rs create mode 100644 apps/git-hook/src/main.rs create mode 100644 apps/gitserver/Cargo.toml create mode 100644 apps/gitserver/src/main.rs create mode 100644 apps/migrate/Cargo.toml create mode 100644 apps/migrate/src/main.rs create mode 100644 apps/operator/Cargo.toml create mode 100644 apps/operator/src/context.rs create mode 100644 apps/operator/src/controller/app.rs create mode 100644 apps/operator/src/controller/email_worker.rs create mode 100644 apps/operator/src/controller/git_hook.rs create mode 100644 apps/operator/src/controller/gitserver.rs create mode 100644 apps/operator/src/controller/helpers.rs create mode 100644 apps/operator/src/controller/migrate.rs create mode 100644 apps/operator/src/controller/mod.rs create mode 100644 apps/operator/src/crd.rs create mode 100644 apps/operator/src/lib.rs create mode 100644 apps/operator/src/main.rs create mode 100644 components.json create mode 100644 deploy/Chart.yaml create mode 100644 deploy/templates/NOTES.txt create mode 100644 deploy/templates/_helpers.tpl create mode 100644 deploy/templates/app-deployment.yaml create mode 100644 deploy/templates/configmap.yaml create mode 100644 deploy/templates/email-worker-deployment.yaml create mode 100644 deploy/templates/git-hook-deployment.yaml create mode 100644 deploy/templates/gitserver-deployment.yaml create mode 100644 deploy/templates/ingress.yaml create mode 100644 deploy/templates/migrate-job.yaml create mode 100644 
deploy/templates/operator-deployment.yaml create mode 100644 deploy/templates/secret.yaml create mode 100644 deploy/values.yaml create mode 100644 docker/app.Dockerfile create mode 100644 docker/build.md create mode 100644 docker/build.sh create mode 100644 docker/crd/app-crd.yaml create mode 100644 docker/crd/email-worker-crd.yaml create mode 100644 docker/crd/git-hook-crd.yaml create mode 100644 docker/crd/gitserver-crd.yaml create mode 100644 docker/crd/migrate-crd.yaml create mode 100644 docker/email-worker.Dockerfile create mode 100644 docker/git-hook.Dockerfile create mode 100644 docker/gitserver.Dockerfile create mode 100644 docker/migrate.Dockerfile create mode 100644 docker/operator.Dockerfile create mode 100644 docker/operator/deployment.yaml create mode 100644 docker/operator/example/code-system.yaml create mode 100644 docs/ARCHITECTURE-LAYERS.md create mode 100644 eslint.config.js create mode 100644 index.html create mode 100644 libs/agent-tool-derive/Cargo.toml create mode 100644 libs/agent-tool-derive/src/lib.rs create mode 100644 libs/agent/Cargo.toml create mode 100644 libs/agent/chat/context.rs create mode 100644 libs/agent/chat/mod.rs create mode 100644 libs/agent/chat/service.rs create mode 100644 libs/agent/client.rs create mode 100644 libs/agent/compact/helpers.rs create mode 100644 libs/agent/compact/mod.rs create mode 100644 libs/agent/compact/service.rs create mode 100644 libs/agent/compact/types.rs create mode 100644 libs/agent/embed/client.rs create mode 100644 libs/agent/embed/mod.rs create mode 100644 libs/agent/embed/qdrant.rs create mode 100644 libs/agent/embed/service.rs create mode 100644 libs/agent/error.rs create mode 100644 libs/agent/lib.rs create mode 100644 libs/agent/perception/active.rs create mode 100644 libs/agent/perception/auto.rs create mode 100644 libs/agent/perception/mod.rs create mode 100644 libs/agent/perception/passive.rs create mode 100644 libs/agent/perception/vector.rs create mode 100644 
libs/agent/react/hooks.rs create mode 100644 libs/agent/react/loop_core.rs create mode 100644 libs/agent/react/mod.rs create mode 100644 libs/agent/react/types.rs create mode 100644 libs/agent/task/mod.rs create mode 100644 libs/agent/task/service.rs create mode 100644 libs/agent/tokent.rs create mode 100644 libs/agent/tool/call.rs create mode 100644 libs/agent/tool/context.rs create mode 100644 libs/agent/tool/definition.rs create mode 100644 libs/agent/tool/examples.rs create mode 100644 libs/agent/tool/executor.rs create mode 100644 libs/agent/tool/mod.rs create mode 100644 libs/agent/tool/registry.rs create mode 100644 libs/api/Cargo.toml create mode 100644 libs/api/agent/code_review.rs create mode 100644 libs/api/agent/mod.rs create mode 100644 libs/api/agent/model.rs create mode 100644 libs/api/agent/model_capability.rs create mode 100644 libs/api/agent/model_parameter_profile.rs create mode 100644 libs/api/agent/model_pricing.rs create mode 100644 libs/api/agent/model_version.rs create mode 100644 libs/api/agent/pr_summary.rs create mode 100644 libs/api/agent/provider.rs create mode 100644 libs/api/auth/captcha.rs create mode 100644 libs/api/auth/email.rs create mode 100644 libs/api/auth/login.rs create mode 100644 libs/api/auth/logout.rs create mode 100644 libs/api/auth/me.rs create mode 100644 libs/api/auth/mod.rs create mode 100644 libs/api/auth/password.rs create mode 100644 libs/api/auth/register.rs create mode 100644 libs/api/auth/totp.rs create mode 100644 libs/api/auth/ws_token.rs create mode 100644 libs/api/error.rs create mode 100644 libs/api/gen_api.rs create mode 100644 libs/api/git/archive.rs create mode 100644 libs/api/git/blame.rs create mode 100644 libs/api/git/blob.rs create mode 100644 libs/api/git/branch.rs create mode 100644 libs/api/git/branch_protection.rs create mode 100644 libs/api/git/commit.rs create mode 100644 libs/api/git/contributors.rs create mode 100644 libs/api/git/diff.rs create mode 100644 libs/api/git/init.rs create mode 
100644 libs/api/git/mod.rs create mode 100644 libs/api/git/refs.rs create mode 100644 libs/api/git/repo.rs create mode 100644 libs/api/git/star.rs create mode 100644 libs/api/git/tag.rs create mode 100644 libs/api/git/tree.rs create mode 100644 libs/api/git/watch.rs create mode 100644 libs/api/git/webhook.rs create mode 100644 libs/api/issue/assignee.rs create mode 100644 libs/api/issue/comment.rs create mode 100644 libs/api/issue/comment_reaction.rs create mode 100644 libs/api/issue/issue_label.rs create mode 100644 libs/api/issue/label.rs create mode 100644 libs/api/issue/mod.rs create mode 100644 libs/api/issue/pull_request.rs create mode 100644 libs/api/issue/reaction.rs create mode 100644 libs/api/issue/repo.rs create mode 100644 libs/api/issue/subscriber.rs create mode 100644 libs/api/lib.rs create mode 100644 libs/api/openapi.rs create mode 100644 libs/api/project/activity.rs create mode 100644 libs/api/project/audit.rs create mode 100644 libs/api/project/billing.rs create mode 100644 libs/api/project/board.rs create mode 100644 libs/api/project/info.rs create mode 100644 libs/api/project/init.rs create mode 100644 libs/api/project/invitation.rs create mode 100644 libs/api/project/join_answers.rs create mode 100644 libs/api/project/join_request.rs create mode 100644 libs/api/project/join_settings.rs create mode 100644 libs/api/project/labels.rs create mode 100644 libs/api/project/like.rs create mode 100644 libs/api/project/members.rs create mode 100644 libs/api/project/mod.rs create mode 100644 libs/api/project/repo.rs create mode 100644 libs/api/project/settings.rs create mode 100644 libs/api/project/transfer_repo.rs create mode 100644 libs/api/project/watch.rs create mode 100644 libs/api/pull_request/merge.rs create mode 100644 libs/api/pull_request/mod.rs create mode 100644 libs/api/pull_request/pull_request.rs create mode 100644 libs/api/pull_request/review.rs create mode 100644 libs/api/pull_request/review_comment.rs create mode 100644 
libs/api/pull_request/review_request.rs create mode 100644 libs/api/room/ai.rs create mode 100644 libs/api/room/category.rs create mode 100644 libs/api/room/draft_and_history.rs create mode 100644 libs/api/room/member.rs create mode 100644 libs/api/room/message.rs create mode 100644 libs/api/room/mod.rs create mode 100644 libs/api/room/notification.rs create mode 100644 libs/api/room/pin.rs create mode 100644 libs/api/room/reaction.rs create mode 100644 libs/api/room/room.rs create mode 100644 libs/api/room/thread.rs create mode 100644 libs/api/room/ws.rs create mode 100644 libs/api/room/ws_handler.rs create mode 100644 libs/api/room/ws_types.rs create mode 100644 libs/api/room/ws_universal.rs create mode 100644 libs/api/route.rs create mode 100644 libs/api/search/mod.rs create mode 100644 libs/api/search/service.rs create mode 100644 libs/api/skill.rs create mode 100644 libs/api/user/access_key.rs create mode 100644 libs/api/user/chpc.rs create mode 100644 libs/api/user/mod.rs create mode 100644 libs/api/user/notification.rs create mode 100644 libs/api/user/preferences.rs create mode 100644 libs/api/user/profile.rs create mode 100644 libs/api/user/projects.rs create mode 100644 libs/api/user/repository.rs create mode 100644 libs/api/user/ssh_key.rs create mode 100644 libs/api/user/subscribe.rs create mode 100644 libs/api/user/user_info.rs create mode 100644 libs/api/workspace/billing.rs create mode 100644 libs/api/workspace/info.rs create mode 100644 libs/api/workspace/init.rs create mode 100644 libs/api/workspace/members.rs create mode 100644 libs/api/workspace/mod.rs create mode 100644 libs/api/workspace/projects.rs create mode 100644 libs/api/workspace/settings.rs create mode 100644 libs/api/workspace/stats.rs create mode 100644 libs/avatar/Cargo.toml create mode 100644 libs/avatar/lib.rs create mode 100644 libs/config/Cargo.toml create mode 100644 libs/config/ai.rs create mode 100644 libs/config/app.rs create mode 100644 libs/config/avatar.rs create mode 
100644 libs/config/database.rs create mode 100644 libs/config/domain.rs create mode 100644 libs/config/embed.rs create mode 100644 libs/config/hook.rs create mode 100644 libs/config/lib.rs create mode 100644 libs/config/logs.rs create mode 100644 libs/config/qdrant.rs create mode 100644 libs/config/redis.rs create mode 100644 libs/config/smtp.rs create mode 100644 libs/config/ssh.rs create mode 100644 libs/db/Cargo.toml create mode 100644 libs/db/cache.rs create mode 100644 libs/db/database.rs create mode 100644 libs/db/lib.rs create mode 100644 libs/email/Cargo.toml create mode 100644 libs/email/lib.rs create mode 100644 libs/git/Cargo.toml create mode 100644 libs/git/archive/mod.rs create mode 100644 libs/git/archive/ops.rs create mode 100644 libs/git/archive/types.rs create mode 100644 libs/git/blame/mod.rs create mode 100644 libs/git/blame/ops.rs create mode 100644 libs/git/blob/mod.rs create mode 100644 libs/git/blob/ops.rs create mode 100644 libs/git/blob/types.rs create mode 100644 libs/git/branch/merge.rs create mode 100644 libs/git/branch/mod.rs create mode 100644 libs/git/branch/ops.rs create mode 100644 libs/git/branch/query.rs create mode 100644 libs/git/branch/types.rs create mode 100644 libs/git/commit/cherry_pick.rs create mode 100644 libs/git/commit/create.rs create mode 100644 libs/git/commit/graph.rs create mode 100644 libs/git/commit/meta.rs create mode 100644 libs/git/commit/mod.rs create mode 100644 libs/git/commit/query.rs create mode 100644 libs/git/commit/rebase.rs create mode 100644 libs/git/commit/revert.rs create mode 100644 libs/git/commit/traverse.rs create mode 100644 libs/git/commit/types.rs create mode 100644 libs/git/config/mod.rs create mode 100644 libs/git/config/ops.rs create mode 100644 libs/git/config/types.rs create mode 100644 libs/git/description/mod.rs create mode 100644 libs/git/diff/mod.rs create mode 100644 libs/git/diff/ops.rs create mode 100644 libs/git/diff/types.rs create mode 100644 libs/git/domain.rs create mode 
100644 libs/git/error.rs create mode 100644 libs/git/hook/event.rs create mode 100644 libs/git/hook/mod.rs create mode 100644 libs/git/hook/pool/log.rs create mode 100644 libs/git/hook/pool/metrics.rs create mode 100644 libs/git/hook/pool/mod.rs create mode 100644 libs/git/hook/pool/redis.rs create mode 100644 libs/git/hook/pool/types.rs create mode 100644 libs/git/hook/sync/branch.rs create mode 100644 libs/git/hook/sync/commit.rs create mode 100644 libs/git/hook/sync/fsck.rs create mode 100644 libs/git/hook/sync/gc.rs create mode 100644 libs/git/hook/sync/lfs.rs create mode 100644 libs/git/hook/sync/lock.rs create mode 100644 libs/git/hook/sync/mod.rs create mode 100644 libs/git/hook/sync/remote.rs create mode 100644 libs/git/hook/sync/status.rs create mode 100644 libs/git/hook/sync/tag.rs create mode 100644 libs/git/hook/webhook_dispatch.rs create mode 100644 libs/git/http/auth.rs create mode 100644 libs/git/http/handler.rs create mode 100644 libs/git/http/lfs.rs create mode 100644 libs/git/http/lfs_routes.rs create mode 100644 libs/git/http/mod.rs create mode 100644 libs/git/http/rate_limit.rs create mode 100644 libs/git/http/routes.rs create mode 100644 libs/git/http/utils.rs create mode 100644 libs/git/lfs/mod.rs create mode 100644 libs/git/lfs/ops.rs create mode 100644 libs/git/lfs/types.rs create mode 100644 libs/git/lib.rs create mode 100644 libs/git/merge/mod.rs create mode 100644 libs/git/merge/ops.rs create mode 100644 libs/git/merge/types.rs create mode 100644 libs/git/ref_utils.rs create mode 100644 libs/git/reference/mod.rs create mode 100644 libs/git/reference/ops.rs create mode 100644 libs/git/reference/types.rs create mode 100644 libs/git/ssh/authz.rs create mode 100644 libs/git/ssh/handle.rs create mode 100644 libs/git/ssh/mod.rs create mode 100644 libs/git/ssh/rate_limit.rs create mode 100644 libs/git/ssh/server.rs create mode 100644 libs/git/tags/mod.rs create mode 100644 libs/git/tags/ops.rs create mode 100644 libs/git/tags/query.rs create 
mode 100644 libs/git/tags/types.rs create mode 100644 libs/git/tree/mod.rs create mode 100644 libs/git/tree/query.rs create mode 100644 libs/git/tree/types.rs create mode 100644 libs/migrate/Cargo.toml create mode 100644 libs/migrate/lib.rs create mode 100644 libs/migrate/m20250628_000001_create_room_notifications.rs create mode 100644 libs/migrate/m20250628_000002_create_user.rs create mode 100644 libs/migrate/m20250628_000003_create_user_2fa.rs create mode 100644 libs/migrate/m20250628_000004_create_user_activity_log.rs create mode 100644 libs/migrate/m20250628_000005_create_user_email.rs create mode 100644 libs/migrate/m20250628_000006_create_user_notification.rs create mode 100644 libs/migrate/m20250628_000007_create_user_password.rs create mode 100644 libs/migrate/m20250628_000008_create_user_password_reset.rs create mode 100644 libs/migrate/m20250628_000009_create_user_preferences.rs create mode 100644 libs/migrate/m20250628_000010_create_user_relation.rs create mode 100644 libs/migrate/m20250628_000011_create_user_ssh_key.rs create mode 100644 libs/migrate/m20250628_000012_create_user_token.rs create mode 100644 libs/migrate/m20250628_000013_create_project.rs create mode 100644 libs/migrate/m20250628_000014_create_project_access_log.rs create mode 100644 libs/migrate/m20250628_000015_create_project_audit_log.rs create mode 100644 libs/migrate/m20250628_000016_create_project_billing.rs create mode 100644 libs/migrate/m20250628_000017_create_project_billing_history.rs create mode 100644 libs/migrate/m20250628_000018_create_project_follow.rs create mode 100644 libs/migrate/m20250628_000019_create_project_history_name.rs create mode 100644 libs/migrate/m20250628_000020_create_project_label.rs create mode 100644 libs/migrate/m20250628_000021_create_project_like.rs create mode 100644 libs/migrate/m20250628_000022_create_project_member_invitations.rs create mode 100644 libs/migrate/m20250628_000023_create_project_member_join_answers.rs create mode 100644 
libs/migrate/m20250628_000024_create_project_member_join_request.rs create mode 100644 libs/migrate/m20250628_000025_create_project_member_join_settings.rs create mode 100644 libs/migrate/m20250628_000026_create_project_members.rs create mode 100644 libs/migrate/m20250628_000027_create_project_watch.rs create mode 100644 libs/migrate/m20250628_000028_create_repo.rs create mode 100644 libs/migrate/m20250628_000029_create_repo_branch.rs create mode 100644 libs/migrate/m20250628_000030_create_repo_branch_protect.rs create mode 100644 libs/migrate/m20250628_000031_create_repo_collaborator.rs create mode 100644 libs/migrate/m20250628_000032_create_repo_commit.rs create mode 100644 libs/migrate/m20250628_000033_create_repo_fork.rs create mode 100644 libs/migrate/m20250628_000034_create_repo_history_name.rs create mode 100644 libs/migrate/m20250628_000035_create_repo_hook.rs create mode 100644 libs/migrate/m20250628_000036_create_repo_lfs_lock.rs create mode 100644 libs/migrate/m20250628_000037_create_repo_lfs_object.rs create mode 100644 libs/migrate/m20250628_000038_create_repo_lock.rs create mode 100644 libs/migrate/m20250628_000039_create_repo_star.rs create mode 100644 libs/migrate/m20250628_000040_create_repo_tag.rs create mode 100644 libs/migrate/m20250628_000041_create_repo_upstream.rs create mode 100644 libs/migrate/m20250628_000042_create_repo_watch.rs create mode 100644 libs/migrate/m20250628_000043_create_repo_webhook.rs create mode 100644 libs/migrate/m20250628_000044_create_issue.rs create mode 100644 libs/migrate/m20250628_000045_create_issue_assignee.rs create mode 100644 libs/migrate/m20250628_000046_create_issue_comment.rs create mode 100644 libs/migrate/m20250628_000047_create_issue_comment_reaction.rs create mode 100644 libs/migrate/m20250628_000048_create_issue_label.rs create mode 100644 libs/migrate/m20250628_000049_create_issue_pull_request.rs create mode 100644 libs/migrate/m20250628_000050_create_issue_reaction.rs create mode 100644 
libs/migrate/m20250628_000051_create_issue_repo.rs create mode 100644 libs/migrate/m20250628_000052_create_issue_subscriber.rs create mode 100644 libs/migrate/m20250628_000053_create_pull_request.rs create mode 100644 libs/migrate/m20250628_000054_create_pull_request_commit.rs create mode 100644 libs/migrate/m20250628_000055_create_pull_request_review.rs create mode 100644 libs/migrate/m20250628_000056_create_pull_request_review_comment.rs create mode 100644 libs/migrate/m20250628_000057_create_room_category.rs create mode 100644 libs/migrate/m20250628_000058_create_room.rs create mode 100644 libs/migrate/m20250628_000059_create_room_ai.rs create mode 100644 libs/migrate/m20250628_000060_create_room_member.rs create mode 100644 libs/migrate/m20250628_000061_create_room_message.rs create mode 100644 libs/migrate/m20250628_000062_create_room_pin.rs create mode 100644 libs/migrate/m20250628_000063_create_room_thread.rs create mode 100644 libs/migrate/m20250628_000064_create_ai_model.rs create mode 100644 libs/migrate/m20250628_000065_create_ai_model_capability.rs create mode 100644 libs/migrate/m20250628_000066_create_ai_model_parameter_profile.rs create mode 100644 libs/migrate/m20250628_000067_create_ai_model_pricing.rs create mode 100644 libs/migrate/m20250628_000068_create_ai_model_provider.rs create mode 100644 libs/migrate/m20250628_000069_create_ai_model_version.rs create mode 100644 libs/migrate/m20250628_000070_create_ai_session.rs create mode 100644 libs/migrate/m20250628_000071_create_ai_tool_auth.rs create mode 100644 libs/migrate/m20250628_000072_create_ai_tool_call.rs create mode 100644 libs/migrate/m20250628_000073_create_label.rs create mode 100644 libs/migrate/m20250628_000074_create_notify.rs create mode 100644 libs/migrate/m20250628_000075_fix_column_types_and_names.rs create mode 100644 libs/migrate/m20250628_000076_create_user_email_change.rs create mode 100644 libs/migrate/m20250628_000077_create_project_activity.rs create mode 100644 
libs/migrate/m20250628_000078_add_room_member_do_not_disturb.rs create mode 100644 libs/migrate/m20250628_000079_add_room_message_in_reply_to.rs create mode 100644 libs/migrate/m20250628_000080_add_message_reactions_and_search.rs create mode 100644 libs/migrate/m20250628_000081_add_message_edit_history.rs create mode 100644 libs/migrate/m20250628_000082_add_pr_review_comment_resolve.rs create mode 100644 libs/migrate/m20250628_000083_add_pr_review_request.rs create mode 100644 libs/migrate/m20260407_000001_extend_repo_branch_protect.rs create mode 100644 libs/migrate/m20260407_000002_create_project_board.rs create mode 100644 libs/migrate/m20260407_000003_add_repo_ai_code_review.rs create mode 100644 libs/migrate/m20260411_000001_create_workspace.rs create mode 100644 libs/migrate/m20260411_000002_create_workspace_membership.rs create mode 100644 libs/migrate/m20260411_000003_add_workspace_id_to_project.rs create mode 100644 libs/migrate/m20260411_000004_add_invite_token_to_workspace_membership.rs create mode 100644 libs/migrate/m20260412_000001_create_workspace_billing.rs create mode 100644 libs/migrate/m20260412_000002_create_workspace_billing_history.rs create mode 100644 libs/migrate/m20260412_000003_create_project_skill.rs create mode 100644 libs/migrate/m20260413_000001_add_skill_commit_blob.rs create mode 100644 libs/migrate/m20260414_000001_create_agent_task.rs create mode 100644 libs/migrate/sql/m20250628_000001_create_room_notifications.sql create mode 100644 libs/migrate/sql/m20250628_000002_create_user.sql create mode 100644 libs/migrate/sql/m20250628_000003_create_user_2fa.sql create mode 100644 libs/migrate/sql/m20250628_000004_create_user_activity_log.sql create mode 100644 libs/migrate/sql/m20250628_000005_create_user_email.sql create mode 100644 libs/migrate/sql/m20250628_000006_create_user_notification.sql create mode 100644 libs/migrate/sql/m20250628_000007_create_user_password.sql create mode 100644 
libs/migrate/sql/m20250628_000008_create_user_password_reset.sql create mode 100644 libs/migrate/sql/m20250628_000009_create_user_preferences.sql create mode 100644 libs/migrate/sql/m20250628_000010_create_user_relation.sql create mode 100644 libs/migrate/sql/m20250628_000011_create_user_ssh_key.sql create mode 100644 libs/migrate/sql/m20250628_000012_create_user_token.sql create mode 100644 libs/migrate/sql/m20250628_000013_create_project.sql create mode 100644 libs/migrate/sql/m20250628_000014_create_project_access_log.sql create mode 100644 libs/migrate/sql/m20250628_000015_create_project_audit_log.sql create mode 100644 libs/migrate/sql/m20250628_000016_create_project_billing.sql create mode 100644 libs/migrate/sql/m20250628_000017_create_project_billing_history.sql create mode 100644 libs/migrate/sql/m20250628_000018_create_project_follow.sql create mode 100644 libs/migrate/sql/m20250628_000019_create_project_history_name.sql create mode 100644 libs/migrate/sql/m20250628_000020_create_project_label.sql create mode 100644 libs/migrate/sql/m20250628_000021_create_project_like.sql create mode 100644 libs/migrate/sql/m20250628_000022_create_project_member_invitations.sql create mode 100644 libs/migrate/sql/m20250628_000023_create_project_member_join_answers.sql create mode 100644 libs/migrate/sql/m20250628_000024_create_project_member_join_request.sql create mode 100644 libs/migrate/sql/m20250628_000025_create_project_member_join_settings.sql create mode 100644 libs/migrate/sql/m20250628_000026_create_project_members.sql create mode 100644 libs/migrate/sql/m20250628_000027_create_project_watch.sql create mode 100644 libs/migrate/sql/m20250628_000028_create_repo.sql create mode 100644 libs/migrate/sql/m20250628_000029_create_repo_branch.sql create mode 100644 libs/migrate/sql/m20250628_000030_create_repo_branch_protect.sql create mode 100644 libs/migrate/sql/m20250628_000031_create_repo_collaborator.sql create mode 100644 
libs/migrate/sql/m20250628_000032_create_repo_commit.sql create mode 100644 libs/migrate/sql/m20250628_000033_create_repo_fork.sql create mode 100644 libs/migrate/sql/m20250628_000034_create_repo_history_name.sql create mode 100644 libs/migrate/sql/m20250628_000035_create_repo_hook.sql create mode 100644 libs/migrate/sql/m20250628_000036_create_repo_lfs_lock.sql create mode 100644 libs/migrate/sql/m20250628_000037_create_repo_lfs_object.sql create mode 100644 libs/migrate/sql/m20250628_000038_create_repo_lock.sql create mode 100644 libs/migrate/sql/m20250628_000039_create_repo_star.sql create mode 100644 libs/migrate/sql/m20250628_000040_create_repo_tag.sql create mode 100644 libs/migrate/sql/m20250628_000041_create_repo_upstream.sql create mode 100644 libs/migrate/sql/m20250628_000042_create_repo_watch.sql create mode 100644 libs/migrate/sql/m20250628_000043_create_repo_webhook.sql create mode 100644 libs/migrate/sql/m20250628_000044_create_issue.sql create mode 100644 libs/migrate/sql/m20250628_000045_create_issue_assignee.sql create mode 100644 libs/migrate/sql/m20250628_000046_create_issue_comment.sql create mode 100644 libs/migrate/sql/m20250628_000047_create_issue_comment_reaction.sql create mode 100644 libs/migrate/sql/m20250628_000048_create_issue_label.sql create mode 100644 libs/migrate/sql/m20250628_000049_create_issue_pull_request.sql create mode 100644 libs/migrate/sql/m20250628_000050_create_issue_reaction.sql create mode 100644 libs/migrate/sql/m20250628_000051_create_issue_repo.sql create mode 100644 libs/migrate/sql/m20250628_000052_create_issue_subscriber.sql create mode 100644 libs/migrate/sql/m20250628_000053_create_pull_request.sql create mode 100644 libs/migrate/sql/m20250628_000054_create_pull_request_commit.sql create mode 100644 libs/migrate/sql/m20250628_000055_create_pull_request_review.sql create mode 100644 libs/migrate/sql/m20250628_000056_create_pull_request_review_comment.sql create mode 100644 
libs/migrate/sql/m20250628_000057_create_room_category.sql create mode 100644 libs/migrate/sql/m20250628_000058_create_room.sql create mode 100644 libs/migrate/sql/m20250628_000059_create_room_ai.sql create mode 100644 libs/migrate/sql/m20250628_000060_create_room_member.sql create mode 100644 libs/migrate/sql/m20250628_000061_create_room_message.sql create mode 100644 libs/migrate/sql/m20250628_000062_create_room_pin.sql create mode 100644 libs/migrate/sql/m20250628_000063_create_room_thread.sql create mode 100644 libs/migrate/sql/m20250628_000064_create_ai_model.sql create mode 100644 libs/migrate/sql/m20250628_000065_create_ai_model_capability.sql create mode 100644 libs/migrate/sql/m20250628_000066_create_ai_model_parameter_profile.sql create mode 100644 libs/migrate/sql/m20250628_000067_create_ai_model_pricing.sql create mode 100644 libs/migrate/sql/m20250628_000068_create_ai_model_provider.sql create mode 100644 libs/migrate/sql/m20250628_000069_create_ai_model_version.sql create mode 100644 libs/migrate/sql/m20250628_000070_create_ai_session.sql create mode 100644 libs/migrate/sql/m20250628_000071_create_ai_tool_auth.sql create mode 100644 libs/migrate/sql/m20250628_000072_create_ai_tool_call.sql create mode 100644 libs/migrate/sql/m20250628_000073_create_label.sql create mode 100644 libs/migrate/sql/m20250628_000074_create_notify.sql create mode 100644 libs/migrate/sql/m20250628_000075_fix_column_types_and_names.sql create mode 100644 libs/migrate/sql/m20250628_000076_create_user_email_change.sql create mode 100644 libs/migrate/sql/m20250628_000077_create_project_activity.sql create mode 100644 libs/migrate/sql/m20250628_000078_add_room_member_do_not_disturb.sql create mode 100644 libs/migrate/sql/m20250628_000079_add_room_message_in_reply_to.sql create mode 100644 libs/migrate/sql/m20250628_000080_add_message_reactions_and_search.sql create mode 100644 libs/migrate/sql/m20250628_000081_add_message_edit_history.sql create mode 100644 
libs/migrate/sql/m20250628_000082_add_pr_review_comment_resolve.sql create mode 100644 libs/migrate/sql/m20250628_000083_add_pr_review_request.sql create mode 100644 libs/migrate/sql/m20260407_000001_extend_repo_branch_protect.sql create mode 100644 libs/migrate/sql/m20260407_000002_create_project_board.sql create mode 100644 libs/migrate/sql/m20260407_000003_add_repo_ai_code_review.sql create mode 100644 libs/migrate/sql/m20260411_000001_create_workspace.sql create mode 100644 libs/migrate/sql/m20260411_000002_create_workspace_membership.sql create mode 100644 libs/migrate/sql/m20260411_000003_add_workspace_id_to_project.sql create mode 100644 libs/migrate/sql/m20260411_000004_add_invite_token_to_workspace_membership.sql create mode 100644 libs/migrate/sql/m20260412_000001_create_workspace_billing.sql create mode 100644 libs/migrate/sql/m20260412_000002_create_workspace_billing_history.sql create mode 100644 libs/migrate/sql/m20260412_000003_create_project_skill.sql create mode 100644 libs/migrate/sql/m20260413_000001_add_skill_commit_blob.sql create mode 100644 libs/migrate/sql/m20260414_000001_create_agent_task.sql create mode 100644 libs/models/Cargo.toml create mode 100644 libs/models/agent_task/mod.rs create mode 100644 libs/models/agents/mod.rs create mode 100644 libs/models/agents/model.rs create mode 100644 libs/models/agents/model_capability.rs create mode 100644 libs/models/agents/model_parameter_profile.rs create mode 100644 libs/models/agents/model_pricing.rs create mode 100644 libs/models/agents/model_provider.rs create mode 100644 libs/models/agents/model_version.rs create mode 100644 libs/models/ai/ai_session.rs create mode 100644 libs/models/ai/ai_tool_auth.rs create mode 100644 libs/models/ai/ai_tool_call.rs create mode 100644 libs/models/ai/mod.rs create mode 100644 libs/models/issues/issue.rs create mode 100644 libs/models/issues/issue_assignee.rs create mode 100644 libs/models/issues/issue_comment.rs create mode 100644 
libs/models/issues/issue_comment_reaction.rs create mode 100644 libs/models/issues/issue_label.rs create mode 100644 libs/models/issues/issue_pull_request.rs create mode 100644 libs/models/issues/issue_reaction.rs create mode 100644 libs/models/issues/issue_repo.rs create mode 100644 libs/models/issues/issue_subscriber.rs create mode 100644 libs/models/issues/mod.rs create mode 100644 libs/models/lib.rs create mode 100644 libs/models/projects/mod.rs create mode 100644 libs/models/projects/project.rs create mode 100644 libs/models/projects/project_access_log.rs create mode 100644 libs/models/projects/project_activity.rs create mode 100644 libs/models/projects/project_audit_log.rs create mode 100644 libs/models/projects/project_billing.rs create mode 100644 libs/models/projects/project_billing_history.rs create mode 100644 libs/models/projects/project_board.rs create mode 100644 libs/models/projects/project_board_card.rs create mode 100644 libs/models/projects/project_board_column.rs create mode 100644 libs/models/projects/project_follow.rs create mode 100644 libs/models/projects/project_history_name.rs create mode 100644 libs/models/projects/project_label.rs create mode 100644 libs/models/projects/project_like.rs create mode 100644 libs/models/projects/project_member_invitations.rs create mode 100644 libs/models/projects/project_member_join_answers.rs create mode 100644 libs/models/projects/project_member_join_request.rs create mode 100644 libs/models/projects/project_member_join_settings.rs create mode 100644 libs/models/projects/project_members.rs create mode 100644 libs/models/projects/project_skill.rs create mode 100644 libs/models/projects/project_watch.rs create mode 100644 libs/models/pull_request/mod.rs create mode 100644 libs/models/pull_request/pull_request.rs create mode 100644 libs/models/pull_request/pull_request_commit.rs create mode 100644 libs/models/pull_request/pull_request_review.rs create mode 100644 
libs/models/pull_request/pull_request_review_comment.rs create mode 100644 libs/models/pull_request/pull_request_review_request.rs create mode 100644 libs/models/repos/mod.rs create mode 100644 libs/models/repos/repo.rs create mode 100644 libs/models/repos/repo_branch.rs create mode 100644 libs/models/repos/repo_branch_protect.rs create mode 100644 libs/models/repos/repo_collaborator.rs create mode 100644 libs/models/repos/repo_commit.rs create mode 100644 libs/models/repos/repo_fork.rs create mode 100644 libs/models/repos/repo_history_name.rs create mode 100644 libs/models/repos/repo_hook.rs create mode 100644 libs/models/repos/repo_lfs_lock.rs create mode 100644 libs/models/repos/repo_lfs_object.rs create mode 100644 libs/models/repos/repo_lock.rs create mode 100644 libs/models/repos/repo_star.rs create mode 100644 libs/models/repos/repo_tag.rs create mode 100644 libs/models/repos/repo_upstream.rs create mode 100644 libs/models/repos/repo_watch.rs create mode 100644 libs/models/repos/repo_webhook.rs create mode 100644 libs/models/rooms/mod.rs create mode 100644 libs/models/rooms/room.rs create mode 100644 libs/models/rooms/room_ai.rs create mode 100644 libs/models/rooms/room_category.rs create mode 100644 libs/models/rooms/room_member.rs create mode 100644 libs/models/rooms/room_message.rs create mode 100644 libs/models/rooms/room_message_edit_history.rs create mode 100644 libs/models/rooms/room_message_reaction.rs create mode 100644 libs/models/rooms/room_notifications.rs create mode 100644 libs/models/rooms/room_pin.rs create mode 100644 libs/models/rooms/room_thread.rs create mode 100644 libs/models/system/label.rs create mode 100644 libs/models/system/mod.rs create mode 100644 libs/models/system/notify.rs create mode 100644 libs/models/users/mod.rs create mode 100644 libs/models/users/user.rs create mode 100644 libs/models/users/user_2fa.rs create mode 100644 libs/models/users/user_activity_log.rs create mode 100644 libs/models/users/user_email.rs create mode 
100644 libs/models/users/user_email_change.rs create mode 100644 libs/models/users/user_notification.rs create mode 100644 libs/models/users/user_password.rs create mode 100644 libs/models/users/user_password_reset.rs create mode 100644 libs/models/users/user_preferences.rs create mode 100644 libs/models/users/user_relation.rs create mode 100644 libs/models/users/user_ssh_key.rs create mode 100644 libs/models/users/user_token.rs create mode 100644 libs/models/workspaces/mod.rs create mode 100644 libs/models/workspaces/workspace.rs create mode 100644 libs/models/workspaces/workspace_billing.rs create mode 100644 libs/models/workspaces/workspace_billing_history.rs create mode 100644 libs/models/workspaces/workspace_membership.rs create mode 100644 libs/queue/Cargo.toml create mode 100644 libs/queue/lib.rs create mode 100644 libs/queue/producer.rs create mode 100644 libs/queue/types.rs create mode 100644 libs/queue/worker.rs create mode 100644 libs/room/Cargo.toml create mode 100644 libs/room/src/ai.rs create mode 100644 libs/room/src/category.rs create mode 100644 libs/room/src/connection.rs create mode 100644 libs/room/src/draft_and_history.rs create mode 100644 libs/room/src/error.rs create mode 100644 libs/room/src/helpers.rs create mode 100644 libs/room/src/lib.rs create mode 100644 libs/room/src/member.rs create mode 100644 libs/room/src/message.rs create mode 100644 libs/room/src/metrics.rs create mode 100644 libs/room/src/notification.rs create mode 100644 libs/room/src/pin.rs create mode 100644 libs/room/src/reaction.rs create mode 100644 libs/room/src/room.rs create mode 100644 libs/room/src/room_ai_queue.rs create mode 100644 libs/room/src/search.rs create mode 100644 libs/room/src/service.rs create mode 100644 libs/room/src/thread.rs create mode 100644 libs/room/src/types.rs create mode 100644 libs/room/src/ws_context.rs create mode 100644 libs/rpc/Cargo.toml create mode 100644 libs/rpc/lib.rs create mode 100644 libs/service/Cargo.toml create mode 100644 
libs/service/agent/billing.rs create mode 100644 libs/service/agent/code_review.rs create mode 100644 libs/service/agent/mod.rs create mode 100644 libs/service/agent/model.rs create mode 100644 libs/service/agent/model_capability.rs create mode 100644 libs/service/agent/model_parameter_profile.rs create mode 100644 libs/service/agent/model_pricing.rs create mode 100644 libs/service/agent/model_version.rs create mode 100644 libs/service/agent/pr_summary.rs create mode 100644 libs/service/agent/provider.rs create mode 100644 libs/service/agent/sync.rs create mode 100644 libs/service/auth/captcha.rs create mode 100644 libs/service/auth/email.rs create mode 100644 libs/service/auth/login.rs create mode 100644 libs/service/auth/logout.rs create mode 100644 libs/service/auth/me.rs create mode 100644 libs/service/auth/mod.rs create mode 100644 libs/service/auth/password.rs create mode 100644 libs/service/auth/register.rs create mode 100644 libs/service/auth/rsa.rs create mode 100644 libs/service/auth/totp.rs create mode 100644 libs/service/error.rs create mode 100644 libs/service/git/archive.rs create mode 100644 libs/service/git/blame.rs create mode 100644 libs/service/git/blob.rs create mode 100644 libs/service/git/blocking.rs create mode 100644 libs/service/git/branch.rs create mode 100644 libs/service/git/branch_protection.rs create mode 100644 libs/service/git/commit.rs create mode 100644 libs/service/git/contributors.rs create mode 100644 libs/service/git/diff.rs create mode 100644 libs/service/git/init.rs create mode 100644 libs/service/git/mod.rs create mode 100644 libs/service/git/refs.rs create mode 100644 libs/service/git/repo.rs create mode 100644 libs/service/git/star.rs create mode 100644 libs/service/git/tag.rs create mode 100644 libs/service/git/tree.rs create mode 100644 libs/service/git/watch.rs create mode 100644 libs/service/git/webhook.rs create mode 100644 libs/service/issue/assignee.rs create mode 100644 libs/service/issue/comment.rs create mode 
100644 libs/service/issue/issue.rs create mode 100644 libs/service/issue/label.rs create mode 100644 libs/service/issue/mod.rs create mode 100644 libs/service/issue/pull_request.rs create mode 100644 libs/service/issue/reaction.rs create mode 100644 libs/service/issue/repo.rs create mode 100644 libs/service/issue/subscriber.rs create mode 100644 libs/service/lib.rs create mode 100644 libs/service/project/activity.rs create mode 100644 libs/service/project/audit.rs create mode 100644 libs/service/project/avatar.rs create mode 100644 libs/service/project/billing.rs create mode 100644 libs/service/project/board.rs create mode 100644 libs/service/project/can_use.rs create mode 100644 libs/service/project/info.rs create mode 100644 libs/service/project/init.rs create mode 100644 libs/service/project/invitation.rs create mode 100644 libs/service/project/join_answers.rs create mode 100644 libs/service/project/join_request.rs create mode 100644 libs/service/project/join_settings.rs create mode 100644 libs/service/project/labels.rs create mode 100644 libs/service/project/like.rs create mode 100644 libs/service/project/members.rs create mode 100644 libs/service/project/mod.rs create mode 100644 libs/service/project/repo.rs create mode 100644 libs/service/project/repo_permission.rs create mode 100644 libs/service/project/settings.rs create mode 100644 libs/service/project/standard.rs create mode 100644 libs/service/project/transfer_repo.rs create mode 100644 libs/service/project/watch.rs create mode 100644 libs/service/pull_request/merge.rs create mode 100644 libs/service/pull_request/mod.rs create mode 100644 libs/service/pull_request/pull_request.rs create mode 100644 libs/service/pull_request/review.rs create mode 100644 libs/service/pull_request/review_comment.rs create mode 100644 libs/service/pull_request/review_request.rs create mode 100644 libs/service/search/mod.rs create mode 100644 libs/service/search/service.rs create mode 100644 libs/service/skill/info.rs create 
mode 100644 libs/service/skill/manage.rs create mode 100644 libs/service/skill/mod.rs create mode 100644 libs/service/skill/scan.rs create mode 100644 libs/service/skill/scanner.rs create mode 100644 libs/service/user/access_key.rs create mode 100644 libs/service/user/avatar.rs create mode 100644 libs/service/user/chpc.rs create mode 100644 libs/service/user/mod.rs create mode 100644 libs/service/user/notification.rs create mode 100644 libs/service/user/notify.rs create mode 100644 libs/service/user/preferences.rs create mode 100644 libs/service/user/profile.rs create mode 100644 libs/service/user/projects.rs create mode 100644 libs/service/user/repository.rs create mode 100644 libs/service/user/ssh_key.rs create mode 100644 libs/service/user/subscribe.rs create mode 100644 libs/service/user/user_info.rs create mode 100644 libs/service/utils/mod.rs create mode 100644 libs/service/utils/project.rs create mode 100644 libs/service/utils/repo.rs create mode 100644 libs/service/utils/user.rs create mode 100644 libs/service/utils/workspace.rs create mode 100644 libs/service/webhook_dispatch.rs create mode 100644 libs/service/workspace/billing.rs create mode 100644 libs/service/workspace/info.rs create mode 100644 libs/service/workspace/init.rs create mode 100644 libs/service/workspace/members.rs create mode 100644 libs/service/workspace/mod.rs create mode 100644 libs/service/workspace/settings.rs create mode 100644 libs/service/ws_token.rs create mode 100644 libs/session/Cargo.toml create mode 100644 libs/session/config.rs create mode 100644 libs/session/lib.rs create mode 100644 libs/session/middleware.rs create mode 100644 libs/session/session.rs create mode 100644 libs/session/session_ext.rs create mode 100644 libs/session/storage/format.rs create mode 100644 libs/session/storage/interface.rs create mode 100644 libs/session/storage/mod.rs create mode 100644 libs/session/storage/redis_cluster.rs create mode 100644 libs/session/storage/session_key.rs create mode 100644 
libs/session/storage/utils.rs create mode 100644 libs/transport/Cargo.toml create mode 100644 libs/transport/lib.rs create mode 100644 libs/webhook/Cargo.toml create mode 100644 libs/webhook/lib.rs create mode 100644 openapi-ts.config.ts create mode 100644 openapi.json create mode 100644 openspec/config.yaml create mode 100644 package.json create mode 100644 pnpm-lock.yaml create mode 100644 public/logo.png create mode 100644 scripts/fix-openapi-tags.js create mode 100644 scripts/gen-client.js create mode 100644 src/App.css create mode 100644 src/App.tsx create mode 100644 src/app/auth/accept-workspace-invite-page.tsx create mode 100644 src/app/auth/index.ts create mode 100644 src/app/auth/login-page.tsx create mode 100644 src/app/auth/password-reset-page.tsx create mode 100644 src/app/auth/register-page.tsx create mode 100644 src/app/auth/verify-email-page.tsx create mode 100644 src/app/init/project.tsx create mode 100644 src/app/init/repository.tsx create mode 100644 src/app/init/workspace.tsx create mode 100644 src/app/notify/layout.tsx create mode 100644 src/app/notify/page.tsx create mode 100644 src/app/page.tsx create mode 100644 src/app/project/activity.tsx create mode 100644 src/app/project/articles.tsx create mode 100644 src/app/project/boards.tsx create mode 100644 src/app/project/boards/[boardId].tsx create mode 100644 src/app/project/issue-detail.tsx create mode 100644 src/app/project/issue-edit.tsx create mode 100644 src/app/project/issue-new.tsx create mode 100644 src/app/project/issues.tsx create mode 100644 src/app/project/issues/board-card.tsx create mode 100644 src/app/project/issues/board-column.tsx create mode 100644 src/app/project/issues/config.ts create mode 100644 src/app/project/issues/list-row.tsx create mode 100644 src/app/project/issues/view-store.ts create mode 100644 src/app/project/layout.tsx create mode 100644 src/app/project/member.tsx create mode 100644 src/app/project/overview.tsx create mode 100644 
src/app/project/repo/branches.tsx create mode 100644 src/app/project/repo/commits.tsx create mode 100644 src/app/project/repo/contributors.tsx create mode 100644 src/app/project/repo/files.tsx create mode 100644 src/app/project/repo/layout.tsx create mode 100644 src/app/project/repo/overview.tsx create mode 100644 src/app/project/repo/pull-request-detail.tsx create mode 100644 src/app/project/repo/pull-request-new.tsx create mode 100644 src/app/project/repo/pull-requests.tsx create mode 100644 src/app/project/repo/settings.tsx create mode 100644 src/app/project/repo/tags.tsx create mode 100644 src/app/project/repositories.tsx create mode 100644 src/app/project/resources.tsx create mode 100644 src/app/project/room.tsx create mode 100644 src/app/project/settings.tsx create mode 100644 src/app/project/settings/billing.tsx create mode 100644 src/app/project/settings/general.tsx create mode 100644 src/app/project/settings/labels.tsx create mode 100644 src/app/project/settings/members.tsx create mode 100644 src/app/project/settings/oauth.tsx create mode 100644 src/app/project/settings/skills.tsx create mode 100644 src/app/project/settings/webhook.tsx create mode 100644 src/app/project/skills-init.tsx create mode 100644 src/app/project/types.ts create mode 100644 src/app/repository/branches.tsx create mode 100644 src/app/repository/commit-diff.tsx create mode 100644 src/app/repository/commits.tsx create mode 100644 src/app/repository/contributors.tsx create mode 100644 src/app/repository/files.tsx create mode 100644 src/app/repository/layout.tsx create mode 100644 src/app/repository/overview.tsx create mode 100644 src/app/repository/pull-request-detail.tsx create mode 100644 src/app/repository/pull-request-new.tsx create mode 100644 src/app/repository/pull-requests.tsx create mode 100644 src/app/repository/settings.tsx create mode 100644 src/app/repository/settings/archive.tsx create mode 100644 src/app/repository/settings/branches.tsx create mode 100644 
src/app/repository/settings/general.tsx create mode 100644 src/app/repository/settings/layout.tsx create mode 100644 src/app/repository/settings/members.tsx create mode 100644 src/app/repository/settings/tags.tsx create mode 100644 src/app/repository/settings/webhooks.tsx create mode 100644 src/app/repository/tags.tsx create mode 100644 src/app/search/page.tsx create mode 100644 src/app/settings/account.tsx create mode 100644 src/app/settings/activity.tsx create mode 100644 src/app/settings/layout.tsx create mode 100644 src/app/settings/preferences.tsx create mode 100644 src/app/settings/profile.tsx create mode 100644 src/app/settings/security.tsx create mode 100644 src/app/settings/ssh-keys.tsx create mode 100644 src/app/settings/tokens.tsx create mode 100644 src/app/user/user.tsx create mode 100644 src/app/workspace/billing.tsx create mode 100644 src/app/workspace/layout.tsx create mode 100644 src/app/workspace/members.tsx create mode 100644 src/app/workspace/overview.tsx create mode 100644 src/app/workspace/projects.tsx create mode 100644 src/app/workspace/redirect.tsx create mode 100644 src/app/workspace/settings.tsx create mode 100644 src/assets/hero.png create mode 100644 src/assets/react.svg create mode 100644 src/assets/vite.svg create mode 100644 src/client/client.gen.ts create mode 100644 src/client/client/client.gen.ts create mode 100644 src/client/client/index.ts create mode 100644 src/client/client/types.gen.ts create mode 100644 src/client/client/utils.gen.ts create mode 100644 src/client/core/auth.gen.ts create mode 100644 src/client/core/bodySerializer.gen.ts create mode 100644 src/client/core/params.gen.ts create mode 100644 src/client/core/pathSerializer.gen.ts create mode 100644 src/client/core/queryKeySerializer.gen.ts create mode 100644 src/client/core/serverSentEvents.gen.ts create mode 100644 src/client/core/types.gen.ts create mode 100644 src/client/core/utils.gen.ts create mode 100644 src/client/index.ts create mode 100644 
src/client/sdk.gen.ts create mode 100644 src/client/types.gen.ts create mode 100644 src/components/auth/auth-layout.tsx create mode 100644 src/components/auth/captcha-image.tsx create mode 100644 src/components/auth/index.ts create mode 100644 src/components/auth/protected-route.tsx create mode 100644 src/components/init-layout.tsx create mode 100644 src/components/landing/index.ts create mode 100644 src/components/landing/landing-footer.tsx create mode 100644 src/components/landing/landing-nav.tsx create mode 100644 src/components/landing/landing-sections.tsx create mode 100644 src/components/layout/sidebar-system.tsx create mode 100644 src/components/layout/sidebar-user.tsx create mode 100644 src/components/layout/workspace-sidebar.tsx create mode 100644 src/components/project/KanbanBoard.tsx create mode 100644 src/components/project/KanbanCard.tsx create mode 100644 src/components/project/KanbanColumn.tsx create mode 100644 src/components/project/sidebar.tsx create mode 100644 src/components/repository/PRCommentInput.tsx create mode 100644 src/components/repository/PRCommitList.tsx create mode 100644 src/components/repository/PRConversation.tsx create mode 100644 src/components/repository/PRDiffViewer.tsx create mode 100644 src/components/repository/PRInlineComment.tsx create mode 100644 src/components/repository/PRMergeBox.tsx create mode 100644 src/components/repository/file-browser.tsx create mode 100644 src/components/repository/header.tsx create mode 100644 src/components/repository/sidebar.tsx create mode 100644 src/components/room/CreateRoomDialog.tsx create mode 100644 src/components/room/DeleteRoomAlert.tsx create mode 100644 src/components/room/EditRoomDialog.tsx create mode 100644 src/components/room/FunctionCallBadge.tsx create mode 100644 src/components/room/MentionPopover.tsx create mode 100644 src/components/room/MessageMentions.tsx create mode 100644 src/components/room/RoomAiAuthBanner.tsx create mode 100644 
src/components/room/RoomAiTasksPanel.tsx create mode 100644 src/components/room/RoomChatInterface.tsx create mode 100644 src/components/room/RoomChatPanel.tsx create mode 100644 src/components/room/RoomList.tsx create mode 100644 src/components/room/RoomMentionPanel.tsx create mode 100644 src/components/room/RoomMessageActions.tsx create mode 100644 src/components/room/RoomMessageBubble.tsx create mode 100644 src/components/room/RoomMessageEditDialog.tsx create mode 100644 src/components/room/RoomMessageEditHistoryDialog.tsx create mode 100644 src/components/room/RoomMessageList.tsx create mode 100644 src/components/room/RoomMessageReactions.tsx create mode 100644 src/components/room/RoomMessageSearch.tsx create mode 100644 src/components/room/RoomParticipantsPanel.tsx create mode 100644 src/components/room/RoomPerformanceMonitor.tsx create mode 100644 src/components/room/RoomPinBar.tsx create mode 100644 src/components/room/RoomSettingsPanel.tsx create mode 100644 src/components/room/RoomThreadPanel.tsx create mode 100644 src/components/room/chatbotKitAdapter.ts create mode 100644 src/components/room/icon-match.tsx create mode 100644 src/components/room/index.ts create mode 100644 src/components/room/sender.ts create mode 100644 src/components/site-footer.tsx create mode 100644 src/components/ui/accordion.tsx create mode 100644 src/components/ui/alert-dialog.tsx create mode 100644 src/components/ui/alert.tsx create mode 100644 src/components/ui/aspect-ratio.tsx create mode 100644 src/components/ui/audio-visualizer.tsx create mode 100644 src/components/ui/avatar.tsx create mode 100644 src/components/ui/badge.tsx create mode 100644 src/components/ui/breadcrumb.tsx create mode 100644 src/components/ui/button-group.tsx create mode 100644 src/components/ui/button.tsx create mode 100644 src/components/ui/calendar.tsx create mode 100644 src/components/ui/card.tsx create mode 100644 src/components/ui/carousel.tsx create mode 100644 src/components/ui/chart.tsx create mode 
100644 src/components/ui/chat-message.tsx create mode 100644 src/components/ui/chat.tsx create mode 100644 src/components/ui/checkbox.tsx create mode 100644 src/components/ui/collapsible.tsx create mode 100644 src/components/ui/combobox.tsx create mode 100644 src/components/ui/command.tsx create mode 100644 src/components/ui/context-menu.tsx create mode 100644 src/components/ui/copy-button.tsx create mode 100644 src/components/ui/dialog.tsx create mode 100644 src/components/ui/direction.tsx create mode 100644 src/components/ui/drawer.tsx create mode 100644 src/components/ui/dropdown-menu.tsx create mode 100644 src/components/ui/empty.tsx create mode 100644 src/components/ui/field.tsx create mode 100644 src/components/ui/file-preview.tsx create mode 100644 src/components/ui/hover-card.tsx create mode 100644 src/components/ui/input-group.tsx create mode 100644 src/components/ui/input-otp.tsx create mode 100644 src/components/ui/input.tsx create mode 100644 src/components/ui/interrupt-prompt.tsx create mode 100644 src/components/ui/item.tsx create mode 100644 src/components/ui/kbd.tsx create mode 100644 src/components/ui/label.tsx create mode 100644 src/components/ui/markdown-renderer.tsx create mode 100644 src/components/ui/menubar.tsx create mode 100644 src/components/ui/message-input.tsx create mode 100644 src/components/ui/message-list.tsx create mode 100644 src/components/ui/native-select.tsx create mode 100644 src/components/ui/navigation-menu.tsx create mode 100644 src/components/ui/pagination.tsx create mode 100644 src/components/ui/popover.tsx create mode 100644 src/components/ui/progress.tsx create mode 100644 src/components/ui/prompt-suggestions.tsx create mode 100644 src/components/ui/radio-group.tsx create mode 100644 src/components/ui/resizable.tsx create mode 100644 src/components/ui/scroll-area.tsx create mode 100644 src/components/ui/select.tsx create mode 100644 src/components/ui/separator.tsx create mode 100644 src/components/ui/sheet.tsx create 
mode 100644 src/components/ui/sidebar.tsx create mode 100644 src/components/ui/skeleton.tsx create mode 100644 src/components/ui/slider.tsx create mode 100644 src/components/ui/sonner.tsx create mode 100644 src/components/ui/spinner.tsx create mode 100644 src/components/ui/switch.tsx create mode 100644 src/components/ui/table.tsx create mode 100644 src/components/ui/tabs.tsx create mode 100644 src/components/ui/textarea.tsx create mode 100644 src/components/ui/toggle-group.tsx create mode 100644 src/components/ui/toggle.tsx create mode 100644 src/components/ui/tooltip.tsx create mode 100644 src/components/ui/typing-indicator.tsx create mode 100644 src/contexts/index.ts create mode 100644 src/contexts/project-context.tsx create mode 100644 src/contexts/repo-context.tsx create mode 100644 src/contexts/repository-context.tsx create mode 100644 src/contexts/room-context.tsx create mode 100644 src/contexts/theme-context.tsx create mode 100644 src/contexts/user-context.tsx create mode 100644 src/contexts/workspace-context.tsx create mode 100644 src/hooks/use-audio-recording.ts create mode 100644 src/hooks/use-auto-scroll.ts create mode 100644 src/hooks/use-autosize-textarea.ts create mode 100644 src/hooks/use-copy-to-clipboard.ts create mode 100644 src/hooks/use-mobile.ts create mode 100644 src/hooks/use-sidebar-collapse.ts create mode 100644 src/hooks/useHead.ts create mode 100644 src/hooks/useRoomDraft.ts create mode 100644 src/hooks/useRoomWs.ts create mode 100644 src/index.css create mode 100644 src/lib/api-error.ts create mode 100644 src/lib/audio-utils.ts create mode 100644 src/lib/diffUtils.ts create mode 100644 src/lib/functionCallParser.ts create mode 100644 src/lib/room-ws-client.ts create mode 100644 src/lib/room.ts create mode 100644 src/lib/rsa.ts create mode 100644 src/lib/seo.ts create mode 100644 src/lib/storage/indexed-db.ts create mode 100644 src/lib/timezone.ts create mode 100644 src/lib/universal-ws.ts create mode 100644 src/lib/utils.ts create mode 
100644 src/lib/validation.ts create mode 100644 src/lib/ws-protocol.ts create mode 100644 src/lib/ws-token.ts create mode 100644 src/main.tsx create mode 100644 tsconfig.app.json create mode 100644 tsconfig.json create mode 100644 tsconfig.node.json create mode 100644 vite.config.ts diff --git a/.agents/agents/code-reviewer.md b/.agents/agents/code-reviewer.md new file mode 100644 index 0000000..fd936f4 --- /dev/null +++ b/.agents/agents/code-reviewer.md @@ -0,0 +1,42 @@ + +You are a Senior Code Reviewer with expertise in software architecture, design patterns, and best practices. Your role is to review completed project steps against original plans and ensure code quality standards are met. + +When reviewing completed work, you will: + +1. **Plan Alignment Analysis**: + - Compare the implementation against the original planning document or step description + - Identify any deviations from the planned approach, architecture, or requirements + - Assess whether deviations are justified improvements or problematic departures + - Verify that all planned functionality has been implemented + +2. **Code Quality Assessment**: + - Review code for adherence to established patterns and conventions + - Check for proper error handling, type safety, and defensive programming + - Evaluate code organization, naming conventions, and maintainability + - Assess test coverage and quality of test implementations + - Look for potential security vulnerabilities or performance issues + +3. **Architecture and Design Review**: + - Ensure the implementation follows SOLID principles and established architectural patterns + - Check for proper separation of concerns and loose coupling + - Verify that the code integrates well with existing systems + - Assess scalability and extensibility considerations + +4. 
**Documentation and Standards**: + - Verify that code includes appropriate comments and documentation + - Check that file headers, function documentation, and inline comments are present and accurate + - Ensure adherence to project-specific coding standards and conventions + +5. **Issue Identification and Recommendations**: + - Clearly categorize issues as: Critical (must fix), Important (should fix), or Suggestions (nice to have) + - For each issue, provide specific examples and actionable recommendations + - When you identify plan deviations, explain whether they're problematic or beneficial + - Suggest specific improvements with code examples when helpful + +6. **Communication Protocol**: + - If you find significant deviations from the plan, ask the coding agent to review and confirm the changes + - If you identify issues with the original plan itself, recommend plan updates + - For implementation problems, provide clear guidance on fixes needed + - Always acknowledge what was done well before highlighting issues + +Your output should be structured, actionable, and focused on helping maintain high code quality while ensuring project goals are met. Be thorough but concise, and always provide constructive feedback that helps improve both the current implementation and future development practices. 
diff --git a/.claude/work.yaml b/.claude/work.yaml new file mode 100644 index 0000000..476b5fe --- /dev/null +++ b/.claude/work.yaml @@ -0,0 +1,4 @@ +list: + - "优化libs/api/error.rs 为标准错误 (Branch)" + - "2d76bf69-9ca9-4da9-b693-31005c5c4c0d" + - "c3f51440-f6ee-483a-8887-556e94f0897f" \ No newline at end of file diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 0000000..aa068ac --- /dev/null +++ b/.dockerignore @@ -0,0 +1,9 @@ +target/ +.git/ +.idea/ +.vscode/ +node_modules/ +*.log +.env +.env.local +.env.*.local diff --git a/.env.example b/.env.example new file mode 100644 index 0000000..a55d1b7 --- /dev/null +++ b/.env.example @@ -0,0 +1,109 @@ +# ============================================================================= +# Required - 程序启动必须配置 +# ============================================================================= + +# 数据库连接 +APP_DATABASE_URL=postgresql://user:password@localhost:5432/dbname +APP_DATABASE_SCHEMA_SEARCH_PATH=public + +# Redis(支持多节点,逗号分隔) +APP_REDIS_URL=redis://localhost:6379 +# APP_REDIS_URLS=redis://localhost:6379,redis://localhost:6378 + +# AI 服务 +APP_AI_BASIC_URL=https://api.openai.com/v1 +APP_AI_API_KEY=sk-xxxxx + +# Embedding + 向量检索 +APP_EMBED_MODEL_BASE_URL=https://api.openai.com/v1 +APP_EMBED_MODEL_API_KEY=sk-xxxxx +APP_EMBED_MODEL_NAME=text-embedding-3-small +APP_EMBED_MODEL_DIMENSIONS=1536 +APP_QDRANT_URL=http://localhost:6333 +# APP_QDRANT_API_KEY= + +# SMTP 邮件 +APP_SMTP_HOST=smtp.example.com +APP_SMTP_PORT=587 +APP_SMTP_USERNAME=noreply@example.com +APP_SMTP_PASSWORD=xxxxx +APP_SMTP_FROM=noreply@example.com +APP_SMTP_TLS=true +APP_SMTP_TIMEOUT=30 + +# 文件存储 +APP_AVATAR_PATH=/data/avatars +# Git 仓库存储根目录 +APP_REPOS_ROOT=/data/repos + +# ============================================================================= +# Domain / URL(可选,有默认值) +# ============================================================================= + +APP_DOMAIN_URL=http://127.0.0.1 +# APP_STATIC_DOMAIN= +# APP_MEDIA_DOMAIN= +# 
APP_GIT_HTTP_DOMAIN= + +# ============================================================================= +# Database Pool(可选,有默认值) +# ============================================================================= + +# APP_DATABASE_MAX_CONNECTIONS=10 +# APP_DATABASE_MIN_CONNECTIONS=2 +# APP_DATABASE_IDLE_TIMEOUT=60000 +# APP_DATABASE_MAX_LIFETIME=300000 +# APP_DATABASE_CONNECTION_TIMEOUT=5000 +# APP_DATABASE_REPLICAS= +# APP_DATABASE_HEALTH_CHECK_INTERVAL=30 +# APP_DATABASE_RETRY_ATTEMPTS=3 +# APP_DATABASE_RETRY_DELAY=5 + +# ============================================================================= +# Redis Pool(可选,有默认值) +# ============================================================================= + +# APP_REDIS_POOL_SIZE=10 +# APP_REDIS_CONNECT_TIMEOUT=5 +# APP_REDIS_ACQUIRE_TIMEOUT=5 + +# ============================================================================= +# SSH(可选,有默认值) +# ============================================================================= + +# APP_SSH_DOMAIN= +# APP_SSH_PORT=22 +# APP_SSH_SERVER_PRIVATE_KEY= +# APP_SSH_SERVER_PUBLIC_KEY= + +# ============================================================================= +# Logging(可选,有默认值) +# ============================================================================= + +# APP_LOG_LEVEL=info +# APP_LOG_FORMAT=json +# APP_LOG_FILE_ENABLED=false +# APP_LOG_FILE_PATH=./logs +# APP_LOG_FILE_ROTATION=daily +# APP_LOG_FILE_MAX_FILES=7 +# APP_LOG_FILE_MAX_SIZE=104857600 + +# OpenTelemetry(可选,默认关闭) +# APP_OTEL_ENABLED=false +# APP_OTEL_ENDPOINT=http://localhost:5080/api/default/v1/traces +# APP_OTEL_SERVICE_NAME= +# APP_OTEL_SERVICE_VERSION= +# APP_OTEL_AUTHORIZATION= +# APP_OTEL_ORGANIZATION= + +# ============================================================================= +# NATS / Hook Pool(可选,有默认值) +# ============================================================================= + +# HOOK_POOL_MAX_CONCURRENT=(CPU 核数) +# HOOK_POOL_CPU_THRESHOLD=80.0 +# HOOK_POOL_REDIS_LIST_PREFIX={hook} +# 
HOOK_POOL_REDIS_LOG_CHANNEL=hook:logs +# HOOK_POOL_REDIS_BLOCK_TIMEOUT=5 +# HOOK_POOL_REDIS_MAX_RETRIES=3 +# HOOK_POOL_WORKER_ID=(随机 UUID) diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..5e3ce22 --- /dev/null +++ b/.gitignore @@ -0,0 +1,15 @@ +/target +node_modules +.claude +.zed +.vscode +.idea +.env +.env.local +dist +.codex +.qwen +.opencode +.omc +AGENT.md +ARCHITECTURE.md diff --git a/.idea/.gitignore b/.idea/.gitignore new file mode 100644 index 0000000..b6b1ecf --- /dev/null +++ b/.idea/.gitignore @@ -0,0 +1,10 @@ +# 默认忽略的文件 +/shelf/ +/workspace.xml +# 已忽略包含查询文件的默认文件夹 +/queries/ +# Datasource local storage ignored files +/dataSources/ +/dataSources.local.xml +# 基于编辑器的 HTTP 客户端请求 +/httpRequests/ diff --git a/.idea/code.iml b/.idea/code.iml new file mode 100644 index 0000000..2961ea3 --- /dev/null +++ b/.idea/code.iml @@ -0,0 +1,24 @@ + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/.idea/modules.xml b/.idea/modules.xml new file mode 100644 index 0000000..23968dc --- /dev/null +++ b/.idea/modules.xml @@ -0,0 +1,8 @@ + + + + + + + + \ No newline at end of file diff --git a/.idea/vcs.xml b/.idea/vcs.xml new file mode 100644 index 0000000..35eb1dd --- /dev/null +++ b/.idea/vcs.xml @@ -0,0 +1,6 @@ + + + + + + \ No newline at end of file diff --git a/AGENT.md b/AGENT.md new file mode 100644 index 0000000..3959737 --- /dev/null +++ b/AGENT.md @@ -0,0 +1,182 @@ +You are a deterministic autonomous coding agent. + +Your purpose is NOT to be fast or clever. +Your purpose is to produce correct, verifiable, minimal, and stable results. + +You MUST operate under strict discipline. + +--- + +## CORE EXECUTION MODEL + +You MUST follow this exact loop: + +1. UNDERSTAND +2. PLAN +3. EXECUTE (single step only) +4. VERIFY (mandatory) +5. REVIEW (mandatory) +6. FIX or CONTINUE + +You are NOT allowed to skip any step. 
+ +--- + +## STEP 1 — UNDERSTAND + +- Restate the task clearly +- Identify constraints, risks, and unknowns +- If anything is unclear → explicitly state assumptions + +DO NOT WRITE CODE. + +--- + +## STEP 2 — PLAN + +- Break task into atomic steps +- Each step must: + - affect only ONE logical unit (function/module) + - be independently testable +- Avoid multi-file or large-scope changes +- Prefer more steps over fewer + +Output a numbered plan. + +--- + +## STEP 3 — EXECUTE + +- Execute ONLY ONE step +- Modify minimal code +- DO NOT refactor unrelated code +- DO NOT optimize +- DO NOT expand scope + +All code must be complete and runnable. + +--- + +## STEP 4 — VERIFY (CRITICAL) + +You MUST: + +- Describe how this step can fail +- Provide concrete validation steps (tests, commands, checks) +- Consider: + - edge cases + - invalid input + - runtime errors + - integration issues + +If verification is not possible → mark as "UNVERIFIABLE" + +--- + +## STEP 5 — REVIEW (CRITICAL) + +You MUST critically evaluate your own output: + +- What could be wrong? +- What assumptions may break? +- Did you overreach scope? +- Is there a simpler or safer solution? + +Be skeptical. Assume you are wrong. + +--- + +## STEP 6 — FIX OR CONTINUE + +IF issues found: + +- Fix them immediately +- DO NOT proceed to next step + +IF no issues: + +- Move to next step + +--- + +## HARD CONSTRAINTS + +- NEVER implement the whole solution at once +- NEVER skip verification +- NEVER assume correctness +- ALWAYS minimize change scope +- ALWAYS prefer boring, simple solutions +- NEVER hallucinate APIs or functions +- IF uncertain → explicitly say "UNCERTAIN" + +--- + +## FAILURE HANDLING + +If you fail twice: + +- STOP +- Re-evaluate the entire plan +- Propose a different approach + +--- + +## OUTPUT FORMAT (STRICT) + +## Step X: + +### Understand + +... + +### Plan + +... + +### Execute + +... + +### Verify + +... + +### Review + +... 
+ +--- + +## ENVIRONMENT RULES + +- You are operating in a real codebase +- All edits must be precise and minimal +- Always indicate file paths when modifying code +- Do not create unnecessary files +- Prefer editing existing code + +--- + +## PRIORITY ORDER + +Correctness > Verifiability > Stability > Maintainability > Speed + +--- + +## BEHAVIORAL DIRECTIVES + +- Be slow and deliberate +- Think before acting +- Act in small steps +- Validate everything +- Trust nothing (including your own output) + +EXECUTION DISCIPLINE: + +- You are NOT allowed to jump steps +- You are NOT allowed to combine steps +- Each response must contain ONLY ONE step execution +- After each step, STOP and wait + +If the user does not explicitly say "continue": +DO NOT proceed to next step \ No newline at end of file diff --git a/Cargo.lock b/Cargo.lock new file mode 100644 index 0000000..e734d0f --- /dev/null +++ b/Cargo.lock @@ -0,0 +1,8862 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. 
+version = 4 + +[[package]] +name = "ab_glyph" +version = "0.2.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "01c0457472c38ea5bd1c3b5ada5e368271cb550be7a4ca4a0b4634e9913f6cc2" +dependencies = [ + "ab_glyph_rasterizer", + "owned_ttf_parser", +] + +[[package]] +name = "ab_glyph_rasterizer" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "366ffbaa4442f4684d91e2cd7c5ea7c4ed8add41959a31447066e279e432b618" + +[[package]] +name = "actix" +version = "0.13.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "de7fa236829ba0841304542f7614c42b80fca007455315c45c785ccfa873a85b" +dependencies = [ + "actix-macros", + "actix-rt", + "actix_derive", + "bitflags", + "bytes", + "crossbeam-channel", + "futures-core", + "futures-sink", + "futures-task", + "futures-util", + "log", + "once_cell", + "parking_lot", + "pin-project-lite", + "smallvec", + "tokio", + "tokio-util", +] + +[[package]] +name = "actix-codec" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f7b0a21988c1bf877cf4759ef5ddaac04c1c9fe808c9142ecb78ba97d97a28a" +dependencies = [ + "bitflags", + "bytes", + "futures-core", + "futures-sink", + "memchr", + "pin-project-lite", + "tokio", + "tokio-util", + "tracing", +] + +[[package]] +name = "actix-cors" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "daa239b93927be1ff123eebada5a3ff23e89f0124ccb8609234e5103d5a5ae6d" +dependencies = [ + "actix-utils", + "actix-web", + "derive_more", + "futures-util", + "log", + "once_cell", + "smallvec", +] + +[[package]] +name = "actix-http" +version = "3.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f860ee6746d0c5b682147b2f7f8ef036d4f92fe518251a3a35ffa3650eafdf0e" +dependencies = [ + "actix-codec", + "actix-rt", + "actix-service", + "actix-utils", + "base64 0.22.1", + "bitflags", + "brotli", + 
"bytes", + "bytestring", + "derive_more", + "encoding_rs", + "flate2", + "foldhash", + "futures-core", + "h2 0.3.27", + "http 0.2.12", + "httparse", + "httpdate", + "itoa", + "language-tags", + "local-channel", + "mime", + "percent-encoding", + "pin-project-lite", + "rand 0.9.2", + "sha1", + "smallvec", + "tokio", + "tokio-util", + "tracing", + "zstd", +] + +[[package]] +name = "actix-macros" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e01ed3140b2f8d422c68afa1ed2e85d996ea619c988ac834d255db32138655cb" +dependencies = [ + "quote", + "syn 2.0.117", +] + +[[package]] +name = "actix-router" +version = "0.5.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "14f8c75c51892f18d9c46150c5ac7beb81c95f78c8b83a634d49f4ca32551fe7" +dependencies = [ + "bytestring", + "cfg-if", + "http 0.2.12", + "regex", + "regex-lite", + "serde", + "tracing", +] + +[[package]] +name = "actix-rt" +version = "2.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "92589714878ca59a7626ea19734f0e07a6a875197eec751bb5d3f99e64998c63" +dependencies = [ + "futures-core", + "tokio", +] + +[[package]] +name = "actix-server" +version = "2.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a65064ea4a457eaf07f2fba30b4c695bf43b721790e9530d26cb6f9019ff7502" +dependencies = [ + "actix-rt", + "actix-service", + "actix-utils", + "futures-core", + "futures-util", + "mio", + "socket2 0.5.10", + "tokio", + "tracing", +] + +[[package]] +name = "actix-service" +version = "2.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e46f36bf0e5af44bdc4bdb36fbbd421aa98c79a9bce724e1edeb3894e10dc7f" +dependencies = [ + "futures-core", + "pin-project-lite", +] + +[[package]] +name = "actix-utils" +version = "3.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"88a1dcdff1466e3c2488e1cb5c36a71822750ad43839937f85d2f4d9f8b705d8" +dependencies = [ + "local-waker", + "pin-project-lite", +] + +[[package]] +name = "actix-web" +version = "4.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff87453bc3b56e9b2b23c1cc0b1be8797184accf51d2abe0f8a33ec275d316bf" +dependencies = [ + "actix-codec", + "actix-http", + "actix-macros", + "actix-router", + "actix-rt", + "actix-server", + "actix-service", + "actix-utils", + "actix-web-codegen", + "bytes", + "bytestring", + "cfg-if", + "cookie", + "derive_more", + "encoding_rs", + "foldhash", + "futures-core", + "futures-util", + "impl-more", + "itoa", + "language-tags", + "log", + "mime", + "once_cell", + "pin-project-lite", + "regex", + "regex-lite", + "serde", + "serde_json", + "serde_urlencoded", + "smallvec", + "socket2 0.6.3", + "time", + "tracing", + "url", +] + +[[package]] +name = "actix-web-codegen" +version = "4.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f591380e2e68490b5dfaf1dd1aa0ebe78d84ba7067078512b4ea6e4492d622b8" +dependencies = [ + "actix-router", + "proc-macro2", + "quote", + "syn 2.0.117", +] + +[[package]] +name = "actix-ws" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "decf53c3cdd63dd6f289980b430238f9a2f6d19f8bce8e418272e08d3da43f0f" +dependencies = [ + "actix-codec", + "actix-http", + "actix-web", + "bytestring", + "futures-core", + "futures-sink", + "tokio", + "tokio-util", +] + +[[package]] +name = "actix_derive" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6ac1e58cded18cb28ddc17143c4dea5345b3ad575e14f32f66e4054a56eb271" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.117", +] + +[[package]] +name = "adler2" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "320119579fcad9c21884f5c4861d16174d0e06250625266f50fe6898340abefa" + +[[package]] 
+name = "aead" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d122413f284cf2d62fb1b7db97e02edb8cda96d769b16e443a4f6195e35662b0" +dependencies = [ + "crypto-common 0.1.7", + "generic-array 0.14.7", +] + +[[package]] +name = "aead" +version = "0.6.0-rc.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6b657e772794c6b04730ea897b66a058ccd866c16d1967da05eeeecec39043fe" +dependencies = [ + "crypto-common 0.2.1", + "inout 0.2.2", +] + +[[package]] +name = "aes" +version = "0.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b169f7a6d4742236a0a00c541b845991d0ac43e546831af1249753ab4c3aa3a0" +dependencies = [ + "cfg-if", + "cipher 0.4.4", + "cpufeatures 0.2.17", +] + +[[package]] +name = "aes" +version = "0.9.0-rc.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "04097e08a47d9ad181c2e1f4a5fabc9ae06ce8839a333ba9a949bcb0d31fd2a3" +dependencies = [ + "cipher 0.5.1", + "cpubits", + "cpufeatures 0.2.17", + "zeroize", +] + +[[package]] +name = "aes-gcm" +version = "0.10.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "831010a0f742e1209b3bcea8fab6a8e149051ba6099432c8cb2cc117dec3ead1" +dependencies = [ + "aead 0.5.2", + "aes 0.8.4", + "cipher 0.4.4", + "ctr 0.9.2", + "ghash 0.5.1", + "subtle", +] + +[[package]] +name = "aes-gcm" +version = "0.11.0-rc.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e22c0c90bbe8d4f77c3ca9ddabe41a1f8382d6fc1f7cea89459d0f320371f972" +dependencies = [ + "aead 0.6.0-rc.10", + "aes 0.9.0-rc.4", + "cipher 0.5.1", + "ctr 0.10.0-rc.4", + "ghash 0.6.0", + "subtle", + "zeroize", +] + +[[package]] +name = "agent" +version = "0.2.9" +dependencies = [ + "agent-tool-derive", + "async-openai", + "async-trait", + "chrono", + "config", + "db", + "futures", + "models", + "once_cell", + "qdrant-client", + "regex", + "sea-orm", + "serde", + "serde_json", + 
"thiserror 2.0.18", + "tiktoken-rs", + "tokio", + "uuid", +] + +[[package]] +name = "agent-tool-derive" +version = "0.2.9" +dependencies = [ + "convert_case 0.11.0", + "futures", + "proc-macro2", + "quote", + "serde", + "serde_json", + "syn 2.0.117", +] + +[[package]] +name = "ahash" +version = "0.7.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "891477e0c6a8957309ee5c45a6368af3ae14bb510732d2684ffa19af310920f9" +dependencies = [ + "getrandom 0.2.17", + "once_cell", + "version_check", +] + +[[package]] +name = "ahash" +version = "0.8.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a15f179cd60c4584b8a8c596927aadc462e27f2ca70c04e0071964a73ba7a75" +dependencies = [ + "cfg-if", + "const-random", + "getrandom 0.3.4", + "once_cell", + "version_check", + "zerocopy", +] + +[[package]] +name = "aho-corasick" +version = "1.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ddd31a130427c27518df266943a5308ed92d4b226cc639f5a8f1002816174301" +dependencies = [ + "memchr", +] + +[[package]] +name = "aliasable" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "250f629c0161ad8107cf89319e990051fae62832fd343083bea452d93e2205fd" + +[[package]] +name = "aligned" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee4508988c62edf04abd8d92897fca0c2995d907ce1dfeaf369dac3716a40685" +dependencies = [ + "as-slice", +] + +[[package]] +name = "aligned-vec" +version = "0.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc890384c8602f339876ded803c97ad529f3842aba97f6392b3dba0dd171769b" +dependencies = [ + "equator", +] + +[[package]] +name = "alloc-no-stdlib" +version = "2.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cc7bb162ec39d46ab1ca8c77bf72e890535becd1751bb45f64c597edb4c8c6b3" + +[[package]] +name = "alloc-stdlib" +version = 
"0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94fb8275041c72129eb51b7d0322c29b8387a0386127718b096429201a5d6ece" +dependencies = [ + "alloc-no-stdlib", +] + +[[package]] +name = "allocator-api2" +version = "0.2.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923" + +[[package]] +name = "android_system_properties" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311" +dependencies = [ + "libc", +] + +[[package]] +name = "anstream" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "824a212faf96e9acacdbd09febd34438f8f711fb84e09a8916013cd7815ca28d" +dependencies = [ + "anstyle", + "anstyle-parse", + "anstyle-query", + "anstyle-wincon", + "colorchoice", + "is_terminal_polyfill", + "utf8parse", +] + +[[package]] +name = "anstyle" +version = "1.0.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "940b3a0ca603d1eade50a4846a2afffd5ef57a9feac2c0e2ec2e14f9ead76000" + +[[package]] +name = "anstyle-parse" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "52ce7f38b242319f7cabaa6813055467063ecdc9d355bbb4ce0c68908cd8130e" +dependencies = [ + "utf8parse", +] + +[[package]] +name = "anstyle-query" +version = "1.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "40c48f72fd53cd289104fc64099abca73db4166ad86ea0b4341abe65af83dadc" +dependencies = [ + "windows-sys 0.61.2", +] + +[[package]] +name = "anstyle-wincon" +version = "3.0.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "291e6a250ff86cd4a820112fb8898808a366d8f9f58ce16d1f538353ad55747d" +dependencies = [ + "anstyle", + "once_cell_polyfill", + "windows-sys 0.61.2", +] + +[[package]] +name = "anyhow" 
+version = "1.0.102" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f202df86484c868dbad7eaa557ef785d5c66295e41b460ef922eca0723b842c" + +[[package]] +name = "api" +version = "0.2.9" +dependencies = [ + "actix", + "actix-cors", + "actix-web", + "actix-ws", + "anyhow", + "base64 0.22.1", + "chrono", + "config", + "db", + "email", + "futures", + "git", + "models", + "queue", + "room", + "serde", + "serde_json", + "service", + "session", + "slog", + "tokio", + "tokio-stream", + "utoipa", + "uuid", +] + +[[package]] +name = "app" +version = "0.2.9" +dependencies = [ + "actix-cors", + "actix-web", + "anyhow", + "api", + "chrono", + "clap", + "config", + "db", + "futures", + "migrate", + "sea-orm", + "serde_json", + "service", + "session", + "slog", + "tokio", + "uuid", +] + +[[package]] +name = "approx" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cab112f0a86d568ea0e627cc1d6be74a1e9cd55214684db5561995f6dad897c6" +dependencies = [ + "num-traits", +] + +[[package]] +name = "arbitrary" +version = "1.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3d036a3c4ab069c7b410a2ce876bd74808d2d0888a82667669f8e783a898bf1" + +[[package]] +name = "arc-swap" +version = "1.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a07d1f37ff60921c83bdfc7407723bdefe89b44b98a9b772f225c8f9d67141a6" +dependencies = [ + "rustversion", +] + +[[package]] +name = "arcstr" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "03918c3dbd7701a85c6b9887732e2921175f26c350b4563841d0958c21d57e6d" + +[[package]] +name = "arg_enum_proc_macro" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ae92a5119aa49cdbcf6b9f893fe4e1d98b04ccbf82ee0584ad948a44a734dea" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.117", +] + +[[package]] +name = "argon2" +version = "0.5.3" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c3610892ee6e0cbce8ae2700349fcf8f98adb0dbfbee85aec3c9179d29cc072" +dependencies = [ + "base64ct", + "blake2", + "cpufeatures 0.2.17", + "password-hash", +] + +[[package]] +name = "arrayvec" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50" + +[[package]] +name = "arrow" +version = "57.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e4754a624e5ae42081f464514be454b39711daae0458906dacde5f4c632f33a8" +dependencies = [ + "arrow-arith", + "arrow-array", + "arrow-buffer", + "arrow-cast", + "arrow-data", + "arrow-ord", + "arrow-row", + "arrow-schema", + "arrow-select", + "arrow-string", +] + +[[package]] +name = "arrow-arith" +version = "57.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f7b3141e0ec5145a22d8694ea8b6d6f69305971c4fa1c1a13ef0195aef2d678b" +dependencies = [ + "arrow-array", + "arrow-buffer", + "arrow-data", + "arrow-schema", + "chrono", + "num-traits", +] + +[[package]] +name = "arrow-array" +version = "57.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4c8955af33b25f3b175ee10af580577280b4bd01f7e823d94c7cdef7cf8c9aef" +dependencies = [ + "ahash 0.8.12", + "arrow-buffer", + "arrow-data", + "arrow-schema", + "chrono", + "half", + "hashbrown 0.16.1", + "num-complex", + "num-integer", + "num-traits", +] + +[[package]] +name = "arrow-buffer" +version = "57.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c697ddca96183182f35b3a18e50b9110b11e916d7b7799cbfd4d34662f2c56c2" +dependencies = [ + "bytes", + "half", + "num-bigint", + "num-traits", +] + +[[package]] +name = "arrow-cast" +version = "57.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"646bbb821e86fd57189c10b4fcdaa941deaf4181924917b0daa92735baa6ada5" +dependencies = [ + "arrow-array", + "arrow-buffer", + "arrow-data", + "arrow-ord", + "arrow-schema", + "arrow-select", + "atoi", + "base64 0.22.1", + "chrono", + "half", + "lexical-core", + "num-traits", + "ryu", +] + +[[package]] +name = "arrow-data" +version = "57.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fdd994a9d28e6365aa78e15da3f3950c0fdcea6b963a12fa1c391afb637b304" +dependencies = [ + "arrow-buffer", + "arrow-schema", + "half", + "num-integer", + "num-traits", +] + +[[package]] +name = "arrow-ord" +version = "57.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f7d8f1870e03d4cbed632959498bcc84083b5a24bded52905ae1695bd29da45b" +dependencies = [ + "arrow-array", + "arrow-buffer", + "arrow-data", + "arrow-schema", + "arrow-select", +] + +[[package]] +name = "arrow-row" +version = "57.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "18228633bad92bff92a95746bbeb16e5fc318e8382b75619dec26db79e4de4c0" +dependencies = [ + "arrow-array", + "arrow-buffer", + "arrow-data", + "arrow-schema", + "half", +] + +[[package]] +name = "arrow-schema" +version = "57.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8c872d36b7bf2a6a6a2b40de9156265f0242910791db366a2c17476ba8330d68" + +[[package]] +name = "arrow-select" +version = "57.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "68bf3e3efbd1278f770d67e5dc410257300b161b93baedb3aae836144edcaf4b" +dependencies = [ + "ahash 0.8.12", + "arrow-array", + "arrow-buffer", + "arrow-data", + "arrow-schema", + "num-traits", +] + +[[package]] +name = "arrow-string" +version = "57.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85e968097061b3c0e9fe3079cf2e703e487890700546b5b0647f60fca1b5a8d8" +dependencies = [ + "arrow-array", + "arrow-buffer", + "arrow-data", + 
"arrow-schema", + "arrow-select", + "memchr", + "num-traits", + "regex", + "regex-syntax", +] + +[[package]] +name = "as-slice" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "516b6b4f0e40d50dcda9365d53964ec74560ad4284da2e7fc97122cd83174516" +dependencies = [ + "stable_deref_trait", +] + +[[package]] +name = "async-broadcast" +version = "0.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "435a87a52755b8f27fcf321ac4f04b2802e337c8c4872923137471ec39c37532" +dependencies = [ + "event-listener", + "event-listener-strategy", + "futures-core", + "pin-project-lite", +] + +[[package]] +name = "async-lock" +version = "3.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "290f7f2596bd5b78a9fec8088ccd89180d7f9f55b94b0576823bbbdc72ee8311" +dependencies = [ + "event-listener", + "event-listener-strategy", + "pin-project-lite", +] + +[[package]] +name = "async-openai" +version = "0.34.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec08254d61379df136135d3d1ac04301be7699fd7d9e57655c63ac7d650a6922" +dependencies = [ + "async-openai-macros", + "backoff", + "base64 0.22.1", + "bytes", + "derive_builder", + "eventsource-stream", + "futures", + "getrandom 0.3.4", + "rand 0.9.2", + "reqwest 0.12.28", + "reqwest-eventsource", + "secrecy", + "serde", + "serde_json", + "serde_urlencoded", + "thiserror 2.0.18", + "tokio", + "tokio-stream", + "tokio-util", + "tracing", + "url", +] + +[[package]] +name = "async-openai-macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "81872a8e595e8ceceab71c6ba1f9078e313b452a1e31934e6763ef5d308705e4" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.117", +] + +[[package]] +name = "async-stream" +version = "0.3.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"0b5a71a6f37880a80d1d7f19efd781e4b5de42c88f0722cc13bcb6cc2cfe8476" +dependencies = [ + "async-stream-impl", + "futures-core", + "pin-project-lite", +] + +[[package]] +name = "async-stream-impl" +version = "0.3.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c7c24de15d275a1ecfd47a380fb4d5ec9bfe0933f309ed5e705b775596a3574d" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.117", +] + +[[package]] +name = "async-trait" +version = "0.1.89" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9035ad2d096bed7955a320ee7e2230574d28fd3c3a0f186cbea1ff3c7eed5dbb" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.117", +] + +[[package]] +name = "atoi" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f28d99ec8bfea296261ca1af174f24225171fea9664ba9003cbebee704810528" +dependencies = [ + "num-traits", +] + +[[package]] +name = "atomic-waker" +version = "1.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0" + +[[package]] +name = "autocfg" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8" + +[[package]] +name = "av-scenechange" +version = "0.14.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0f321d77c20e19b92c39e7471cf986812cbb46659d2af674adc4331ef3f18394" +dependencies = [ + "aligned", + "anyhow", + "arg_enum_proc_macro", + "arrayvec", + "log", + "num-rational", + "num-traits", + "pastey", + "rayon", + "thiserror 2.0.18", + "v_frame", + "y4m", +] + +[[package]] +name = "av1-grain" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8cfddb07216410377231960af4fcab838eaa12e013417781b78bd95ee22077f8" +dependencies = [ + "anyhow", + "arrayvec", + "log", + "nom 8.0.0", + 
"num-rational", + "v_frame", +] + +[[package]] +name = "avatar" +version = "0.2.9" +dependencies = [ + "anyhow", + "config", + "image", + "serde", +] + +[[package]] +name = "avif-serialize" +version = "0.8.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "375082f007bd67184fb9c0374614b29f9aaa604ec301635f72338bb65386a53d" +dependencies = [ + "arrayvec", +] + +[[package]] +name = "axum" +version = "0.7.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "edca88bc138befd0323b20752846e6587272d3b03b0343c8ea28a6f819e6e71f" +dependencies = [ + "async-trait", + "axum-core", + "bytes", + "futures-util", + "http 1.4.0", + "http-body", + "http-body-util", + "itoa", + "matchit", + "memchr", + "mime", + "percent-encoding", + "pin-project-lite", + "rustversion", + "serde", + "sync_wrapper", + "tower 0.5.3", + "tower-layer", + "tower-service", +] + +[[package]] +name = "axum-core" +version = "0.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09f2bd6146b97ae3359fa0cc6d6b376d9539582c7b4220f041a33ec24c226199" +dependencies = [ + "async-trait", + "bytes", + "futures-util", + "http 1.4.0", + "http-body", + "http-body-util", + "mime", + "pin-project-lite", + "rustversion", + "sync_wrapper", + "tower-layer", + "tower-service", +] + +[[package]] +name = "backoff" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b62ddb9cb1ec0a098ad4bbf9344d0713fa193ae1a80af55febcff2627b6a00c1" +dependencies = [ + "futures-core", + "getrandom 0.2.17", + "instant", + "pin-project-lite", + "rand 0.8.5", + "tokio", +] + +[[package]] +name = "backon" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cffb0e931875b666fc4fcb20fee52e9bbd1ef836fd9e9e04ec21555f9f85f7ef" +dependencies = [ + "fastrand", +] + +[[package]] +name = "base16ct" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "4c7f02d4ea65f2c1853089ffd8d2787bdbc63de2f0d29dedbcf8ccdfa0ccd4cf" + +[[package]] +name = "base16ct" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fd307490d624467aa6f74b0eabb77633d1f758a7b25f12bceb0b22e08d9726f6" + +[[package]] +name = "base64" +version = "0.20.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ea22880d78093b0cbe17c89f64a7d457941e65759157ec6cb31a31d652b05e5" + +[[package]] +name = "base64" +version = "0.22.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" + +[[package]] +name = "base64ct" +version = "1.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2af50177e190e07a26ab74f8b1efbfe2ef87da2116221318cb1c2e82baf7de06" + +[[package]] +name = "bcrypt-pbkdf" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6aeac2e1fe888769f34f05ac343bbef98b14d1ffb292ab69d4608b3abc86f2a2" +dependencies = [ + "blowfish", + "pbkdf2", + "sha2 0.10.9", +] + +[[package]] +name = "bigdecimal" +version = "0.4.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4d6867f1565b3aad85681f1015055b087fcfd840d6aeee6eee7f2da317603695" +dependencies = [ + "autocfg", + "libm", + "num-bigint", + "num-integer", + "num-traits", + "serde", +] + +[[package]] +name = "bit-set" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0700ddab506f33b20a03b13996eccd309a48e5ff77d0d95926aa0210fb4e95f1" +dependencies = [ + "bit-vec", +] + +[[package]] +name = "bit-vec" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "349f9b6a179ed607305526ca489b34ad0a41aed5f7980fa90eb03160b69598fb" + +[[package]] +name = "bit_field" +version = "0.10.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum 
= "1e4b40c7323adcfc0a41c4b88143ed58346ff65a288fc144329c5c45e05d70c6" + +[[package]] +name = "bitflags" +version = "2.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "843867be96c8daad0d758b57df9392b6d8d271134fce549de6ce169ff98a92af" +dependencies = [ + "serde_core", +] + +[[package]] +name = "bitstream-io" +version = "4.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "60d4bd9d1db2c6bdf285e223a7fa369d5ce98ec767dec949c6ca62863ce61757" +dependencies = [ + "core2", +] + +[[package]] +name = "bitvec" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1bc2832c24239b0141d5674bb9174f9d68a8b5b3f2753311927c172ca46f7e9c" +dependencies = [ + "funty", + "radium", + "tap", + "wyz", +] + +[[package]] +name = "blake2" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "46502ad458c9a52b69d4d4d32775c788b7a1b85e8bc9d482d92250fc0e3f8efe" +dependencies = [ + "digest 0.10.7", +] + +[[package]] +name = "block-buffer" +version = "0.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" +dependencies = [ + "generic-array 0.14.7", +] + +[[package]] +name = "block-buffer" +version = "0.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cdd35008169921d80bc60d3d0ab416eecb028c4cd653352907921d95084790be" +dependencies = [ + "hybrid-array", + "zeroize", +] + +[[package]] +name = "block-padding" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8894febbff9f758034a5b8e12d87918f56dfc64a8e1fe757d65e29041538d93" +dependencies = [ + "generic-array 0.14.7", +] + +[[package]] +name = "blowfish" +version = "0.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e412e2cd0f2b2d93e02543ceae7917b3c70331573df19ee046bcbc35e45e87d7" 
+dependencies = [ + "byteorder", + "cipher 0.4.4", +] + +[[package]] +name = "borsh" +version = "1.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cfd1e3f8955a5d7de9fab72fc8373fade9fb8a703968cb200ae3dc6cf08e185a" +dependencies = [ + "borsh-derive", + "bytes", + "cfg_aliases", +] + +[[package]] +name = "borsh-derive" +version = "1.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfcfdc083699101d5a7965e49925975f2f55060f94f9a05e7187be95d530ca59" +dependencies = [ + "once_cell", + "proc-macro-crate", + "proc-macro2", + "quote", + "syn 2.0.117", +] + +[[package]] +name = "brotli" +version = "8.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4bd8b9603c7aa97359dbd97ecf258968c95f3adddd6db2f7e7a5bef101c84560" +dependencies = [ + "alloc-no-stdlib", + "alloc-stdlib", + "brotli-decompressor", +] + +[[package]] +name = "brotli-decompressor" +version = "5.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "874bb8112abecc98cbd6d81ea4fa7e94fb9449648c93cc89aa40c81c24d7de03" +dependencies = [ + "alloc-no-stdlib", + "alloc-stdlib", +] + +[[package]] +name = "bstr" +version = "1.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "63044e1ae8e69f3b5a92c736ca6269b8d12fa7efe39bf34ddb06d102cf0e2cab" +dependencies = [ + "memchr", + "regex-automata", + "serde", +] + +[[package]] +name = "built" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f4ad8f11f288f48ca24471bbd51ac257aaeaaa07adae295591266b792902ae64" + +[[package]] +name = "bumpalo" +version = "3.20.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d20789868f4b01b2f2caec9f5c4e0213b41e3e5702a50157d699ae31ced2fcb" + +[[package]] +name = "bytecheck" +version = "0.6.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"23cdc57ce23ac53c931e88a43d06d070a6fd142f2617be5855eb75efc9beb1c2" +dependencies = [ + "bytecheck_derive", + "ptr_meta", + "simdutf8", +] + +[[package]] +name = "bytecheck_derive" +version = "0.6.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3db406d29fbcd95542e92559bed4d8ad92636d1ca8b3b72ede10b4bcc010e659" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "bytemuck" +version = "1.25.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8efb64bd706a16a1bdde310ae86b351e4d21550d98d056f22f8a7f7a2183fec" + +[[package]] +name = "byteorder" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" + +[[package]] +name = "byteorder-lite" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f1fe948ff07f4bd06c30984e69f5b4899c516a3ef74f34df92a2df2ab535495" + +[[package]] +name = "bytes" +version = "1.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e748733b7cbc798e1434b6ac524f0c1ff2ab456fe201501e6497c8417a4fc33" + +[[package]] +name = "bytestring" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "113b4343b5f6617e7ad401ced8de3cc8b012e73a594347c307b90db3e9271289" +dependencies = [ + "bytes", +] + +[[package]] +name = "bzip2" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f3a53fac24f34a81bc9954b5d6cfce0c21e18ec6959f44f56e8e90e4bb7c346c" +dependencies = [ + "libbz2-rs-sys", +] + +[[package]] +name = "captcha-rs" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ea23e9ba29e482e553d48391849195b95f055ffb059f785a28c9c5a046844223" +dependencies = [ + "ab_glyph", + "base64 0.22.1", + "image", + "imageproc", + "rand 0.9.2", +] + +[[package]] +name = 
"cbc" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26b52a9543ae338f279b96b0b9fed9c8093744685043739079ce85cd58f289a6" +dependencies = [ + "cipher 0.4.4", +] + +[[package]] +name = "cc" +version = "1.2.58" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e1e928d4b69e3077709075a938a05ffbedfa53a84c8f766efbf8220bb1ff60e1" +dependencies = [ + "find-msvc-tools", + "jobserver", + "libc", + "shlex", +] + +[[package]] +name = "cfg-if" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9330f8b2ff13f34540b44e946ef35111825727b38d33286ef986142615121801" + +[[package]] +name = "cfg_aliases" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724" + +[[package]] +name = "chacha20" +version = "0.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3613f74bd2eac03dad61bd53dbe620703d4371614fe0bc3b9f04dd36fe4e818" +dependencies = [ + "cfg-if", + "cipher 0.4.4", + "cpufeatures 0.2.17", +] + +[[package]] +name = "chacha20" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6f8d983286843e49675a4b7a2d174efe136dc93a18d69130dd18198a6c167601" +dependencies = [ + "cfg-if", + "cipher 0.5.1", + "cpufeatures 0.3.0", + "rand_core 0.10.0", + "zeroize", +] + +[[package]] +name = "chrono" +version = "0.4.44" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c673075a2e0e5f4a1dde27ce9dee1ea4558c7ffe648f576438a20ca1d2acc4b0" +dependencies = [ + "iana-time-zone", + "js-sys", + "num-traits", + "serde", + "wasm-bindgen", + "windows-link", +] + +[[package]] +name = "cipher" +version = "0.4.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "773f3b9af64447d2ce9850330c473515014aa235e6a783b02db81ff39e4a3dad" +dependencies = [ + 
"crypto-common 0.1.7", + "inout 0.1.4", +] + +[[package]] +name = "cipher" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e34d8227fe1ba289043aeb13792056ff80fd6de1a9f49137a5f499de8e8c78ea" +dependencies = [ + "block-buffer 0.12.0", + "crypto-common 0.2.1", + "inout 0.2.2", + "zeroize", +] + +[[package]] +name = "clap" +version = "4.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b193af5b67834b676abd72466a96c1024e6a6ad978a1f484bd90b85c94041351" +dependencies = [ + "clap_builder", + "clap_derive", +] + +[[package]] +name = "clap_builder" +version = "4.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "714a53001bf66416adb0e2ef5ac857140e7dc3a0c48fb28b2f10762fc4b5069f" +dependencies = [ + "anstream", + "anstyle", + "clap_lex", + "strsim", +] + +[[package]] +name = "clap_derive" +version = "4.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1110bd8a634a1ab8cb04345d8d878267d57c3cf1b38d91b71af6686408bbca6a" +dependencies = [ + "heck 0.5.0", + "proc-macro2", + "quote", + "syn 2.0.117", +] + +[[package]] +name = "clap_lex" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8d4a3bb8b1e0c1050499d1815f5ab16d04f0959b233085fb31653fbfc9d98f9" + +[[package]] +name = "cmov" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "de0758edba32d61d1fd9f4d69491b47604b91ee2f7e6b33de7e54ca4ebe55dc3" + +[[package]] +name = "color_quant" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d7b894f5411737b7867f4827955924d7c254fc9f4d91a6aad6b097804b1018b" + +[[package]] +name = "colorchoice" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d07550c9036bf2ae0c684c4297d503f838287c83c53686d05370d0e139ae570" + +[[package]] +name = "combine" +version = "4.6.7" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba5a308b75df32fe02788e748662718f03fde005016435c444eea572398219fd" +dependencies = [ + "bytes", + "futures-core", + "memchr", + "pin-project-lite", + "tokio", + "tokio-util", +] + +[[package]] +name = "concurrent-queue" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ca0197aee26d1ae37445ee532fefce43251d24cc7c166799f4d46817f1d3973" +dependencies = [ + "crossbeam-utils", +] + +[[package]] +name = "config" +version = "0.2.9" +dependencies = [ + "anyhow", + "dotenvy", + "num_cpus", + "serde", + "uuid", +] + +[[package]] +name = "const-oid" +version = "0.9.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c2459377285ad874054d797f3ccebf984978aa39129f6eafde5cdc8315b612f8" + +[[package]] +name = "const-oid" +version = "0.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a6ef517f0926dd24a1582492c791b6a4818a4d94e789a334894aa15b0d12f55c" + +[[package]] +name = "const-random" +version = "0.1.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87e00182fe74b066627d63b85fd550ac2998d4b0bd86bfed477a0ae4c7c71359" +dependencies = [ + "const-random-macro", +] + +[[package]] +name = "const-random-macro" +version = "0.1.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f9d839f2a20b0aee515dc581a6172f2321f96cab76c1a38a4c584a194955390e" +dependencies = [ + "getrandom 0.2.17", + "once_cell", + "tiny-keccak", +] + +[[package]] +name = "constant_time_eq" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d52eff69cd5e647efe296129160853a42795992097e8af39800e1060caeea9b" + +[[package]] +name = "convert_case" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "633458d4ef8c78b72454de2d54fd6ab2e60f9e02be22f3c6104cdc8a4e0fceb9" +dependencies = [ + 
"unicode-segmentation", +] + +[[package]] +name = "convert_case" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "affbf0190ed2caf063e3def54ff444b449371d55c58e513a95ab98eca50adb49" +dependencies = [ + "unicode-segmentation", +] + +[[package]] +name = "cookie" +version = "0.16.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e859cd57d0710d9e06c381b550c06e76992472a8c6d527aecd2fc673dcc231fb" +dependencies = [ + "aes-gcm 0.10.3", + "base64 0.20.0", + "hkdf", + "hmac", + "percent-encoding", + "rand 0.8.5", + "sha2 0.10.9", + "subtle", + "time", + "version_check", +] + +[[package]] +name = "core-foundation" +version = "0.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91e195e091a93c46f7102ec7818a2aa394e1e1771c3ab4825963fa03e45afb8f" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "core-foundation" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b2a6cd9ae233e7f62ba4e9353e81a88df7fc8a5987b8d445b4d90c879bd156f6" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "core-foundation-sys" +version = "0.8.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b" + +[[package]] +name = "core-models" +version = "0.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0940496e5c83c54f3b753d5317daec82e8edac71c33aaa1f666d76f518de2444" +dependencies = [ + "hax-lib", + "pastey", + "rand 0.9.2", +] + +[[package]] +name = "core2" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b49ba7ef1ad6107f8824dbe97de947cbaac53c44e7f9756a1fba0d37c1eec505" +dependencies = [ + "memchr", +] + +[[package]] +name = "cpubits" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "5ef0c543070d296ea414df2dd7625d1b24866ce206709d8a4a424f28377f5861" + +[[package]] +name = "cpufeatures" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "59ed5838eebb26a2bb2e58f6d5b5316989ae9d08bab10e0e6d103e656d1b0280" +dependencies = [ + "libc", +] + +[[package]] +name = "cpufeatures" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b2a41393f66f16b0823bb79094d54ac5fbd34ab292ddafb9a0456ac9f87d201" +dependencies = [ + "libc", +] + +[[package]] +name = "crc" +version = "3.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5eb8a2a1cd12ab0d987a5d5e825195d372001a4094a0376319d5a0ad71c1ba0d" +dependencies = [ + "crc-catalog", +] + +[[package]] +name = "crc-catalog" +version = "2.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "19d374276b40fb8bbdee95aef7c7fa6b5316ec764510eb64b8dd0e2ed0d7e7f5" + +[[package]] +name = "crc16" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "338089f42c427b86394a5ee60ff321da23a5c89c9d89514c829687b26359fcff" + +[[package]] +name = "crc32fast" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9481c1c90cbf2ac953f07c8d4a58aa3945c425b7185c9154d67a65e4230da511" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "crossbeam-channel" +version = "0.5.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "82b8f8f868b36967f9606790d1903570de9ceaf870a7bf9fbbd3016d636a2cb2" +dependencies = [ + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-deque" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9dd111b7b7f7d55b72c0a6ae361660ee5853c9af73f70c3c2ef6858b950e2e51" +dependencies = [ + "crossbeam-epoch", + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-epoch" +version = "0.9.18" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b82ac4a3c2ca9c3460964f020e1402edd5753411d7737aa39c3714ad1b5420e" +dependencies = [ + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-queue" +version = "0.3.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0f58bbc28f91df819d0aa2a2c00cd19754769c2fad90579b3592b1c9ba7a3115" +dependencies = [ + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-utils" +version = "0.8.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28" + +[[package]] +name = "crunchy" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "460fbee9c2c2f33933d720630a6a0bac33ba7053db5344fac858d4b8952d77d5" + +[[package]] +name = "crypto-bigint" +version = "0.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0dc92fb57ca44df6db8059111ab3af99a63d5d0f8375d9972e319a379c6bab76" +dependencies = [ + "generic-array 0.14.7", + "rand_core 0.6.4", + "subtle", + "zeroize", +] + +[[package]] +name = "crypto-common" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "78c8292055d1c1df0cce5d180393dc8cce0abec0a7102adb6c7b1eef6016d60a" +dependencies = [ + "generic-array 0.14.7", + "rand_core 0.6.4", + "typenum", +] + +[[package]] +name = "crypto-common" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77727bb15fa921304124b128af125e7e3b968275d1b108b379190264f4423710" +dependencies = [ + "hybrid-array", +] + +[[package]] +name = "ctr" +version = "0.9.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0369ee1ad671834580515889b80f2ea915f23b8be8d0daa4bbaf2ac5c7590835" +dependencies = [ + "cipher 0.4.4", +] + +[[package]] +name = "ctr" +version = "0.10.0-rc.4" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "fee683dd898fbd052617b4514bc31f98bc32081a83b69ec46adef3b1ef4ae36f" +dependencies = [ + "cipher 0.5.1", +] + +[[package]] +name = "ctutils" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1005a6d4446f5120ef475ad3d2af2b30c49c2c9c6904258e3bb30219bebed5e4" +dependencies = [ + "cmov", +] + +[[package]] +name = "curve25519-dalek" +version = "4.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97fb8b7c4503de7d6ae7b42ab72a5a59857b4c937ec27a3d4539dba95b5ab2be" +dependencies = [ + "cfg-if", + "cpufeatures 0.2.17", + "curve25519-dalek-derive", + "digest 0.10.7", + "fiat-crypto", + "rustc_version", + "subtle", + "zeroize", +] + +[[package]] +name = "curve25519-dalek-derive" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f46882e17999c6cc590af592290432be3bce0428cb0d5f8b6715e4dc7b383eb3" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.117", +] + +[[package]] +name = "darling" +version = "0.20.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc7f46116c46ff9ab3eb1597a45688b6715c6e628b5c133e288e709a29bcb4ee" +dependencies = [ + "darling_core", + "darling_macro", +] + +[[package]] +name = "darling_core" +version = "0.20.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0d00b9596d185e565c2207a0b01f8bd1a135483d02d9b7b0a54b11da8d53412e" +dependencies = [ + "fnv", + "ident_case", + "proc-macro2", + "quote", + "strsim", + "syn 2.0.117", +] + +[[package]] +name = "darling_macro" +version = "0.20.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc34b93ccb385b40dc71c6fceac4b2ad23662c7eeb248cf10d529b7e055b6ead" +dependencies = [ + "darling_core", + "quote", + "syn 2.0.117", +] + +[[package]] +name = "dashmap" +version = "7.0.0-rc2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "e4a1e35a65fe0538a60167f0ada6e195ad5d477f6ddae273943596d4a1a5730b" +dependencies = [ + "cfg-if", + "crossbeam-utils", + "equivalent", + "hashbrown 0.15.5", + "lock_api", + "parking_lot_core", +] + +[[package]] +name = "data-encoding" +version = "2.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d7a1e2f27636f116493b8b860f5546edb47c8d8f8ea73e1d2a20be88e28d1fea" + +[[package]] +name = "db" +version = "0.2.9" +dependencies = [ + "anyhow", + "async-trait", + "config", + "deadpool-redis", + "rand 0.10.0", + "sea-orm", + "tokio", +] + +[[package]] +name = "deadpool" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "883466cb8db62725aee5f4a6011e8a5d42912b42632df32aad57fc91127c6e04" +dependencies = [ + "deadpool-runtime", + "num_cpus", + "tokio", +] + +[[package]] +name = "deadpool-redis" +version = "0.23.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bafa30c49dafe086d10116074e422ad7fc1c3cf554697e744a3ab112599ebd09" +dependencies = [ + "deadpool", + "redis", +] + +[[package]] +name = "deadpool-runtime" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2657f61fb1dd8bf37a8d51093cc7cee4e77125b22f7753f49b289f831bec2bae" +dependencies = [ + "tokio", +] + +[[package]] +name = "deflate64" +version = "0.1.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac6b926516df9c60bfa16e107b21086399f8285a44ca9711344b9e553c5146e2" + +[[package]] +name = "delegate" +version = "0.13.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "780eb241654bf097afb00fc5f054a09b687dad862e485fdcf8399bb056565370" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.117", +] + +[[package]] +name = "der" +version = "0.7.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"e7c1832837b905bbfb5101e07cc24c8deddf52f93225eee6ead5f4d63d53ddcb" +dependencies = [ + "const-oid 0.9.6", + "pem-rfc7468 0.7.0", + "zeroize", +] + +[[package]] +name = "deranged" +version = "0.5.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7cd812cc2bc1d69d4764bd80df88b4317eaef9e773c75226407d9bc0876b211c" +dependencies = [ + "powerfmt", + "serde_core", +] + +[[package]] +name = "derive_builder" +version = "0.20.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "507dfb09ea8b7fa618fcf76e953f4f5e192547945816d5358edffe39f6f94947" +dependencies = [ + "derive_builder_macro", +] + +[[package]] +name = "derive_builder_core" +version = "0.20.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2d5bcf7b024d6835cfb3d473887cd966994907effbe9227e8c8219824d06c4e8" +dependencies = [ + "darling", + "proc-macro2", + "quote", + "syn 2.0.117", +] + +[[package]] +name = "derive_builder_macro" +version = "0.20.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ab63b0e2bf4d5928aff72e83a7dace85d7bba5fe12dcc3c5a572d78caffd3f3c" +dependencies = [ + "derive_builder_core", + "syn 2.0.117", +] + +[[package]] +name = "derive_more" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d751e9e49156b02b44f9c1815bcb94b984cdcc4396ecc32521c739452808b134" +dependencies = [ + "derive_more-impl", +] + +[[package]] +name = "derive_more-impl" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "799a97264921d8623a957f6c3b9011f3b5492f557bbb7a5a19b7fa6d06ba8dcb" +dependencies = [ + "convert_case 0.10.0", + "proc-macro2", + "quote", + "rustc_version", + "syn 2.0.117", + "unicode-xid", +] + +[[package]] +name = "des" +version = "0.9.0-rc.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3214053e68a813b9c06ef61075c844f3a1cdeb307d8998ea8555c063caa52fa9" +dependencies = 
[ + "cipher 0.5.1", +] + +[[package]] +name = "digest" +version = "0.10.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" +dependencies = [ + "block-buffer 0.10.4", + "const-oid 0.9.6", + "crypto-common 0.1.7", + "subtle", +] + +[[package]] +name = "digest" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4850db49bf08e663084f7fb5c87d202ef91a3907271aff24a94eb97ff039153c" +dependencies = [ + "block-buffer 0.12.0", + "const-oid 0.10.2", + "crypto-common 0.2.1", +] + +[[package]] +name = "dirs" +version = "6.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3e8aa94d75141228480295a7d0e7feb620b1a5ad9f12bc40be62411e38cce4e" +dependencies = [ + "dirs-sys", +] + +[[package]] +name = "dirs-sys" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e01a3366d27ee9890022452ee61b2b63a67e6f13f58900b651ff5665f0bb1fab" +dependencies = [ + "libc", + "option-ext", + "redox_users", + "windows-sys 0.61.2", +] + +[[package]] +name = "displaydoc" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.117", +] + +[[package]] +name = "dotenvy" +version = "0.15.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1aaf95b3e5c8f23aa320147307562d361db0ae0d51242340f558153b4eb2439b" + +[[package]] +name = "dyn-clone" +version = "1.0.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d0881ea181b1df73ff77ffaaf9c7544ecc11e82fba9b5f27b262a3c73a332555" + +[[package]] +name = "ecdsa" +version = "0.16.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee27f32b5c5292967d2d4a9d7f1e0b0aed2c15daded5a60300e4abb9d8020bca" 
+dependencies = [ + "der", + "digest 0.10.7", + "elliptic-curve", + "rfc6979", + "signature 2.2.0", + "spki", +] + +[[package]] +name = "ed25519" +version = "2.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "115531babc129696a58c64a4fef0a8bf9e9698629fb97e9e40767d235cfbcd53" +dependencies = [ + "pkcs8", + "signature 2.2.0", +] + +[[package]] +name = "ed25519-dalek" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70e796c081cee67dc755e1a36a0a172b897fab85fc3f6bc48307991f64e4eca9" +dependencies = [ + "curve25519-dalek", + "ed25519", + "rand_core 0.6.4", + "serde", + "sha2 0.10.9", + "subtle", + "zeroize", +] + +[[package]] +name = "educe" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d7bc049e1bd8cdeb31b68bbd586a9464ecf9f3944af3958a7a9d0f8b9799417" +dependencies = [ + "enum-ordinalize", + "proc-macro2", + "quote", + "syn 2.0.117", +] + +[[package]] +name = "either" +version = "1.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719" +dependencies = [ + "serde", +] + +[[package]] +name = "elliptic-curve" +version = "0.13.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b5e6043086bf7973472e0c7dff2142ea0b680d30e18d9cc40f267efbf222bd47" +dependencies = [ + "base16ct 0.2.0", + "crypto-bigint", + "digest 0.10.7", + "ff", + "generic-array 0.14.7", + "group", + "hkdf", + "pem-rfc7468 0.7.0", + "pkcs8", + "rand_core 0.6.4", + "sec1 0.7.3", + "subtle", + "zeroize", +] + +[[package]] +name = "email" +version = "0.2.9" +dependencies = [ + "anyhow", + "config", + "lettre", + "regex", + "serde", + "tokio", +] + +[[package]] +name = "email-encoding" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9298e6504d9b9e780ed3f7dfd43a61be8cd0e09eb07f7706a945b0072b6670b6" +dependencies 
= [ + "base64 0.22.1", + "memchr", +] + +[[package]] +name = "email-server" +version = "0.2.9" +dependencies = [ + "anyhow", + "chrono", + "clap", + "config", + "db", + "service", + "slog", + "tokio", +] + +[[package]] +name = "email_address" +version = "0.2.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e079f19b08ca6239f47f8ba8509c11cf3ea30095831f7fed61441475edd8c449" + +[[package]] +name = "encoding_rs" +version = "0.8.35" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75030f3c4f45dafd7586dd6780965a8c7e8e285a5ecb86713e63a79c5b2766f3" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "enum-ordinalize" +version = "4.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4a1091a7bb1f8f2c4b28f1fe2cef4980ca2d410a3d727d67ecc3178c9b0800f0" +dependencies = [ + "enum-ordinalize-derive", +] + +[[package]] +name = "enum-ordinalize-derive" +version = "4.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ca9601fb2d62598ee17836250842873a413586e5d7ed88b356e38ddbb0ec631" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.117", +] + +[[package]] +name = "enum_dispatch" +version = "0.3.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aa18ce2bc66555b3218614519ac839ddb759a7d6720732f979ef8d13be147ecd" +dependencies = [ + "once_cell", + "proc-macro2", + "quote", + "syn 2.0.117", +] + +[[package]] +name = "equator" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4711b213838dfee0117e3be6ac926007d7f433d7bbe33595975d4190cb07e6fc" +dependencies = [ + "equator-macro", +] + +[[package]] +name = "equator-macro" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "44f23cf4b44bfce11a86ace86f8a73ffdec849c9fd00a386a53d278bd9e81fb3" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.117", +] + +[[package]] +name = 
"equivalent" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f" + +[[package]] +name = "erased-serde" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c138974f9d5e7fe373eb04df7cae98833802ae4b11c24ac7039a21d5af4b26c" +dependencies = [ + "serde", +] + +[[package]] +name = "errno" +version = "0.3.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "39cab71617ae0d63f51a36d69f866391735b51691dbda63cf6f96d042b63efeb" +dependencies = [ + "libc", + "windows-sys 0.61.2", +] + +[[package]] +name = "etcetera" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "136d1b5283a1ab77bd9257427ffd09d8667ced0570b6f938942bc7568ed5b943" +dependencies = [ + "cfg-if", + "home", + "windows-sys 0.48.0", +] + +[[package]] +name = "event-listener" +version = "5.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e13b66accf52311f30a0db42147dadea9850cb48cd070028831ae5f5d4b856ab" +dependencies = [ + "concurrent-queue", + "parking", + "pin-project-lite", +] + +[[package]] +name = "event-listener-strategy" +version = "0.5.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8be9f3dfaaffdae2972880079a491a1a8bb7cbed0b8dd7a347f668b4150a3b93" +dependencies = [ + "event-listener", + "pin-project-lite", +] + +[[package]] +name = "eventsource-stream" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "74fef4569247a5f429d9156b9d0a2599914385dd189c539334c625d8099d90ab" +dependencies = [ + "futures-core", + "nom 7.1.3", + "pin-project-lite", +] + +[[package]] +name = "exr" +version = "1.74.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4300e043a56aa2cb633c01af81ca8f699a321879a7854d3896a0ba89056363be" +dependencies = [ + "bit_field", + 
"half", + "lebe", + "miniz_oxide", + "rayon-core", + "smallvec", + "zune-inflate", +] + +[[package]] +name = "fancy-regex" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "531e46835a22af56d1e3b66f04844bed63158bc094a628bec1d321d9b4c44bf2" +dependencies = [ + "bit-set", + "regex-automata", + "regex-syntax", +] + +[[package]] +name = "fastrand" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" + +[[package]] +name = "fax" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f05de7d48f37cd6730705cbca900770cab77a89f413d23e100ad7fad7795a0ab" +dependencies = [ + "fax_derive", +] + +[[package]] +name = "fax_derive" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a0aca10fb742cb43f9e7bb8467c91aa9bcb8e3ffbc6a6f7389bb93ffc920577d" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.117", +] + +[[package]] +name = "fdeflate" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e6853b52649d4ac5c0bd02320cddc5ba956bdb407c4b75a2c6b75bf51500f8c" +dependencies = [ + "simd-adler32", +] + +[[package]] +name = "ff" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c0b50bfb653653f9ca9095b427bed08ab8d75a137839d9ad64eb11810d5b6393" +dependencies = [ + "rand_core 0.6.4", + "subtle", +] + +[[package]] +name = "fiat-crypto" +version = "0.2.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "28dea519a9695b9977216879a3ebfddf92f1c08c05d984f8996aecd6ecdc811d" + +[[package]] +name = "filetime" +version = "0.2.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f98844151eee8917efc50bd9e8318cb963ae8b297431495d3f758616ea5c57db" +dependencies = [ + "cfg-if", + "libc", + "libredox", +] + 
+[[package]] +name = "find-msvc-tools" +version = "0.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5baebc0774151f905a1a2cc41989300b1e6fbb29aff0ceffa1064fdd3088d582" + +[[package]] +name = "flate2" +version = "1.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "843fba2746e448b37e26a819579957415c8cef339bf08564fe8b7ddbd959573c" +dependencies = [ + "crc32fast", + "miniz_oxide", + "zlib-rs", +] + +[[package]] +name = "flume" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da0e4dd2a88388a1f4ccc7c9ce104604dab68d9f408dc34cd45823d5a9069095" +dependencies = [ + "futures-core", + "futures-sink", + "spin", +] + +[[package]] +name = "fnv" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" + +[[package]] +name = "foldhash" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2" + +[[package]] +name = "form_urlencoded" +version = "1.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cb4cb245038516f5f85277875cdaa4f7d2c9a0fa0468de06ed190163b1581fcf" +dependencies = [ + "percent-encoding", +] + +[[package]] +name = "funty" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6d5a32815ae3f33302d95fdcb2ce17862f8c65363dcfd29360480ba1001fc9c" + +[[package]] +name = "futures" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b147ee9d1f6d097cef9ce628cd2ee62288d963e16fb287bd9286455b241382d" +dependencies = [ + "futures-channel", + "futures-core", + "futures-executor", + "futures-io", + "futures-sink", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-channel" +version = "0.3.32" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "07bbe89c50d7a535e539b8c17bc0b49bdb77747034daa8087407d655f3f7cc1d" +dependencies = [ + "futures-core", + "futures-sink", +] + +[[package]] +name = "futures-core" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7e3450815272ef58cec6d564423f6e755e25379b217b0bc688e295ba24df6b1d" + +[[package]] +name = "futures-executor" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf29c38818342a3b26b5b923639e7b1f4a61fc5e76102d4b1981c6dc7a7579d" +dependencies = [ + "futures-core", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-intrusive" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d930c203dd0b6ff06e0201a4a2fe9149b43c684fd4420555b26d21b1a02956f" +dependencies = [ + "futures-core", + "lock_api", + "parking_lot", +] + +[[package]] +name = "futures-io" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cecba35d7ad927e23624b22ad55235f2239cfa44fd10428eecbeba6d6a717718" + +[[package]] +name = "futures-macro" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e835b70203e41293343137df5c0664546da5745f82ec9b84d40be8336958447b" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.117", +] + +[[package]] +name = "futures-sink" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c39754e157331b013978ec91992bde1ac089843443c49cbc7f46150b0fad0893" + +[[package]] +name = "futures-task" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "037711b3d59c33004d3856fbdc83b99d4ff37a24768fa1be9ce3538a1cde4393" + +[[package]] +name = "futures-timer" +version = "3.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"f288b0a4f20f9a56b5d1da57e2227c661b7b16168e2f72365f57b63326e29b24" + +[[package]] +name = "futures-util" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "389ca41296e6190b48053de0321d02a77f32f8a5d2461dd38762c0593805c6d6" +dependencies = [ + "futures-channel", + "futures-core", + "futures-io", + "futures-macro", + "futures-sink", + "futures-task", + "memchr", + "pin-project-lite", + "slab", +] + +[[package]] +name = "generic-array" +version = "0.14.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" +dependencies = [ + "typenum", + "version_check", + "zeroize", +] + +[[package]] +name = "generic-array" +version = "1.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eaf57c49a95fd1fe24b90b3033bee6dc7e8f1288d51494cb44e627c295e38542" +dependencies = [ + "generic-array 0.14.7", + "rustversion", + "typenum", +] + +[[package]] +name = "getrandom" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff2abc00be7fca6ebc474524697ae276ad847ad0a6b3faa4bcb027e9a4614ad0" +dependencies = [ + "cfg-if", + "js-sys", + "libc", + "wasi", + "wasm-bindgen", +] + +[[package]] +name = "getrandom" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "899def5c37c4fd7b2664648c28120ecec138e4d395b459e5ca34f9cce2dd77fd" +dependencies = [ + "cfg-if", + "js-sys", + "libc", + "r-efi 5.3.0", + "wasip2", + "wasm-bindgen", +] + +[[package]] +name = "getrandom" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0de51e6874e94e7bf76d726fc5d13ba782deca734ff60d5bb2fb2607c7406555" +dependencies = [ + "cfg-if", + "js-sys", + "libc", + "r-efi 6.0.0", + "rand_core 0.10.0", + "wasip2", + "wasip3", + "wasm-bindgen", +] + +[[package]] +name = "ghash" +version = "0.5.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "f0d8a4362ccb29cb0b265253fb0a2728f592895ee6854fd9bc13f2ffda266ff1" +dependencies = [ + "opaque-debug", + "polyval 0.6.2", +] + +[[package]] +name = "ghash" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2eecf2d5dc9b66b732b97707a0210906b1d30523eb773193ab777c0c84b3e8d5" +dependencies = [ + "polyval 0.7.1", +] + +[[package]] +name = "gif" +version = "0.14.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f5df2ba84018d80c213569363bdcd0c64e6933c67fe4c1d60ecf822971a3c35e" +dependencies = [ + "color_quant", + "weezl", +] + +[[package]] +name = "git" +version = "0.2.9" +dependencies = [ + "actix-web", + "anyhow", + "async-stream", + "base64 0.22.1", + "chrono", + "config", + "db", + "deadpool-redis", + "flate2", + "futures", + "futures-util", + "git2", + "git2-ext", + "git2-hooks", + "globset", + "hex", + "models", + "num_cpus", + "qdrant-client", + "redis", + "reqwest 0.13.2", + "russh", + "sea-orm", + "serde", + "serde_json", + "sha1", + "sha2 0.11.0", + "slog", + "ssh-key", + "sysinfo", + "tar", + "tokio", + "tokio-util", + "uuid", + "zip", +] + +[[package]] +name = "git-hook" +version = "0.2.9" +dependencies = [ + "anyhow", + "chrono", + "clap", + "config", + "db", + "git", + "reqwest 0.13.2", + "slog", + "tokio", + "tokio-util", + "tracing", + "tracing-subscriber", +] + +[[package]] +name = "git2" +version = "0.20.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b88256088d75a56f8ecfa070513a775dd9107f6530ef14919dac831af9cfe2b" +dependencies = [ + "bitflags", + "libc", + "libgit2-sys", + "log", + "openssl-probe 0.1.6", + "openssl-sys", + "url", +] + +[[package]] +name = "git2-ext" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b351b3ef9a04bbd24822138469eddbc8db7c85ae81a13acb6cbb803053de19b" +dependencies = [ + "bstr", + "git2", + 
"itertools", + "log", + "pkg-config", + "shlex", + "tempfile", + "which", +] + +[[package]] +name = "git2-hooks" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e21a2c5eee3085f2b622805d4f4c878d9519bf70fa76bed0923b82a25f24199" +dependencies = [ + "git2", + "gix-path", + "log", + "shellexpand", + "thiserror 2.0.18", +] + +[[package]] +name = "gitserver" +version = "0.2.9" +dependencies = [ + "anyhow", + "chrono", + "clap", + "config", + "db", + "git", + "slog", + "tokio", +] + +[[package]] +name = "gix-path" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09c31d4373bda7fab9eb01822927b55185a378d6e1bf737e0a54c743ad806658" +dependencies = [ + "bstr", + "gix-trace", + "gix-validate", + "thiserror 2.0.18", +] + +[[package]] +name = "gix-trace" +version = "0.1.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f69a13643b8437d4ca6845e08143e847a36ca82903eed13303475d0ae8b162e0" + +[[package]] +name = "gix-validate" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ec1eff98d91941f47766367cba1be746bab662bad761d9891ae6f7882f7840b" +dependencies = [ + "bstr", +] + +[[package]] +name = "glam" +version = "0.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "333928d5eb103c5d4050533cec0384302db6be8ef7d3cebd30ec6a35350353da" + +[[package]] +name = "glam" +version = "0.15.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3abb554f8ee44336b72d522e0a7fe86a29e09f839a36022fa869a7dfe941a54b" + +[[package]] +name = "glam" +version = "0.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4126c0479ccf7e8664c36a2d719f5f2c140fbb4f9090008098d2c291fa5b3f16" + +[[package]] +name = "glam" +version = "0.17.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"e01732b97afd8508eee3333a541b9f7610f454bb818669e66e90f5f57c93a776" + +[[package]] +name = "glam" +version = "0.18.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "525a3e490ba77b8e326fb67d4b44b4bd2f920f44d4cc73ccec50adc68e3bee34" + +[[package]] +name = "glam" +version = "0.19.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b8509e6791516e81c1a630d0bd7fbac36d2fa8712a9da8662e716b52d5051ca" + +[[package]] +name = "glam" +version = "0.20.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f43e957e744be03f5801a55472f593d43fabdebf25a4585db250f04d86b1675f" + +[[package]] +name = "glam" +version = "0.21.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "518faa5064866338b013ff9b2350dc318e14cc4fcd6cb8206d7e7c9886c98815" + +[[package]] +name = "glam" +version = "0.22.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "12f597d56c1bd55a811a1be189459e8fad2bbc272616375602443bdfb37fa774" + +[[package]] +name = "glam" +version = "0.23.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e4afd9ad95555081e109fe1d21f2a30c691b5f0919c67dfa690a2e1eb6bd51c" + +[[package]] +name = "glam" +version = "0.24.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b5418c17512bdf42730f9032c74e1ae39afc408745ebb2acf72fbc4691c17945" + +[[package]] +name = "glam" +version = "0.25.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "151665d9be52f9bb40fc7966565d39666f2d1e69233571b71b87791c7e0528b3" + +[[package]] +name = "glam" +version = "0.27.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e05e7e6723e3455f4818c7b26e855439f7546cf617ef669d1adedb8669e5cb9" + +[[package]] +name = "glam" +version = "0.28.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"779ae4bf7e8421cf91c0b3b64e7e8b40b862fba4d393f59150042de7c4965a94" + +[[package]] +name = "glam" +version = "0.29.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8babf46d4c1c9d92deac9f7be466f76dfc4482b6452fc5024b5e8daf6ffeb3ee" + +[[package]] +name = "glam" +version = "0.30.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "19fc433e8437a212d1b6f1e68c7824af3aed907da60afa994e7f542d18d12aa9" + +[[package]] +name = "glam" +version = "0.31.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "556f6b2ea90b8d15a74e0e7bb41671c9bdf38cd9f78c284d750b9ce58a2b5be7" + +[[package]] +name = "glam" +version = "0.32.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f70749695b063ecbf6b62949ccccde2e733ec3ecbbd71d467dca4e5c6c97cca0" + +[[package]] +name = "glob" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0cc23270f6e1808e30a928bdc84dea0b9b4136a8bc82338574f23baf47bbd280" + +[[package]] +name = "globset" +version = "0.4.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "52dfc19153a48bde0cbd630453615c8151bce3a5adfac7a0aebfbf0a1e1f57e3" +dependencies = [ + "aho-corasick", + "bstr", + "log", + "regex-automata", + "regex-syntax", +] + +[[package]] +name = "group" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f0f9ef7462f7c099f518d754361858f86d8a07af53ba9af0fe635bbccb151a63" +dependencies = [ + "ff", + "rand_core 0.6.4", + "subtle", +] + +[[package]] +name = "h2" +version = "0.3.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0beca50380b1fc32983fc1cb4587bfa4bb9e78fc259aad4a0032d2080309222d" +dependencies = [ + "bytes", + "fnv", + "futures-core", + "futures-sink", + "futures-util", + "http 0.2.12", + "indexmap 2.13.0", + "slab", + "tokio", + "tokio-util", + "tracing", +] + +[[package]] +name = "h2" 
+version = "0.4.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2f44da3a8150a6703ed5d34e164b875fd14c2cdab9af1252a9a1020bde2bdc54" +dependencies = [ + "atomic-waker", + "bytes", + "fnv", + "futures-core", + "futures-sink", + "http 1.4.0", + "indexmap 2.13.0", + "slab", + "tokio", + "tokio-util", + "tracing", +] + +[[package]] +name = "half" +version = "2.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ea2d84b969582b4b1864a92dc5d27cd2b77b622a8d79306834f1be5ba20d84b" +dependencies = [ + "cfg-if", + "crunchy", + "num-traits", + "zerocopy", +] + +[[package]] +name = "hashbrown" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" +dependencies = [ + "ahash 0.7.8", +] + +[[package]] +name = "hashbrown" +version = "0.15.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9229cfe53dfd69f0609a49f65461bd93001ea1ef889cd5529dd176593f5338a1" +dependencies = [ + "allocator-api2", + "equivalent", + "foldhash", +] + +[[package]] +name = "hashbrown" +version = "0.16.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "841d1cc9bed7f9236f321df977030373f4a4163ae1a7dbfe1a51a2c1a51d9100" + +[[package]] +name = "hashlink" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7382cf6263419f2d8df38c55d7da83da5c18aef87fc7a7fc1fb1e344edfe14c1" +dependencies = [ + "hashbrown 0.15.5", +] + +[[package]] +name = "hax-lib" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "74d9ba66d1739c68e0219b2b2238b5c4145f491ebf181b9c6ab561a19352ae86" +dependencies = [ + "hax-lib-macros", + "num-bigint", + "num-traits", +] + +[[package]] +name = "hax-lib-macros" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"24ba777a231a58d1bce1d68313fa6b6afcc7966adef23d60f45b8a2b9b688bf1" +dependencies = [ + "hax-lib-macros-types", + "proc-macro-error2", + "proc-macro2", + "quote", + "syn 2.0.117", +] + +[[package]] +name = "hax-lib-macros-types" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "867e19177d7425140b417cd27c2e05320e727ee682e98368f88b7194e80ad515" +dependencies = [ + "proc-macro2", + "quote", + "serde", + "serde_json", + "uuid", +] + +[[package]] +name = "headers" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b3314d5adb5d94bcdf56771f2e50dbbc80bb4bdf88967526706205ac9eff24eb" +dependencies = [ + "base64 0.22.1", + "bytes", + "headers-core", + "http 1.4.0", + "httpdate", + "mime", + "sha1", +] + +[[package]] +name = "headers-core" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "54b4a22553d4242c49fddb9ba998a99962b5cc6f22cb5a3482bec22522403ce4" +dependencies = [ + "http 1.4.0", +] + +[[package]] +name = "heck" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" + +[[package]] +name = "heck" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" + +[[package]] +name = "hermit-abi" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc0fef456e4baa96da950455cd02c081ca953b141298e41db3fc7e36b1da849c" + +[[package]] +name = "hex" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" + +[[package]] +name = "hex-literal" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"6fe2267d4ed49bc07b63801559be28c718ea06c4738b7a03c94df7386d2cde46" + +[[package]] +name = "hkdf" +version = "0.12.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b5f8eb2ad728638ea2c7d47a21db23b7b58a72ed6a38256b8a1849f15fbbdf7" +dependencies = [ + "hmac", +] + +[[package]] +name = "hmac" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c49c37c09c17a53d937dfbb742eb3a961d65a994e6bcdcf37e7399d0cc8ab5e" +dependencies = [ + "digest 0.10.7", +] + +[[package]] +name = "home" +version = "0.5.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cc627f471c528ff0c4a49e1d5e60450c8f6461dd6d10ba9dcd3a61d3dff7728d" +dependencies = [ + "windows-sys 0.61.2", +] + +[[package]] +name = "hostname" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "617aaa3557aef3810a6369d0a99fac8a080891b68bd9f9812a1eeda0c0730cbd" +dependencies = [ + "cfg-if", + "libc", + "windows-link", +] + +[[package]] +name = "http" +version = "0.2.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "601cbb57e577e2f5ef5be8e7b83f0f63994f25aa94d673e54a92d5c516d101f1" +dependencies = [ + "bytes", + "fnv", + "itoa", +] + +[[package]] +name = "http" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3ba2a386d7f85a81f119ad7498ebe444d2e22c2af0b86b069416ace48b3311a" +dependencies = [ + "bytes", + "itoa", +] + +[[package]] +name = "http-body" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184" +dependencies = [ + "bytes", + "http 1.4.0", +] + +[[package]] +name = "http-body-util" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b021d93e26becf5dc7e1b75b1bed1fd93124b374ceb73f43d4d4eafec896a64a" +dependencies = [ + "bytes", + 
"futures-core", + "http 1.4.0", + "http-body", + "pin-project-lite", +] + +[[package]] +name = "httparse" +version = "1.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6dbf3de79e51f3d586ab4cb9d5c3e2c14aa28ed23d180cf89b4df0454a69cc87" + +[[package]] +name = "httpdate" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" + +[[package]] +name = "hybrid-array" +version = "0.4.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8655f91cd07f2b9d0c24137bd650fe69617773435ee5ec83022377777ce65ef1" +dependencies = [ + "typenum", + "zeroize", +] + +[[package]] +name = "hyper" +version = "1.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2ab2d4f250c3d7b1c9fcdff1cece94ea4e2dfbec68614f7b87cb205f24ca9d11" +dependencies = [ + "atomic-waker", + "bytes", + "futures-channel", + "futures-core", + "h2 0.4.13", + "http 1.4.0", + "http-body", + "httparse", + "httpdate", + "itoa", + "pin-project-lite", + "pin-utils", + "smallvec", + "tokio", + "want", +] + +[[package]] +name = "hyper-http-proxy" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7ad4b0a1e37510028bc4ba81d0e38d239c39671b0f0ce9e02dfa93a8133f7c08" +dependencies = [ + "bytes", + "futures-util", + "headers", + "http 1.4.0", + "hyper", + "hyper-rustls", + "hyper-util", + "pin-project-lite", + "rustls-native-certs 0.7.3", + "tokio", + "tokio-rustls", + "tower-service", +] + +[[package]] +name = "hyper-rustls" +version = "0.27.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3c93eb611681b207e1fe55d5a71ecf91572ec8a6705cdb6857f7d8d5242cf58" +dependencies = [ + "http 1.4.0", + "hyper", + "hyper-util", + "log", + "rustls", + "rustls-native-certs 0.8.3", + "rustls-pki-types", + "tokio", + "tokio-rustls", + "tower-service", + "webpki-roots", +] + 
+[[package]] +name = "hyper-timeout" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b90d566bffbce6a75bd8b09a05aa8c2cb1fabb6cb348f8840c9e4c90a0d83b0" +dependencies = [ + "hyper", + "hyper-util", + "pin-project-lite", + "tokio", + "tower-service", +] + +[[package]] +name = "hyper-util" +version = "0.1.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "96547c2556ec9d12fb1578c4eaf448b04993e7fb79cbaad930a656880a6bdfa0" +dependencies = [ + "base64 0.22.1", + "bytes", + "futures-channel", + "futures-util", + "http 1.4.0", + "http-body", + "hyper", + "ipnet", + "libc", + "percent-encoding", + "pin-project-lite", + "socket2 0.6.3", + "tokio", + "tower-service", + "tracing", +] + +[[package]] +name = "iana-time-zone" +version = "0.1.65" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e31bc9ad994ba00e440a8aa5c9ef0ec67d5cb5e5cb0cc7f8b744a35b389cc470" +dependencies = [ + "android_system_properties", + "core-foundation-sys", + "iana-time-zone-haiku", + "js-sys", + "log", + "wasm-bindgen", + "windows-core", +] + +[[package]] +name = "iana-time-zone-haiku" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f31827a206f56af32e590ba56d5d2d085f558508192593743f16b2306495269f" +dependencies = [ + "cc", +] + +[[package]] +name = "icu_collections" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4c6b649701667bbe825c3b7e6388cb521c23d88644678e83c0c4d0a621a34b43" +dependencies = [ + "displaydoc", + "potential_utf", + "yoke", + "zerofrom", + "zerovec", +] + +[[package]] +name = "icu_locale_core" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "edba7861004dd3714265b4db54a3c390e880ab658fec5f7db895fae2046b5bb6" +dependencies = [ + "displaydoc", + "litemap", + "tinystr", + "writeable", + "zerovec", +] + +[[package]] +name = "icu_normalizer" 
+version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f6c8828b67bf8908d82127b2054ea1b4427ff0230ee9141c54251934ab1b599" +dependencies = [ + "icu_collections", + "icu_normalizer_data", + "icu_properties", + "icu_provider", + "smallvec", + "zerovec", +] + +[[package]] +name = "icu_normalizer_data" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7aedcccd01fc5fe81e6b489c15b247b8b0690feb23304303a9e560f37efc560a" + +[[package]] +name = "icu_properties" +version = "2.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "020bfc02fe870ec3a66d93e677ccca0562506e5872c650f893269e08615d74ec" +dependencies = [ + "icu_collections", + "icu_locale_core", + "icu_properties_data", + "icu_provider", + "zerotrie", + "zerovec", +] + +[[package]] +name = "icu_properties_data" +version = "2.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "616c294cf8d725c6afcd8f55abc17c56464ef6211f9ed59cccffe534129c77af" + +[[package]] +name = "icu_provider" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85962cf0ce02e1e0a629cc34e7ca3e373ce20dda4c4d7294bbd0bf1fdb59e614" +dependencies = [ + "displaydoc", + "icu_locale_core", + "writeable", + "yoke", + "zerofrom", + "zerotrie", + "zerovec", +] + +[[package]] +name = "id-arena" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d3067d79b975e8844ca9eb072e16b31c3c1c36928edf9c6789548c524d0d954" + +[[package]] +name = "ident_case" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" + +[[package]] +name = "idna" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b0875f23caa03898994f6ddc501886a45c7d3d62d04d2d90788d47be1b1e4de" +dependencies = [ + 
"idna_adapter", + "smallvec", + "utf8_iter", +] + +[[package]] +name = "idna_adapter" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3acae9609540aa318d1bc588455225fb2085b9ed0c4f6bd0d9d5bcd86f1a0344" +dependencies = [ + "icu_normalizer", + "icu_properties", +] + +[[package]] +name = "image" +version = "0.25.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85ab80394333c02fe689eaf900ab500fbd0c2213da414687ebf995a65d5a6104" +dependencies = [ + "bytemuck", + "byteorder-lite", + "color_quant", + "exr", + "gif", + "image-webp", + "moxcms", + "num-traits", + "png", + "qoi", + "ravif", + "rayon", + "rgb", + "tiff", + "zune-core", + "zune-jpeg", +] + +[[package]] +name = "image-webp" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "525e9ff3e1a4be2fbea1fdf0e98686a6d98b4d8f937e1bf7402245af1909e8c3" +dependencies = [ + "byteorder-lite", + "quick-error", +] + +[[package]] +name = "imageproc" +version = "0.26.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a8046da590889acc65f5880004580ebb269bbef84d6c0f5c543ec2dece46638" +dependencies = [ + "ab_glyph", + "approx", + "getrandom 0.3.4", + "image", + "itertools", + "nalgebra", + "num", + "rand 0.9.2", + "rand_distr", + "rayon", + "rustdct", +] + +[[package]] +name = "imgref" +version = "1.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e7c5cedc30da3a610cac6b4ba17597bdf7152cf974e8aab3afb3d54455e371c8" + +[[package]] +name = "impl-more" +version = "0.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e8a5a9a0ff0086c7a148acb942baaabeadf9504d10400b5a05645853729b9cd2" + +[[package]] +name = "indexmap" +version = "1.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99" +dependencies = [ + "autocfg", + "hashbrown 
0.12.3", +] + +[[package]] +name = "indexmap" +version = "2.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7714e70437a7dc3ac8eb7e6f8df75fd8eb422675fc7678aff7364301092b1017" +dependencies = [ + "equivalent", + "hashbrown 0.16.1", + "serde", + "serde_core", +] + +[[package]] +name = "indoc" +version = "2.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "79cf5c93f93228cf8efb3ba362535fb11199ac548a09ce117c9b1adc3030d706" +dependencies = [ + "rustversion", +] + +[[package]] +name = "inherent" +version = "1.0.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c727f80bfa4a6c6e2508d2f05b6f4bfce242030bd88ed15ae5331c5b5d30fba7" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.117", +] + +[[package]] +name = "inout" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "879f10e63c20629ecabbb64a8010319738c66a5cd0c29b02d63d272b03751d01" +dependencies = [ + "block-padding", + "generic-array 0.14.7", +] + +[[package]] +name = "inout" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4250ce6452e92010fdf7268ccc5d14faa80bb12fc741938534c58f16804e03c7" +dependencies = [ + "hybrid-array", +] + +[[package]] +name = "instant" +version = "0.1.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e0242819d153cba4b4b05a5a8f2a7e9bbf97b6055b2a002b395c96b5ff3c0222" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "internal-russh-forked-ssh-key" +version = "0.6.11+upstream-0.6.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e0a77eae781ed6a7709fb15b64862fcca13d886b07c7e2786f5ed34e5e2b9187" +dependencies = [ + "argon2", + "bcrypt-pbkdf", + "ecdsa", + "ed25519-dalek", + "hex", + "hmac", + "p256", + "p384", + "p521", + "rand_core 0.6.4", + "rsa", + "sec1 0.7.3", + "sha1", + "sha2 0.10.9", + "signature 2.2.0", + "ssh-cipher 0.2.0", 
+ "ssh-encoding 0.2.0", + "subtle", + "zeroize", +] + +[[package]] +name = "interpolate_name" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c34819042dc3d3971c46c2190835914dfbe0c3c13f61449b2997f4e9722dfa60" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.117", +] + +[[package]] +name = "ipnet" +version = "2.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d98f6fed1fde3f8c21bc40a1abb88dd75e67924f9cffc3ef95607bad8017f8e2" + +[[package]] +name = "iri-string" +version = "0.7.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "25e659a4bb38e810ebc252e53b5814ff908a8c58c2a9ce2fae1bbec24cbf4e20" +dependencies = [ + "memchr", + "serde", +] + +[[package]] +name = "is_terminal_polyfill" +version = "1.70.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a6cb138bb79a146c1bd460005623e142ef0181e3d0219cb493e02f7d08a35695" + +[[package]] +name = "itertools" +version = "0.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b192c782037fadd9cfa75548310488aabdbf3d2da73885b31bd0abd03351285" +dependencies = [ + "either", +] + +[[package]] +name = "itoa" +version = "1.0.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f42a60cbdf9a97f5d2305f08a87dc4e09308d1276d28c869c684d7777685682" + +[[package]] +name = "jobserver" +version = "0.1.34" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9afb3de4395d6b3e67a780b6de64b51c978ecf11cb9a462c66be7d4ca9039d33" +dependencies = [ + "getrandom 0.3.4", + "libc", +] + +[[package]] +name = "js-sys" +version = "0.3.92" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cc4c90f45aa2e6eacbe8645f77fdea542ac97a494bcd117a67df9ff4d611f995" +dependencies = [ + "cfg-if", + "futures-util", + "once_cell", + "wasm-bindgen", +] + +[[package]] +name = "json-patch" +version = "3.0.1" +source 
= "registry+https://github.com/rust-lang/crates.io-index" +checksum = "863726d7afb6bc2590eeff7135d923545e5e964f004c2ccf8716c25e70a86f08" +dependencies = [ + "jsonptr", + "serde", + "serde_json", + "thiserror 1.0.69", +] + +[[package]] +name = "jsonpath-rust" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c00ae348f9f8fd2d09f82a98ca381c60df9e0820d8d79fce43e649b4dc3128b" +dependencies = [ + "pest", + "pest_derive", + "regex", + "serde_json", + "thiserror 2.0.18", +] + +[[package]] +name = "jsonptr" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5dea2b27dd239b2556ed7a25ba842fe47fd602e7fc7433c2a8d6106d4d9edd70" +dependencies = [ + "serde", + "serde_json", +] + +[[package]] +name = "k8s-openapi" +version = "0.24.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2c75b990324f09bef15e791606b7b7a296d02fc88a344f6eba9390970a870ad5" +dependencies = [ + "base64 0.22.1", + "chrono", + "schemars", + "serde", + "serde-value", + "serde_json", +] + +[[package]] +name = "kube" +version = "0.98.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32053dc495efad4d188c7b33cc7c02ef4a6e43038115348348876efd39a53cba" +dependencies = [ + "k8s-openapi", + "kube-client", + "kube-core", + "kube-derive", + "kube-runtime", +] + +[[package]] +name = "kube-client" +version = "0.98.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d34ad38cdfbd1fa87195d42569f57bb1dda6ba5f260ee32fef9570b7937a0c9" +dependencies = [ + "base64 0.22.1", + "bytes", + "chrono", + "either", + "futures", + "home", + "http 1.4.0", + "http-body", + "http-body-util", + "hyper", + "hyper-http-proxy", + "hyper-rustls", + "hyper-timeout", + "hyper-util", + "jsonpath-rust", + "k8s-openapi", + "kube-core", + "pem", + "rustls", + "rustls-pemfile", + "secrecy", + "serde", + "serde_json", + "serde_yaml", + "thiserror 2.0.18", + "tokio", + 
"tokio-util", + "tower 0.5.3", + "tower-http", + "tracing", +] + +[[package]] +name = "kube-core" +version = "0.98.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97aa830b288a178a90e784d1b0f1539f2d200d2188c7b4a3146d9dc983d596f3" +dependencies = [ + "chrono", + "form_urlencoded", + "http 1.4.0", + "json-patch", + "k8s-openapi", + "schemars", + "serde", + "serde-value", + "serde_json", + "thiserror 2.0.18", +] + +[[package]] +name = "kube-derive" +version = "0.98.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "37745d8a4076b77e0b1952e94e358726866c8e14ec94baaca677d47dcdb98658" +dependencies = [ + "darling", + "proc-macro2", + "quote", + "serde_json", + "syn 2.0.117", +] + +[[package]] +name = "kube-runtime" +version = "0.98.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a41af186a0fe80c71a13a13994abdc3ebff80859ca6a4b8a6079948328c135b" +dependencies = [ + "ahash 0.8.12", + "async-broadcast", + "async-stream", + "async-trait", + "backoff", + "educe", + "futures", + "hashbrown 0.15.5", + "hostname", + "json-patch", + "jsonptr", + "k8s-openapi", + "kube-client", + "parking_lot", + "pin-project", + "serde", + "serde_json", + "thiserror 2.0.18", + "tokio", + "tokio-util", + "tracing", +] + +[[package]] +name = "language-tags" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d4345964bb142484797b161f473a503a434de77149dd8c7427788c6e13379388" + +[[package]] +name = "lazy_static" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" +dependencies = [ + "spin", +] + +[[package]] +name = "leb128fmt" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09edd9e8b54e49e587e4f6295a7d29c3ea94d469cb40ab8ca70b288248a81db2" + +[[package]] +name = "lebe" +version = "0.5.3" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a79a3332a6609480d7d0c9eab957bca6b455b91bb84e66d19f5ff66294b85b8" + +[[package]] +name = "lettre" +version = "0.11.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "471816f3e24b85e820dee02cde962379ea1a669e5242f19c61bcbcffedf4c4fb" +dependencies = [ + "async-trait", + "base64 0.22.1", + "email-encoding", + "email_address", + "fastrand", + "futures-io", + "futures-util", + "httpdate", + "idna", + "mime", + "nom 8.0.0", + "percent-encoding", + "quoted_printable", + "rustls", + "socket2 0.6.3", + "tokio", + "tokio-rustls", + "url", + "webpki-roots", +] + +[[package]] +name = "lexical-core" +version = "1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7d8d125a277f807e55a77304455eb7b1cb52f2b18c143b60e766c120bd64a594" +dependencies = [ + "lexical-parse-float", + "lexical-parse-integer", + "lexical-util", + "lexical-write-float", + "lexical-write-integer", +] + +[[package]] +name = "lexical-parse-float" +version = "1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "52a9f232fbd6f550bc0137dcb5f99ab674071ac2d690ac69704593cb4abbea56" +dependencies = [ + "lexical-parse-integer", + "lexical-util", +] + +[[package]] +name = "lexical-parse-integer" +version = "1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a7a039f8fb9c19c996cd7b2fcce303c1b2874fe1aca544edc85c4a5f8489b34" +dependencies = [ + "lexical-util", +] + +[[package]] +name = "lexical-util" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2604dd126bb14f13fb5d1bd6a66155079cb9fa655b37f875b3a742c705dbed17" + +[[package]] +name = "lexical-write-float" +version = "1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "50c438c87c013188d415fbabbb1dceb44249ab81664efbd31b14ae55dabb6361" +dependencies = [ + "lexical-util", + 
"lexical-write-integer", +] + +[[package]] +name = "lexical-write-integer" +version = "1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "409851a618475d2d5796377cad353802345cba92c867d9fbcde9cf4eac4e14df" +dependencies = [ + "lexical-util", +] + +[[package]] +name = "libbz2-rs-sys" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2c4a545a15244c7d945065b5d392b2d2d7f21526fba56ce51467b06ed445e8f7" + +[[package]] +name = "libc" +version = "0.2.183" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b5b646652bf6661599e1da8901b3b9522896f01e736bad5f723fe7a3a27f899d" + +[[package]] +name = "libcrux-intrinsics" +version = "0.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bc9ee7ef66569dd7516454fe26de4e401c0c62073929803486b96744594b9632" +dependencies = [ + "core-models", + "hax-lib", +] + +[[package]] +name = "libcrux-ml-kem" +version = "0.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4bb6a88086bf11bd2ec90926c749c4a427f2e59841437dbdede8cde8a96334ab" +dependencies = [ + "hax-lib", + "libcrux-intrinsics", + "libcrux-platform", + "libcrux-secrets", + "libcrux-sha3", + "libcrux-traits", + "rand 0.9.2", + "tls_codec", +] + +[[package]] +name = "libcrux-platform" +version = "0.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "db82d058aa76ea315a3b2092f69dfbd67ddb0e462038a206e1dcd73f058c0778" +dependencies = [ + "libc", +] + +[[package]] +name = "libcrux-secrets" +version = "0.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e4dbbf6bc9f2bc0f20dc3bea3e5c99adff3bdccf6d2a40488963da69e2ec307" +dependencies = [ + "hax-lib", +] + +[[package]] +name = "libcrux-sha3" +version = "0.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2400bec764d1c75b8a496d5747cffe32f1fb864a12577f0aca2f55a92021c962" 
+dependencies = [ + "hax-lib", + "libcrux-intrinsics", + "libcrux-platform", + "libcrux-traits", +] + +[[package]] +name = "libcrux-traits" +version = "0.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9adfd58e79d860f6b9e40e35127bfae9e5bd3ade33201d1347459011a2add034" +dependencies = [ + "libcrux-secrets", + "rand 0.9.2", +] + +[[package]] +name = "libfuzzer-sys" +version = "0.4.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f12a681b7dd8ce12bff52488013ba614b869148d54dd79836ab85aafdd53f08d" +dependencies = [ + "arbitrary", + "cc", +] + +[[package]] +name = "libgit2-sys" +version = "0.18.3+1.9.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c9b3acc4b91781bb0b3386669d325163746af5f6e4f73e6d2d630e09a35f3487" +dependencies = [ + "cc", + "libc", + "libssh2-sys", + "libz-sys", + "openssl-sys", + "pkg-config", +] + +[[package]] +name = "libm" +version = "0.2.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6d2cec3eae94f9f509c767b45932f1ada8350c4bdb85af2fcab4a3c14807981" + +[[package]] +name = "libredox" +version = "0.1.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7ddbf48fd451246b1f8c2610bd3b4ac0cc6e149d89832867093ab69a17194f08" +dependencies = [ + "bitflags", + "libc", + "plain", + "redox_syscall 0.7.3", +] + +[[package]] +name = "libsqlite3-sys" +version = "0.30.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2e99fb7a497b1e3339bc746195567ed8d3e24945ecd636e3619d20b9de9e9149" +dependencies = [ + "cc", + "pkg-config", + "vcpkg", +] + +[[package]] +name = "libssh2-sys" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "220e4f05ad4a218192533b300327f5150e809b54c4ec83b5a1d91833601811b9" +dependencies = [ + "cc", + "libc", + "libz-sys", + "openssl-sys", + "pkg-config", + "vcpkg", +] + +[[package]] +name = "libz-sys" +version = 
"1.1.25" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d52f4c29e2a68ac30c9087e1b772dc9f44a2b66ed44edf2266cf2be9b03dafc1" +dependencies = [ + "cc", + "libc", + "pkg-config", + "vcpkg", +] + +[[package]] +name = "linux-raw-sys" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32a66949e030da00e8c7d4434b251670a91556f4144941d37452769c25d58a53" + +[[package]] +name = "litemap" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6373607a59f0be73a39b6fe456b8192fcc3585f602af20751600e974dd455e77" + +[[package]] +name = "local-channel" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6cbc85e69b8df4b8bb8b89ec634e7189099cea8927a276b7384ce5488e53ec8" +dependencies = [ + "futures-core", + "futures-sink", + "local-waker", +] + +[[package]] +name = "local-waker" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4d873d7c67ce09b42110d801813efbc9364414e356be9935700d368351657487" + +[[package]] +name = "lock_api" +version = "0.4.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "224399e74b87b5f3557511d98dff8b14089b3dadafcab6bb93eab67d3aace965" +dependencies = [ + "scopeguard", +] + +[[package]] +name = "log" +version = "0.4.29" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5e5032e24019045c762d3c0f28f5b6b8bbf38563a65908389bf7978758920897" + +[[package]] +name = "loop9" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fae87c125b03c1d2c0150c90365d7d6bcc53fb73a9acaef207d2d065860f062" +dependencies = [ + "imgref", +] + +[[package]] +name = "lru" +version = "0.12.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "234cf4f4a04dc1f57e24b96cc0cd600cf2af460d4161ac5ecdd0af8e1f3b2a38" +dependencies = [ + "hashbrown 0.15.5", +] + 
+[[package]] +name = "lru-slab" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "112b39cec0b298b6c1999fee3e31427f74f676e4cb9879ed1a121b43661a4154" + +[[package]] +name = "lzma-rust2" +version = "0.16.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "47bb1e988e6fb779cf720ad431242d3f03167c1b3f2b1aae7f1a94b2495b36ae" +dependencies = [ + "sha2 0.10.9", +] + +[[package]] +name = "mac_address" +version = "1.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c0aeb26bf5e836cc1c341c8106051b573f1766dfa05aa87f0b98be5e51b02303" +dependencies = [ + "nix", + "serde", + "winapi", +] + +[[package]] +name = "matchers" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d1525a2a28c7f4fa0fc98bb91ae755d1e2d1505079e05539e35bc876b5d65ae9" +dependencies = [ + "regex-automata", +] + +[[package]] +name = "matchit" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0e7465ac9959cc2b1404e8e2367b43684a6d13790fe23056cc8c6c5a6b7bcb94" + +[[package]] +name = "matrixmultiply" +version = "0.3.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a06de3016e9fae57a36fd14dba131fccf49f74b40b7fbdb472f96e361ec71a08" +dependencies = [ + "autocfg", + "rawpointer", +] + +[[package]] +name = "maybe-rayon" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ea1f30cedd69f0a2954655f7188c6a834246d2bcf1e315e2ac40c4b24dc9519" +dependencies = [ + "cfg-if", + "rayon", +] + +[[package]] +name = "md-5" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d89e7ee0cfbedfc4da3340218492196241d89eefb6dab27de5df917a6d2e78cf" +dependencies = [ + "cfg-if", + "digest 0.10.7", +] + +[[package]] +name = "md5" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"490cc448043f947bae3cbee9c203358d62dbee0db12107a74be5c30ccfd09771" + +[[package]] +name = "memchr" +version = "2.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8ca58f447f06ed17d5fc4043ce1b10dd205e060fb3ce5b979b8ed8e59ff3f79" + +[[package]] +name = "memoffset" +version = "0.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "488016bfae457b036d996092f6cb448677611ce4449e970ceaf42695203f218a" +dependencies = [ + "autocfg", +] + +[[package]] +name = "metrics" +version = "0.21.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fde3af1a009ed76a778cb84fdef9e7dbbdf5775ae3e4cc1f434a6a307f6f76c5" +dependencies = [ + "ahash 0.8.12", + "metrics-macros", + "portable-atomic", +] + +[[package]] +name = "metrics-macros" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38b4faf00617defe497754acde3024865bc143d44a86799b24e191ecff91354f" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.117", +] + +[[package]] +name = "migrate" +version = "0.2.9" +dependencies = [ + "async-trait", + "models", + "sea-orm", + "sea-orm-migration", + "sea-query", +] + +[[package]] +name = "migrate-cli" +version = "0.2.9" +dependencies = [ + "anyhow", + "clap", + "config", + "dotenvy", + "migrate", + "sea-orm", + "tokio", +] + +[[package]] +name = "mime" +version = "0.3.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" + +[[package]] +name = "mime_guess" +version = "2.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f7c44f8e672c00fe5308fa235f821cb4198414e1c77935c1ab6948d3fd78550e" +dependencies = [ + "mime", + "unicase", +] + +[[package]] +name = "minimal-lexical" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" + +[[package]] +name = "miniz_oxide" +version = "0.8.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fa76a2c86f704bdb222d66965fb3d63269ce38518b83cb0575fca855ebb6316" +dependencies = [ + "adler2", + "simd-adler32", +] + +[[package]] +name = "mio" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "50b7e5b27aa02a74bac8c3f23f448f8d87ff11f92d3aac1a6ed369ee08cc56c1" +dependencies = [ + "libc", + "log", + "wasi", + "windows-sys 0.61.2", +] + +[[package]] +name = "models" +version = "0.2.9" +dependencies = [ + "chrono", + "rust_decimal", + "sea-orm", + "serde", + "serde_json", + "utoipa", + "uuid", +] + +[[package]] +name = "moka" +version = "0.12.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "957228ad12042ee839f93c8f257b62b4c0ab5eaae1d4fa60de53b27c9d7c5046" +dependencies = [ + "async-lock", + "crossbeam-channel", + "crossbeam-epoch", + "crossbeam-utils", + "equivalent", + "event-listener", + "futures-util", + "parking_lot", + "portable-atomic", + "smallvec", + "tagptr", + "uuid", +] + +[[package]] +name = "moxcms" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bb85c154ba489f01b25c0d36ae69a87e4a1c73a72631fc6c0eb6dde34a73e44b" +dependencies = [ + "num-traits", + "pxfm", +] + +[[package]] +name = "nalgebra" +version = "0.34.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df76ea0ff5c7e6b88689085804d6132ded0ddb9de5ca5b8aeb9eeadc0508a70a" +dependencies = [ + "approx", + "glam 0.14.0", + "glam 0.15.2", + "glam 0.16.0", + "glam 0.17.3", + "glam 0.18.0", + "glam 0.19.0", + "glam 0.20.5", + "glam 0.21.3", + "glam 0.22.0", + "glam 0.23.0", + "glam 0.24.2", + "glam 0.25.0", + "glam 0.27.0", + "glam 0.28.0", + "glam 0.29.3", + "glam 0.30.10", + "glam 0.31.1", + "glam 0.32.1", + "matrixmultiply", + "num-complex", + 
"num-rational", + "num-traits", + "simba", + "typenum", +] + +[[package]] +name = "new_debug_unreachable" +version = "1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "650eef8c711430f1a879fdd01d4745a7deea475becfb90269c06775983bbf086" + +[[package]] +name = "nix" +version = "0.29.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "71e2746dc3a24dd78b3cfcb7be93368c6de9963d30f43a6a73998a9cf4b17b46" +dependencies = [ + "bitflags", + "cfg-if", + "cfg_aliases", + "libc", + "memoffset", +] + +[[package]] +name = "nom" +version = "7.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a" +dependencies = [ + "memchr", + "minimal-lexical", +] + +[[package]] +name = "nom" +version = "8.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df9761775871bdef83bee530e60050f7e54b1105350d6884eb0fb4f46c2f9405" +dependencies = [ + "memchr", +] + +[[package]] +name = "noop_proc_macro" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0676bb32a98c1a483ce53e500a81ad9c3d5b3f7c920c28c24e9cb0980d0b5bc8" + +[[package]] +name = "ntapi" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3b335231dfd352ffb0f8017f3b6027a4917f7df785ea2143d8af2adc66980ae" +dependencies = [ + "winapi", +] + +[[package]] +name = "nu-ansi-term" +version = "0.50.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7957b9740744892f114936ab4a57b3f487491bbeafaf8083688b16841a4240e5" +dependencies = [ + "windows-sys 0.61.2", +] + +[[package]] +name = "num" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "35bd024e8b2ff75562e5f34e7f4905839deb4b22955ef5e73d2fea1b9813cb23" +dependencies = [ + "num-complex", + "num-integer", + "num-iter", + "num-rational", + "num-traits", 
+] + +[[package]] +name = "num-bigint" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a5e44f723f1133c9deac646763579fdb3ac745e418f2a7af9cd0c431da1f20b9" +dependencies = [ + "num-integer", + "num-traits", + "rand 0.8.5", +] + +[[package]] +name = "num-bigint-dig" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e661dda6640fad38e827a6d4a310ff4763082116fe217f279885c97f511bb0b7" +dependencies = [ + "lazy_static", + "libm", + "num-integer", + "num-iter", + "num-traits", + "rand 0.8.5", + "smallvec", + "zeroize", +] + +[[package]] +name = "num-complex" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "73f88a1307638156682bada9d7604135552957b7818057dcef22705b4d509495" +dependencies = [ + "num-traits", +] + +[[package]] +name = "num-conv" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c6673768db2d862beb9b39a78fdcb1a69439615d5794a1be50caa9bc92c81967" + +[[package]] +name = "num-derive" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed3955f1a9c7c0c15e092f9c887db08b1fc683305fdf6eb6684f22555355e202" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.117", +] + +[[package]] +name = "num-integer" +version = "0.1.46" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7969661fd2958a5cb096e56c8e1ad0444ac2bbcd0061bd28660485a44879858f" +dependencies = [ + "num-traits", +] + +[[package]] +name = "num-iter" +version = "0.1.45" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1429034a0490724d0075ebb2bc9e875d6503c3cf69e235a8941aa757d83ef5bf" +dependencies = [ + "autocfg", + "num-integer", + "num-traits", +] + +[[package]] +name = "num-rational" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"f83d14da390562dca69fc84082e73e548e1ad308d24accdedd2720017cb37824" +dependencies = [ + "num-bigint", + "num-integer", + "num-traits", +] + +[[package]] +name = "num-traits" +version = "0.2.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841" +dependencies = [ + "autocfg", + "libm", +] + +[[package]] +name = "num_cpus" +version = "1.17.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91df4bbde75afed763b708b7eee1e8e7651e02d97f6d5dd763e89367e957b23b" +dependencies = [ + "hermit-abi", + "libc", +] + +[[package]] +name = "objc2-core-foundation" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2a180dd8642fa45cdb7dd721cd4c11b1cadd4929ce112ebd8b9f5803cc79d536" +dependencies = [ + "bitflags", +] + +[[package]] +name = "objc2-io-kit" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "33fafba39597d6dc1fb709123dfa8289d39406734be322956a69f0931c73bb15" +dependencies = [ + "libc", + "objc2-core-foundation", +] + +[[package]] +name = "once_cell" +version = "1.21.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9f7c3e4beb33f85d45ae3e3a1792185706c8e16d043238c593331cc7cd313b50" + +[[package]] +name = "once_cell_polyfill" +version = "1.70.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "384b8ab6d37215f3c5301a95a4accb5d64aa607f1fcb26a11b5303878451b4fe" + +[[package]] +name = "opaque-debug" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c08d65885ee38876c4f86fa503fb49d7b507c2b62552df7c70b2fce627e06381" + +[[package]] +name = "openssl-probe" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d05e27ee213611ffe7d6348b942e8f942b37114c00cc03cec254295a4a17852e" + +[[package]] +name = "openssl-probe" +version = 
"0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7c87def4c32ab89d880effc9e097653c8da5d6ef28e6b539d313baaacfbafcbe" + +[[package]] +name = "openssl-sys" +version = "0.9.112" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "57d55af3b3e226502be1526dfdba67ab0e9c96fc293004e79576b2b9edb0dbdb" +dependencies = [ + "cc", + "libc", + "pkg-config", + "vcpkg", +] + +[[package]] +name = "operator" +version = "0.2.9" +dependencies = [ + "anyhow", + "chrono", + "futures", + "k8s-openapi", + "kube", + "serde", + "serde_json", + "serde_yaml", + "tokio", + "tracing", + "tracing-subscriber", + "uuid", +] + +[[package]] +name = "option-ext" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "04744f49eae99ab78e0d5c0b603ab218f515ea8cfe5a456d7629ad883a3b6e7d" + +[[package]] +name = "ordered-float" +version = "2.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "68f19d67e5a2795c94e73e0bb1cc1a7edeb2e28efd39e2e1c9b7a40c1108b11c" +dependencies = [ + "num-traits", +] + +[[package]] +name = "ordered-float" +version = "4.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7bb71e1b3fa6ca1c61f383464aaf2bb0e2f8e772a1f01d486832464de363b951" +dependencies = [ + "num-traits", +] + +[[package]] +name = "ouroboros" +version = "0.18.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e0f050db9c44b97a94723127e6be766ac5c340c48f2c4bb3ffa11713744be59" +dependencies = [ + "aliasable", + "ouroboros_macro", + "static_assertions", +] + +[[package]] +name = "ouroboros_macro" +version = "0.18.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c7028bdd3d43083f6d8d4d5187680d0d3560d54df4cc9d752005268b41e64d0" +dependencies = [ + "heck 0.4.1", + "proc-macro2", + "proc-macro2-diagnostics", + "quote", + "syn 2.0.117", +] + +[[package]] +name = "owned_ttf_parser" +version = 
"0.25.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "36820e9051aca1014ddc75770aab4d68bc1e9e632f0f5627c4086bc216fb583b" +dependencies = [ + "ttf-parser", +] + +[[package]] +name = "p256" +version = "0.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c9863ad85fa8f4460f9c48cb909d38a0d689dba1f6f6988a5e3e0d31071bcd4b" +dependencies = [ + "ecdsa", + "elliptic-curve", + "primeorder", + "sha2 0.10.9", +] + +[[package]] +name = "p384" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fe42f1670a52a47d448f14b6a5c61dd78fce51856e68edaa38f7ae3a46b8d6b6" +dependencies = [ + "ecdsa", + "elliptic-curve", + "primeorder", + "sha2 0.10.9", +] + +[[package]] +name = "p521" +version = "0.13.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fc9e2161f1f215afdfce23677034ae137bbd45016a880c2eb3ba8eb95f085b2" +dependencies = [ + "base16ct 0.2.0", + "ecdsa", + "elliptic-curve", + "primeorder", + "rand_core 0.6.4", + "sha2 0.10.9", +] + +[[package]] +name = "pageant" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1b537f975f6d8dcf48db368d7ec209d583b015713b5df0f5d92d2631e4ff5595" +dependencies = [ + "byteorder", + "bytes", + "delegate", + "futures", + "log", + "rand 0.8.5", + "sha2 0.10.9", + "thiserror 1.0.69", + "tokio", + "windows", + "windows-strings", +] + +[[package]] +name = "parking" +version = "2.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f38d5652c16fde515bb1ecef450ab0f6a219d619a7274976324d5e377f7dceba" + +[[package]] +name = "parking_lot" +version = "0.12.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93857453250e3077bd71ff98b6a65ea6621a19bb0f559a85248955ac12c45a1a" +dependencies = [ + "lock_api", + "parking_lot_core", +] + +[[package]] +name = "parking_lot_core" +version = "0.9.12" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "2621685985a2ebf1c516881c026032ac7deafcda1a2c9b7850dc81e3dfcb64c1" +dependencies = [ + "cfg-if", + "libc", + "redox_syscall 0.5.18", + "smallvec", + "windows-link", +] + +[[package]] +name = "password-hash" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "346f04948ba92c43e8469c1ee6736c7563d71012b17d40745260fe106aac2166" +dependencies = [ + "base64ct", + "rand_core 0.6.4", + "subtle", +] + +[[package]] +name = "paste" +version = "1.0.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a" + +[[package]] +name = "pastey" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "35fb2e5f958ec131621fdd531e9fc186ed768cbe395337403ae56c17a74c68ec" + +[[package]] +name = "pbkdf2" +version = "0.12.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8ed6a7761f76e3b9f92dfb0a60a6a6477c61024b775147ff0973a02653abaf2" +dependencies = [ + "digest 0.10.7", + "hmac", +] + +[[package]] +name = "pem" +version = "3.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d30c53c26bc5b31a98cd02d20f25a7c8567146caf63ed593a9d87b2775291be" +dependencies = [ + "base64 0.22.1", + "serde_core", +] + +[[package]] +name = "pem-rfc7468" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "88b39c9bfcfc231068454382784bb460aae594343fb030d46e9f50a645418412" +dependencies = [ + "base64ct", +] + +[[package]] +name = "pem-rfc7468" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a6305423e0e7738146434843d1694d621cce767262b2a86910beab705e4493d9" +dependencies = [ + "base64ct", +] + +[[package]] +name = "percent-encoding" +version = "2.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220" + +[[package]] +name = "pest" +version = "2.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e0848c601009d37dfa3430c4666e147e49cdcf1b92ecd3e63657d8a5f19da662" +dependencies = [ + "memchr", + "ucd-trie", +] + +[[package]] +name = "pest_derive" +version = "2.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "11f486f1ea21e6c10ed15d5a7c77165d0ee443402f0780849d1768e7d9d6fe77" +dependencies = [ + "pest", + "pest_generator", +] + +[[package]] +name = "pest_generator" +version = "2.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8040c4647b13b210a963c1ed407c1ff4fdfa01c31d6d2a098218702e6664f94f" +dependencies = [ + "pest", + "pest_meta", + "proc-macro2", + "quote", + "syn 2.0.117", +] + +[[package]] +name = "pest_meta" +version = "2.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "89815c69d36021a140146f26659a81d6c2afa33d216d736dd4be5381a7362220" +dependencies = [ + "pest", + "sha2 0.10.9", +] + +[[package]] +name = "pgvector" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc58e2d255979a31caa7cabfa7aac654af0354220719ab7a68520ae7a91e8c0b" +dependencies = [ + "serde", +] + +[[package]] +name = "pin-project" +version = "1.1.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f1749c7ed4bcaf4c3d0a3efc28538844fb29bcdd7d2b67b2be7e20ba861ff517" +dependencies = [ + "pin-project-internal", +] + +[[package]] +name = "pin-project-internal" +version = "1.1.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9b20ed30f105399776b9c883e68e536ef602a16ae6f596d2c473591d6ad64c6" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.117", +] + +[[package]] +name = "pin-project-lite" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "a89322df9ebe1c1578d689c92318e070967d1042b512afbe49518723f4e6d5cd" + +[[package]] +name = "pin-utils" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" + +[[package]] +name = "pkcs1" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8ffb9f10fa047879315e6625af03c164b16962a5368d724ed16323b68ace47f" +dependencies = [ + "der", + "pkcs8", + "spki", +] + +[[package]] +name = "pkcs5" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e847e2c91a18bfa887dd028ec33f2fe6f25db77db3619024764914affe8b69a6" +dependencies = [ + "aes 0.8.4", + "cbc", + "der", + "pbkdf2", + "scrypt", + "sha2 0.10.9", + "spki", +] + +[[package]] +name = "pkcs8" +version = "0.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f950b2377845cebe5cf8b5165cb3cc1a5e0fa5cfa3e1f7f55707d8fd82e0a7b7" +dependencies = [ + "der", + "pkcs5", + "rand_core 0.6.4", + "spki", +] + +[[package]] +name = "pkg-config" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7edddbd0b52d732b21ad9a5fab5c704c14cd949e5e9a1ec5929a24fded1b904c" + +[[package]] +name = "plain" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b4596b6d070b27117e987119b4dac604f3c58cfb0b191112e24771b2faeac1a6" + +[[package]] +name = "pluralizer" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4b3eba432a00a1f6c16f39147847a870e94e2e9b992759b503e330efec778cbe" +dependencies = [ + "once_cell", + "regex", +] + +[[package]] +name = "png" +version = "0.18.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "60769b8b31b2a9f263dae2776c37b1b28ae246943cf719eb6946a1db05128a61" +dependencies = [ + "bitflags", + "crc32fast", + "fdeflate", + 
"flate2", + "miniz_oxide", +] + +[[package]] +name = "poly1305" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8159bd90725d2df49889a078b54f4f79e87f1f8a8444194cdca81d38f5393abf" +dependencies = [ + "cpufeatures 0.2.17", + "opaque-debug", + "universal-hash 0.5.1", +] + +[[package]] +name = "poly1305" +version = "0.9.0-rc.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "19feddcbdf17fad33f40041c7f9e768faf19455f32a6d52ba1b8b65ffc7b1cae" +dependencies = [ + "cpufeatures 0.3.0", + "universal-hash 0.6.1", + "zeroize", +] + +[[package]] +name = "polyval" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d1fe60d06143b2430aa532c94cfe9e29783047f06c0d7fd359a9a51b729fa25" +dependencies = [ + "cfg-if", + "cpufeatures 0.2.17", + "opaque-debug", + "universal-hash 0.5.1", +] + +[[package]] +name = "polyval" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7dfc63250416fea14f5749b90725916a6c903f599d51cb635aa7a52bfd03eede" +dependencies = [ + "cpubits", + "cpufeatures 0.3.0", + "universal-hash 0.6.1", +] + +[[package]] +name = "portable-atomic" +version = "1.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c33a9471896f1c69cecef8d20cbe2f7accd12527ce60845ff44c153bb2a21b49" + +[[package]] +name = "potential_utf" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b73949432f5e2a09657003c25bca5e19a0e9c84f8058ca374f49e0ebe605af77" +dependencies = [ + "zerovec", +] + +[[package]] +name = "powerfmt" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391" + +[[package]] +name = "ppmd-rust" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"efca4c95a19a79d1c98f791f10aebd5c1363b473244630bb7dbde1dc98455a24" + +[[package]] +name = "ppv-lite86" +version = "0.2.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85eae3c4ed2f50dcfe72643da4befc30deadb458a9b590d720cde2f2b1e97da9" +dependencies = [ + "zerocopy", +] + +[[package]] +name = "prettyplease" +version = "0.2.37" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "479ca8adacdd7ce8f1fb39ce9ecccbfe93a3f1344b3d0d97f20bc0196208f62b" +dependencies = [ + "proc-macro2", + "syn 2.0.117", +] + +[[package]] +name = "primal-check" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc0d895b311e3af9902528fbb8f928688abbd95872819320517cc24ca6b2bd08" +dependencies = [ + "num-integer", +] + +[[package]] +name = "primeorder" +version = "0.13.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "353e1ca18966c16d9deb1c69278edbc5f194139612772bd9537af60ac231e1e6" +dependencies = [ + "elliptic-curve", +] + +[[package]] +name = "proc-macro-crate" +version = "3.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e67ba7e9b2b56446f1d419b1d807906278ffa1a658a8a5d8a39dcb1f5a78614f" +dependencies = [ + "toml_edit", +] + +[[package]] +name = "proc-macro-error-attr2" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "96de42df36bb9bba5542fe9f1a054b8cc87e172759a1868aa05c1f3acc89dfc5" +dependencies = [ + "proc-macro2", + "quote", +] + +[[package]] +name = "proc-macro-error2" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "11ec05c52be0a07b08061f7dd003e7d7092e0472bc731b4af7bb1ef876109802" +dependencies = [ + "proc-macro-error-attr2", + "proc-macro2", + "quote", + "syn 2.0.117", +] + +[[package]] +name = "proc-macro2" +version = "1.0.106" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"8fd00f0bb2e90d81d1044c2b32617f68fcb9fa3bb7640c23e9c748e53fb30934" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "proc-macro2-diagnostics" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af066a9c399a26e020ada66a034357a868728e72cd426f3adcd35f80d88d88c8" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.117", + "version_check", + "yansi", +] + +[[package]] +name = "profiling" +version = "1.0.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3eb8486b569e12e2c32ad3e204dbaba5e4b5b216e9367044f25f1dba42341773" +dependencies = [ + "profiling-procmacros", +] + +[[package]] +name = "profiling-procmacros" +version = "1.0.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "52717f9a02b6965224f95ca2a81e2e0c5c43baacd28ca057577988930b6c3d5b" +dependencies = [ + "quote", + "syn 2.0.117", +] + +[[package]] +name = "prost" +version = "0.13.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2796faa41db3ec313a31f7624d9286acf277b52de526150b7e69f3debf891ee5" +dependencies = [ + "bytes", + "prost-derive", +] + +[[package]] +name = "prost-derive" +version = "0.13.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a56d757972c98b346a9b766e3f02746cde6dd1cd1d1d563472929fdd74bec4d" +dependencies = [ + "anyhow", + "itertools", + "proc-macro2", + "quote", + "syn 2.0.117", +] + +[[package]] +name = "prost-types" +version = "0.13.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "52c2c1bf36ddb1a1c396b3601a3cec27c2462e45f07c386894ec3ccf5332bd16" +dependencies = [ + "prost", +] + +[[package]] +name = "ptr_meta" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0738ccf7ea06b608c10564b31debd4f5bc5e197fc8bfe088f68ae5ce81e7a4f1" +dependencies = [ + "ptr_meta_derive", +] + +[[package]] +name = "ptr_meta_derive" +version = "0.1.4" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "16b845dbfca988fa33db069c0e230574d15a3088f147a87b64c7589eb662c9ac" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "pxfm" +version = "0.1.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b5a041e753da8b807c9255f28de81879c78c876392ff2469cde94799b2896b9d" + +[[package]] +name = "qdrant-client" +version = "1.17.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f5d0a9b168ecf8f30a3eb7e8f4766e3050701242ffbe99838b58e6c4251e7211" +dependencies = [ + "anyhow", + "derive_builder", + "futures", + "futures-util", + "parking_lot", + "prost", + "prost-types", + "reqwest 0.12.28", + "semver", + "serde", + "serde_json", + "thiserror 1.0.69", + "tokio", + "tonic", +] + +[[package]] +name = "qoi" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f6d64c71eb498fe9eae14ce4ec935c555749aef511cca85b5568910d6e48001" +dependencies = [ + "bytemuck", +] + +[[package]] +name = "queue" +version = "0.2.9" +dependencies = [ + "anyhow", + "chrono", + "deadpool-redis", + "futures", + "redis", + "serde", + "serde_json", + "slog", + "thiserror 2.0.18", + "tokio", + "tokio-stream", + "uuid", +] + +[[package]] +name = "quick-error" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a993555f31e5a609f617c12db6250dedcac1b0a85076912c436e6fc9b2c8e6a3" + +[[package]] +name = "quinn" +version = "0.11.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9e20a958963c291dc322d98411f541009df2ced7b5a4f2bd52337638cfccf20" +dependencies = [ + "bytes", + "cfg_aliases", + "pin-project-lite", + "quinn-proto", + "quinn-udp", + "rustc-hash 2.1.2", + "rustls", + "socket2 0.6.3", + "thiserror 2.0.18", + "tokio", + "tracing", + "web-time", +] + +[[package]] +name = "quinn-proto" +version = "0.11.14" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "434b42fec591c96ef50e21e886936e66d3cc3f737104fdb9b737c40ffb94c098" +dependencies = [ + "bytes", + "getrandom 0.3.4", + "lru-slab", + "rand 0.9.2", + "ring", + "rustc-hash 2.1.2", + "rustls", + "rustls-pki-types", + "slab", + "thiserror 2.0.18", + "tinyvec", + "tracing", + "web-time", +] + +[[package]] +name = "quinn-udp" +version = "0.5.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "addec6a0dcad8a8d96a771f815f0eaf55f9d1805756410b39f5fa81332574cbd" +dependencies = [ + "cfg_aliases", + "libc", + "once_cell", + "socket2 0.6.3", + "tracing", + "windows-sys 0.60.2", +] + +[[package]] +name = "quote" +version = "1.0.45" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41f2619966050689382d2b44f664f4bc593e129785a36d6ee376ddf37259b924" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "quoted_printable" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "478e0585659a122aa407eb7e3c0e1fa51b1d8a870038bd29f0cf4a8551eea972" + +[[package]] +name = "r-efi" +version = "5.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f" + +[[package]] +name = "r-efi" +version = "6.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8dcc9c7d52a811697d2151c701e0d08956f92b0e24136cf4cf27b57a6a0d9bf" + +[[package]] +name = "radium" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc33ff2d4973d518d823d61aa239014831e521c75da58e3df4840d3f47749d09" + +[[package]] +name = "rand" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" +dependencies = [ + "libc", + "rand_chacha 0.3.1", + "rand_core 0.6.4", +] + +[[package]] +name = "rand" 
+version = "0.9.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6db2770f06117d490610c7488547d543617b21bfa07796d7a12f6f1bd53850d1" +dependencies = [ + "rand_chacha 0.9.0", + "rand_core 0.9.5", +] + +[[package]] +name = "rand" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bc266eb313df6c5c09c1c7b1fbe2510961e5bcd3add930c1e31f7ed9da0feff8" +dependencies = [ + "chacha20 0.10.0", + "getrandom 0.4.2", + "rand_core 0.10.0", +] + +[[package]] +name = "rand_chacha" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" +dependencies = [ + "ppv-lite86", + "rand_core 0.6.4", +] + +[[package]] +name = "rand_chacha" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3022b5f1df60f26e1ffddd6c66e8aa15de382ae63b3a0c1bfc0e4d3e3f325cb" +dependencies = [ + "ppv-lite86", + "rand_core 0.9.5", +] + +[[package]] +name = "rand_core" +version = "0.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" +dependencies = [ + "getrandom 0.2.17", +] + +[[package]] +name = "rand_core" +version = "0.9.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "76afc826de14238e6e8c374ddcc1fa19e374fd8dd986b0d2af0d02377261d83c" +dependencies = [ + "getrandom 0.3.4", +] + +[[package]] +name = "rand_core" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c8d0fd677905edcbeedbf2edb6494d676f0e98d54d5cf9bda0b061cb8fb8aba" + +[[package]] +name = "rand_distr" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6a8615d50dcf34fa31f7ab52692afec947c4dd0ab803cc87cb3b0b4570ff7463" +dependencies = [ + "num-traits", + "rand 0.9.2", +] + +[[package]] +name = "rav1e" 
+version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "43b6dd56e85d9483277cde964fd1bdb0428de4fec5ebba7540995639a21cb32b" +dependencies = [ + "aligned-vec", + "arbitrary", + "arg_enum_proc_macro", + "arrayvec", + "av-scenechange", + "av1-grain", + "bitstream-io", + "built", + "cfg-if", + "interpolate_name", + "itertools", + "libc", + "libfuzzer-sys", + "log", + "maybe-rayon", + "new_debug_unreachable", + "noop_proc_macro", + "num-derive", + "num-traits", + "paste", + "profiling", + "rand 0.9.2", + "rand_chacha 0.9.0", + "simd_helpers", + "thiserror 2.0.18", + "v_frame", + "wasm-bindgen", +] + +[[package]] +name = "ravif" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e52310197d971b0f5be7fe6b57530dcd27beb35c1b013f29d66c1ad73fbbcc45" +dependencies = [ + "avif-serialize", + "imgref", + "loop9", + "quick-error", + "rav1e", + "rayon", + "rgb", +] + +[[package]] +name = "rawpointer" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "60a357793950651c4ed0f3f52338f53b2f809f32d83a07f72909fa13e4c6c1e3" + +[[package]] +name = "rayon" +version = "1.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "368f01d005bf8fd9b1206fb6fa653e6c4a81ceb1466406b81792d87c5677a58f" +dependencies = [ + "either", + "rayon-core", +] + +[[package]] +name = "rayon-core" +version = "1.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "22e18b0f0062d30d4230b2e85ff77fdfe4326feb054b9783a3460d8435c8ab91" +dependencies = [ + "crossbeam-deque", + "crossbeam-utils", +] + +[[package]] +name = "redis" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d76e41a79ae5cbb41257d84cf4cf0db0bb5a95b11bf05c62c351de4fe748620d" +dependencies = [ + "arc-swap", + "arcstr", + "async-lock", + "backon", + "bytes", + "cfg-if", + "combine", + "crc16", + "futures-channel", + 
"futures-util", + "itoa", + "log", + "num-bigint", + "percent-encoding", + "pin-project-lite", + "rand 0.9.2", + "ryu", + "sha1_smol", + "socket2 0.6.3", + "tokio", + "tokio-util", + "url", + "xxhash-rust", +] + +[[package]] +name = "redox_syscall" +version = "0.5.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed2bf2547551a7053d6fdfafda3f938979645c44812fbfcda098faae3f1a362d" +dependencies = [ + "bitflags", +] + +[[package]] +name = "redox_syscall" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ce70a74e890531977d37e532c34d45e9055d2409ed08ddba14529471ed0be16" +dependencies = [ + "bitflags", +] + +[[package]] +name = "redox_users" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4e608c6638b9c18977b00b475ac1f28d14e84b27d8d42f70e0bf1e3dec127ac" +dependencies = [ + "getrandom 0.2.17", + "libredox", + "thiserror 2.0.18", +] + +[[package]] +name = "regex" +version = "1.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e10754a14b9137dd7b1e3e5b0493cc9171fdd105e0ab477f51b72e7f3ac0e276" +dependencies = [ + "aho-corasick", + "memchr", + "regex-automata", + "regex-syntax", +] + +[[package]] +name = "regex-automata" +version = "0.4.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e1dd4122fc1595e8162618945476892eefca7b88c52820e74af6262213cae8f" +dependencies = [ + "aho-corasick", + "memchr", + "regex-syntax", +] + +[[package]] +name = "regex-lite" +version = "0.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cab834c73d247e67f4fae452806d17d3c7501756d98c8808d7c9c7aa7d18f973" + +[[package]] +name = "regex-syntax" +version = "0.8.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc897dd8d9e8bd1ed8cdad82b5966c3e0ecae09fb1907d58efaa013543185d0a" + +[[package]] +name = "rend" +version = "0.4.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "71fe3824f5629716b1589be05dacd749f6aa084c87e00e016714a8cdfccc997c" +dependencies = [ + "bytecheck", +] + +[[package]] +name = "reqwest" +version = "0.12.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eddd3ca559203180a307f12d114c268abf583f59b03cb906fd0b3ff8646c1147" +dependencies = [ + "base64 0.22.1", + "bytes", + "futures-core", + "futures-util", + "h2 0.4.13", + "http 1.4.0", + "http-body", + "http-body-util", + "hyper", + "hyper-rustls", + "hyper-util", + "js-sys", + "log", + "mime_guess", + "percent-encoding", + "pin-project-lite", + "quinn", + "rustls", + "rustls-native-certs 0.8.3", + "rustls-pki-types", + "serde", + "serde_json", + "serde_urlencoded", + "sync_wrapper", + "tokio", + "tokio-rustls", + "tokio-util", + "tower 0.5.3", + "tower-http", + "tower-service", + "url", + "wasm-bindgen", + "wasm-bindgen-futures", + "wasm-streams", + "web-sys", + "webpki-roots", +] + +[[package]] +name = "reqwest" +version = "0.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ab3f43e3283ab1488b624b44b0e988d0acea0b3214e694730a055cb6b2efa801" +dependencies = [ + "base64 0.22.1", + "bytes", + "futures-core", + "http 1.4.0", + "http-body", + "http-body-util", + "hyper", + "hyper-util", + "js-sys", + "log", + "percent-encoding", + "pin-project-lite", + "serde", + "serde_json", + "sync_wrapper", + "tokio", + "tower 0.5.3", + "tower-http", + "tower-service", + "url", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", +] + +[[package]] +name = "reqwest-eventsource" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "632c55746dbb44275691640e7b40c907c16a2dc1a5842aa98aaec90da6ec6bde" +dependencies = [ + "eventsource-stream", + "futures-core", + "futures-timer", + "mime", + "nom 7.1.3", + "pin-project-lite", + "reqwest 0.12.28", + "thiserror 1.0.69", +] + +[[package]] +name = "rfc6979" +version = 
"0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8dd2a808d456c4a54e300a23e9f5a67e122c3024119acbfd73e3bf664491cb2" +dependencies = [ + "hmac", + "subtle", +] + +[[package]] +name = "rgb" +version = "0.8.53" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "47b34b781b31e5d73e9fbc8689c70551fd1ade9a19e3e28cfec8580a79290cc4" + +[[package]] +name = "ring" +version = "0.17.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4689e6c2294d81e88dc6261c768b63bc4fcdb852be6d1352498b114f61383b7" +dependencies = [ + "cc", + "cfg-if", + "getrandom 0.2.17", + "libc", + "untrusted", + "windows-sys 0.52.0", +] + +[[package]] +name = "rkyv" +version = "0.7.46" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2297bf9c81a3f0dc96bc9521370b88f054168c29826a75e89c55ff196e7ed6a1" +dependencies = [ + "bitvec", + "bytecheck", + "bytes", + "hashbrown 0.12.3", + "ptr_meta", + "rend", + "rkyv_derive", + "seahash", + "tinyvec", + "uuid", +] + +[[package]] +name = "rkyv_derive" +version = "0.7.46" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "84d7b42d4b8d06048d3ac8db0eb31bcb942cbeb709f0b5f2b2ebde398d3038f5" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "room" +version = "0.2.9" +dependencies = [ + "agent", + "anyhow", + "async-openai", + "chrono", + "dashmap", + "db", + "deadpool-redis", + "futures", + "hostname", + "lru", + "metrics", + "models", + "queue", + "redis", + "regex-lite", + "sea-orm", + "serde", + "serde_json", + "session", + "slog", + "thiserror 2.0.18", + "tokio", + "tokio-stream", + "utoipa", + "uuid", +] + +[[package]] +name = "rpc" +version = "0.2.9" + +[[package]] +name = "rsa" +version = "0.9.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8573f03f5883dcaebdfcf4725caa1ecb9c15b2ef50c43a07b816e06799bb12d" +dependencies = [ + "const-oid 0.9.6", 
+ "digest 0.10.7", + "num-bigint-dig", + "num-integer", + "num-traits", + "pkcs1", + "pkcs8", + "rand_core 0.6.4", + "sha2 0.10.9", + "signature 2.2.0", + "spki", + "subtle", + "zeroize", +] + +[[package]] +name = "russh" +version = "0.55.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "82b4d036bb45d7bbe99dbfef4ec60eaeb614708d22ff107124272f8ef6b54548" +dependencies = [ + "aes 0.8.4", + "bitflags", + "block-padding", + "byteorder", + "bytes", + "cbc", + "ctr 0.9.2", + "curve25519-dalek", + "data-encoding", + "delegate", + "der", + "digest 0.10.7", + "ecdsa", + "ed25519-dalek", + "elliptic-curve", + "enum_dispatch", + "flate2", + "futures", + "generic-array 1.3.5", + "getrandom 0.2.17", + "hex-literal", + "hmac", + "home", + "inout 0.1.4", + "internal-russh-forked-ssh-key", + "libcrux-ml-kem", + "log", + "md5", + "num-bigint", + "p256", + "p384", + "p521", + "pageant", + "pbkdf2", + "pkcs5", + "pkcs8", + "rand 0.8.5", + "rand_core 0.6.4", + "ring", + "russh-cryptovec", + "russh-util", + "sec1 0.7.3", + "sha1", + "sha2 0.10.9", + "signature 2.2.0", + "spki", + "ssh-encoding 0.2.0", + "subtle", + "thiserror 1.0.69", + "tokio", + "typenum", + "yasna", + "zeroize", +] + +[[package]] +name = "russh-cryptovec" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4fb0ed583ff0f6b4aa44c7867dd7108df01b30571ee9423e250b4cc939f8c6cf" +dependencies = [ + "libc", + "log", + "nix", + "ssh-encoding 0.2.0", + "winapi", +] + +[[package]] +name = "russh-util" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "668424a5dde0bcb45b55ba7de8476b93831b4aa2fa6947e145f3b053e22c60b6" +dependencies = [ + "chrono", + "tokio", + "wasm-bindgen", + "wasm-bindgen-futures", +] + +[[package]] +name = "rust_decimal" +version = "1.41.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2ce901f9a19d251159075a4c37af514c3b8ef99c22e02dd8c19161cf397ee94a" 
+dependencies = [ + "arrayvec", + "borsh", + "bytes", + "num-traits", + "rand 0.8.5", + "rkyv", + "serde", + "serde_json", + "wasm-bindgen", +] + +[[package]] +name = "rustc-hash" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" + +[[package]] +name = "rustc-hash" +version = "2.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94300abf3f1ae2e2b8ffb7b58043de3d399c73fa6f4b73826402a5c457614dbe" + +[[package]] +name = "rustc_version" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cfcb3a22ef46e85b45de6ee7e79d063319ebb6594faafcf1c225ea92ab6e9b92" +dependencies = [ + "semver", +] + +[[package]] +name = "rustdct" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b61555105d6a9bf98797c063c362a1d24ed8ab0431655e38f1cf51e52089551" +dependencies = [ + "rustfft", +] + +[[package]] +name = "rustfft" +version = "6.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "21db5f9893e91f41798c88680037dba611ca6674703c1a18601b01a72c8adb89" +dependencies = [ + "num-complex", + "num-integer", + "num-traits", + "primal-check", + "strength_reduce", + "transpose", +] + +[[package]] +name = "rustix" +version = "1.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6fe4565b9518b83ef4f91bb47ce29620ca828bd32cb7e408f0062e9930ba190" +dependencies = [ + "bitflags", + "errno", + "libc", + "linux-raw-sys", + "windows-sys 0.61.2", +] + +[[package]] +name = "rustls" +version = "0.23.37" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "758025cb5fccfd3bc2fd74708fd4682be41d99e5dff73c377c0646c6012c73a4" +dependencies = [ + "log", + "once_cell", + "ring", + "rustls-pki-types", + "rustls-webpki", + "subtle", + "zeroize", +] + +[[package]] +name = "rustls-native-certs" 
+version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e5bfb394eeed242e909609f56089eecfe5fda225042e8b171791b9c95f5931e5" +dependencies = [ + "openssl-probe 0.1.6", + "rustls-pemfile", + "rustls-pki-types", + "schannel", + "security-framework 2.11.1", +] + +[[package]] +name = "rustls-native-certs" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "612460d5f7bea540c490b2b6395d8e34a953e52b491accd6c86c8164c5932a63" +dependencies = [ + "openssl-probe 0.2.1", + "rustls-pki-types", + "schannel", + "security-framework 3.7.0", +] + +[[package]] +name = "rustls-pemfile" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dce314e5fee3f39953d46bb63bb8a46d40c2f8fb7cc5a3b6cab2bde9721d6e50" +dependencies = [ + "rustls-pki-types", +] + +[[package]] +name = "rustls-pki-types" +version = "1.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "be040f8b0a225e40375822a563fa9524378b9d63112f53e19ffff34df5d33fdd" +dependencies = [ + "web-time", + "zeroize", +] + +[[package]] +name = "rustls-webpki" +version = "0.103.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df33b2b81ac578cabaf06b89b0631153a3f416b0a886e8a7a1707fb51abbd1ef" +dependencies = [ + "ring", + "rustls-pki-types", + "untrusted", +] + +[[package]] +name = "rustversion" +version = "1.0.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b39cdef0fa800fc44525c84ccb54a029961a8215f9619753635a9c0d2538d46d" + +[[package]] +name = "ryu" +version = "1.0.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9774ba4a74de5f7b1c1451ed6cd5285a32eddb5cccb8cc655a4e50009e06477f" + +[[package]] +name = "safe_arch" +version = "0.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "96b02de82ddbe1b636e6170c21be622223aea188ef2e139be0a5b219ec215323" 
+dependencies = [ + "bytemuck", +] + +[[package]] +name = "salsa20" +version = "0.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97a22f5af31f73a954c10289c93e8a50cc23d971e80ee446f1f6f7137a088213" +dependencies = [ + "cipher 0.4.4", +] + +[[package]] +name = "schannel" +version = "0.1.29" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91c1b7e4904c873ef0710c1f407dde2e6287de2bebc1bbbf7d430bb7cbffd939" +dependencies = [ + "windows-sys 0.61.2", +] + +[[package]] +name = "schemars" +version = "0.8.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3fbf2ae1b8bc8e02df939598064d22402220cd5bbcca1c76f7d6a310974d5615" +dependencies = [ + "dyn-clone", + "schemars_derive", + "serde", + "serde_json", +] + +[[package]] +name = "schemars_derive" +version = "0.8.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32e265784ad618884abaea0600a9adf15393368d840e0222d101a072f3f7534d" +dependencies = [ + "proc-macro2", + "quote", + "serde_derive_internals", + "syn 2.0.117", +] + +[[package]] +name = "scopeguard" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" + +[[package]] +name = "scrypt" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0516a385866c09368f0b5bcd1caff3366aace790fcd46e2bb032697bb172fd1f" +dependencies = [ + "pbkdf2", + "salsa20", + "sha2 0.10.9", +] + +[[package]] +name = "sea-bae" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f694a6ab48f14bc063cfadff30ab551d3c7e46d8f81836c51989d548f44a2a25" +dependencies = [ + "heck 0.4.1", + "proc-macro-error2", + "proc-macro2", + "quote", + "syn 2.0.117", +] + +[[package]] +name = "sea-orm" +version = "2.0.0-rc.37" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"4b846dc1c7fefbea372c03765ff08307d68894bbad8c73b66176dcd53a3ee131" +dependencies = [ + "async-stream", + "async-trait", + "bigdecimal", + "chrono", + "derive_more", + "futures-util", + "itertools", + "log", + "mac_address", + "ouroboros", + "pgvector", + "rust_decimal", + "sea-orm-arrow", + "sea-orm-macros", + "sea-query", + "sea-query-sqlx", + "sea-schema", + "serde", + "serde_json", + "sqlx", + "strum", + "thiserror 2.0.18", + "time", + "tracing", + "url", + "uuid", +] + +[[package]] +name = "sea-orm-arrow" +version = "2.0.0-rc.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c2eee8405f16c1f337fe3a83389361caea83c928d14dbd666a480407072c365" +dependencies = [ + "arrow", + "sea-query", + "thiserror 2.0.18", +] + +[[package]] +name = "sea-orm-cli" +version = "2.0.0-rc.37" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cd9b34d4c8e615079c04eb7863a429c2d2a8bf9c934eb9eeb580f51f36367124" +dependencies = [ + "chrono", + "clap", + "dotenvy", + "glob", + "indoc", + "regex", + "tracing", + "tracing-subscriber", + "url", +] + +[[package]] +name = "sea-orm-macros" +version = "2.0.0-rc.37" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b449fe660e4d365f335222025df97ae01e670ef7ad788b3c67db9183b6cb0474" +dependencies = [ + "heck 0.5.0", + "itertools", + "pluralizer", + "proc-macro2", + "quote", + "sea-bae", + "syn 2.0.117", + "unicode-ident", +] + +[[package]] +name = "sea-orm-migration" +version = "2.0.0-rc.37" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b3ceb928aac8be83332d34d1fdbc827d43696135a800723ffeb2e0b33b7b495e" +dependencies = [ + "async-trait", + "clap", + "dotenvy", + "sea-orm", + "sea-orm-cli", + "sea-schema", + "tracing", + "tracing-subscriber", +] + +[[package]] +name = "sea-query" +version = "1.0.0-rc.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"58decdaaaf2a698170af2fa1b2e8f7b43a970e7768bf18aebaab113bada46354" +dependencies = [ + "chrono", + "inherent", + "itoa", + "ordered-float 4.6.0", + "rust_decimal", + "sea-query-derive", + "serde_json", + "time", + "uuid", +] + +[[package]] +name = "sea-query-derive" +version = "1.0.0-rc.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8d88ad44b6ad9788c8b9476b6b91f94c7461d1e19d39cd8ea37838b1e6ff5aa8" +dependencies = [ + "darling", + "heck 0.4.1", + "proc-macro2", + "quote", + "syn 2.0.117", + "thiserror 2.0.18", +] + +[[package]] +name = "sea-query-sqlx" +version = "0.8.0-rc.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e4377164b09a11bb692dec6966eb0e6908d63d768defef0be689b39e02cf8544" +dependencies = [ + "sea-query", + "sqlx", +] + +[[package]] +name = "sea-schema" +version = "0.17.0-rc.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b363dd21c20fe4d1488819cb2bc7f8d4696c62dd9f39554f97639f54d57dd0ab" +dependencies = [ + "async-trait", + "sea-query", + "sea-query-sqlx", + "sea-schema-derive", + "sqlx", +] + +[[package]] +name = "sea-schema-derive" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "debdc8729c37fdbf88472f97fd470393089f997a909e535ff67c544d18cfccf0" +dependencies = [ + "heck 0.4.1", + "proc-macro2", + "quote", + "syn 2.0.117", +] + +[[package]] +name = "seahash" +version = "4.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1c107b6f4780854c8b126e228ea8869f4d7b71260f962fefb57b996b8959ba6b" + +[[package]] +name = "sec1" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3e97a565f76233a6003f9f5c54be1d9c5bdfa3eccfb189469f11ec4901c47dc" +dependencies = [ + "base16ct 0.2.0", + "der", + "generic-array 0.14.7", + "pkcs8", + "subtle", + "zeroize", +] + +[[package]] +name = "sec1" +version = "0.8.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "d56d437c2f19203ce5f7122e507831de96f3d2d4d3be5af44a0b0a09d8a80e4d" +dependencies = [ + "base16ct 1.0.0", + "hybrid-array", +] + +[[package]] +name = "secrecy" +version = "0.10.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e891af845473308773346dc847b2c23ee78fe442e0472ac50e22a18a93d3ae5a" +dependencies = [ + "serde", + "zeroize", +] + +[[package]] +name = "security-framework" +version = "2.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "897b2245f0b511c87893af39b033e5ca9cce68824c4d7e7630b5a1d339658d02" +dependencies = [ + "bitflags", + "core-foundation 0.9.4", + "core-foundation-sys", + "libc", + "security-framework-sys", +] + +[[package]] +name = "security-framework" +version = "3.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b7f4bc775c73d9a02cde8bf7b2ec4c9d12743edf609006c7facc23998404cd1d" +dependencies = [ + "bitflags", + "core-foundation 0.10.1", + "core-foundation-sys", + "libc", + "security-framework-sys", +] + +[[package]] +name = "security-framework-sys" +version = "2.17.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ce2691df843ecc5d231c0b14ece2acc3efb62c0a398c7e1d875f3983ce020e3" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "semver" +version = "1.0.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d767eb0aabc880b29956c35734170f26ed551a859dbd361d140cdbeca61ab1e2" + +[[package]] +name = "serde" +version = "1.0.228" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a8e94ea7f378bd32cbbd37198a4a91436180c5bb472411e48b5ec2e2124ae9e" +dependencies = [ + "serde_core", + "serde_derive", +] + +[[package]] +name = "serde-value" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"f3a1a3341211875ef120e117ea7fd5228530ae7e7036a779fdc9117be6b3282c" +dependencies = [ + "ordered-float 2.10.1", + "serde", +] + +[[package]] +name = "serde_core" +version = "1.0.228" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41d385c7d4ca58e59fc732af25c3983b67ac852c1a25000afe1175de458b67ad" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde_derive" +version = "1.0.228" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.117", +] + +[[package]] +name = "serde_derive_internals" +version = "0.29.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "18d26a20a969b9e3fdf2fc2d9f21eda6c40e2de84c9408bb5d3b05d499aae711" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.117", +] + +[[package]] +name = "serde_json" +version = "1.0.149" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "83fc039473c5595ace860d8c4fafa220ff474b3fc6bfdb4293327f1a37e94d86" +dependencies = [ + "itoa", + "memchr", + "serde", + "serde_core", + "zmij", +] + +[[package]] +name = "serde_urlencoded" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd" +dependencies = [ + "form_urlencoded", + "itoa", + "ryu", + "serde", +] + +[[package]] +name = "serde_yaml" +version = "0.9.34+deprecated" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6a8b1a1a2ebf674015cc02edccce75287f1a0130d394307b36743c2f5d504b47" +dependencies = [ + "indexmap 2.13.0", + "itoa", + "ryu", + "serde", + "unsafe-libyaml", +] + +[[package]] +name = "service" +version = "0.2.9" +dependencies = [ + "agent", + "anyhow", + "argon2", + "async-openai", + "avatar", + "base64 0.22.1", + "captcha-rs", + "chrono", + "config", + "db", + 
"deadpool-redis", + "email", + "futures", + "git", + "git2", + "hex", + "hmac", + "models", + "moka", + "queue", + "rand 0.10.0", + "redis", + "reqwest 0.13.2", + "room", + "rsa", + "rust_decimal", + "sea-orm", + "serde", + "serde_json", + "session", + "sha1", + "sha2 0.11.0", + "slog", + "tokio", + "tokio-stream", + "tracing", + "utoipa", + "uuid", +] + +[[package]] +name = "session" +version = "0.2.9" +dependencies = [ + "actix-service", + "actix-utils", + "actix-web", + "anyhow", + "deadpool-redis", + "derive_more", + "rand 0.10.0", + "redis", + "serde", + "serde_json", + "tokio", + "uuid", +] + +[[package]] +name = "sha1" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba" +dependencies = [ + "cfg-if", + "cpufeatures 0.2.17", + "digest 0.10.7", +] + +[[package]] +name = "sha1_smol" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbfa15b3dddfee50a0fff136974b3e1bde555604ba463834a7eb7deb6417705d" + +[[package]] +name = "sha2" +version = "0.10.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a7507d819769d01a365ab707794a4084392c824f54a7a6a7862f8c3d0892b283" +dependencies = [ + "cfg-if", + "cpufeatures 0.2.17", + "digest 0.10.7", +] + +[[package]] +name = "sha2" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "446ba717509524cb3f22f17ecc096f10f4822d76ab5c0b9822c5f9c284e825f4" +dependencies = [ + "cfg-if", + "cpufeatures 0.3.0", + "digest 0.11.2", +] + +[[package]] +name = "sharded-slab" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f40ca3c46823713e0d4209592e8d6e826aa57e928f09752619fc696c499637f6" +dependencies = [ + "lazy_static", +] + +[[package]] +name = "shellexpand" +version = "3.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"32824fab5e16e6c4d86dc1ba84489390419a39f97699852b66480bb87d297ed8" +dependencies = [ + "dirs", +] + +[[package]] +name = "shlex" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" + +[[package]] +name = "signal-hook-registry" +version = "1.4.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c4db69cba1110affc0e9f7bcd48bbf87b3f4fc7c61fc9155afd4c469eb3d6c1b" +dependencies = [ + "errno", + "libc", +] + +[[package]] +name = "signature" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77549399552de45a898a580c1b41d445bf730df867cc44e6c0233bbc4b8329de" +dependencies = [ + "digest 0.10.7", + "rand_core 0.6.4", +] + +[[package]] +name = "signature" +version = "3.0.0-rc.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f1880df446116126965eeec169136b2e0251dba37c6223bcc819569550edea3" + +[[package]] +name = "simba" +version = "0.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c99284beb21666094ba2b75bbceda012e610f5479dfcc2d6e2426f53197ffd95" +dependencies = [ + "approx", + "num-complex", + "num-traits", + "paste", + "wide", +] + +[[package]] +name = "simd-adler32" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "703d5c7ef118737c72f1af64ad2f6f8c5e1921f818cdcb97b8fe6fc69bf66214" + +[[package]] +name = "simd_helpers" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "95890f873bec569a0362c235787f3aca6e1e887302ba4840839bcc6459c42da6" +dependencies = [ + "quote", +] + +[[package]] +name = "simdutf8" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3a9fe34e3e7a50316060351f37187a3f546bce95496156754b601a5fa71b76e" + +[[package]] +name = "slab" +version = "0.4.12" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c790de23124f9ab44544d7ac05d60440adc586479ce501c1d6d7da3cd8c9cf5" + +[[package]] +name = "slog" +version = "2.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b3b8565691b22d2bdfc066426ed48f837fc0c5f2c8cad8d9718f7f99d6995c1" +dependencies = [ + "anyhow", + "erased-serde", + "rustversion", + "serde_core", +] + +[[package]] +name = "smallvec" +version = "1.15.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03" +dependencies = [ + "serde", +] + +[[package]] +name = "socket2" +version = "0.5.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e22376abed350d73dd1cd119b57ffccad95b4e585a7cda43e286245ce23c0678" +dependencies = [ + "libc", + "windows-sys 0.52.0", +] + +[[package]] +name = "socket2" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3a766e1110788c36f4fa1c2b71b387a7815aa65f88ce0229841826633d93723e" +dependencies = [ + "libc", + "windows-sys 0.61.2", +] + +[[package]] +name = "spin" +version = "0.9.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67" +dependencies = [ + "lock_api", +] + +[[package]] +name = "spki" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d91ed6c858b01f942cd56b37a94b3e0a1798290327d1236e4d9cf4eaca44d29d" +dependencies = [ + "base64ct", + "der", +] + +[[package]] +name = "sqlx" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fefb893899429669dcdd979aff487bd78f4064e5e7907e4269081e0ef7d97dc" +dependencies = [ + "sqlx-core", + "sqlx-macros", + "sqlx-mysql", + "sqlx-postgres", + "sqlx-sqlite", +] + +[[package]] +name = "sqlx-core" +version = "0.8.6" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee6798b1838b6a0f69c007c133b8df5866302197e404e8b6ee8ed3e3a5e68dc6" +dependencies = [ + "base64 0.22.1", + "bytes", + "chrono", + "crc", + "crossbeam-queue", + "either", + "event-listener", + "futures-core", + "futures-intrusive", + "futures-io", + "futures-util", + "hashbrown 0.15.5", + "hashlink", + "indexmap 2.13.0", + "log", + "memchr", + "once_cell", + "percent-encoding", + "rust_decimal", + "serde", + "serde_json", + "sha2 0.10.9", + "smallvec", + "thiserror 2.0.18", + "time", + "tokio", + "tokio-stream", + "tracing", + "url", + "uuid", +] + +[[package]] +name = "sqlx-macros" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a2d452988ccaacfbf5e0bdbc348fb91d7c8af5bee192173ac3636b5fb6e6715d" +dependencies = [ + "proc-macro2", + "quote", + "sqlx-core", + "sqlx-macros-core", + "syn 2.0.117", +] + +[[package]] +name = "sqlx-macros-core" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "19a9c1841124ac5a61741f96e1d9e2ec77424bf323962dd894bdb93f37d5219b" +dependencies = [ + "dotenvy", + "either", + "heck 0.5.0", + "hex", + "once_cell", + "proc-macro2", + "quote", + "serde", + "serde_json", + "sha2 0.10.9", + "sqlx-core", + "sqlx-mysql", + "sqlx-postgres", + "sqlx-sqlite", + "syn 2.0.117", + "tokio", + "url", +] + +[[package]] +name = "sqlx-mysql" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aa003f0038df784eb8fecbbac13affe3da23b45194bd57dba231c8f48199c526" +dependencies = [ + "atoi", + "base64 0.22.1", + "bitflags", + "byteorder", + "bytes", + "chrono", + "crc", + "digest 0.10.7", + "dotenvy", + "either", + "futures-channel", + "futures-core", + "futures-io", + "futures-util", + "generic-array 0.14.7", + "hex", + "hkdf", + "hmac", + "itoa", + "log", + "md-5", + "memchr", + "once_cell", + "percent-encoding", + "rand 0.8.5", + "rsa", + "rust_decimal", + 
"serde", + "sha1", + "sha2 0.10.9", + "smallvec", + "sqlx-core", + "stringprep", + "thiserror 2.0.18", + "time", + "tracing", + "uuid", + "whoami", +] + +[[package]] +name = "sqlx-postgres" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "db58fcd5a53cf07c184b154801ff91347e4c30d17a3562a635ff028ad5deda46" +dependencies = [ + "atoi", + "base64 0.22.1", + "bitflags", + "byteorder", + "chrono", + "crc", + "dotenvy", + "etcetera", + "futures-channel", + "futures-core", + "futures-util", + "hex", + "hkdf", + "hmac", + "home", + "itoa", + "log", + "md-5", + "memchr", + "once_cell", + "rand 0.8.5", + "rust_decimal", + "serde", + "serde_json", + "sha2 0.10.9", + "smallvec", + "sqlx-core", + "stringprep", + "thiserror 2.0.18", + "time", + "tracing", + "uuid", + "whoami", +] + +[[package]] +name = "sqlx-sqlite" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c2d12fe70b2c1b4401038055f90f151b78208de1f9f89a7dbfd41587a10c3eea" +dependencies = [ + "atoi", + "chrono", + "flume", + "futures-channel", + "futures-core", + "futures-executor", + "futures-intrusive", + "futures-util", + "libsqlite3-sys", + "log", + "percent-encoding", + "serde", + "serde_urlencoded", + "sqlx-core", + "thiserror 2.0.18", + "time", + "tracing", + "url", + "uuid", +] + +[[package]] +name = "ssh-cipher" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "caac132742f0d33c3af65bfcde7f6aa8f62f0e991d80db99149eb9d44708784f" +dependencies = [ + "aes 0.8.4", + "aes-gcm 0.10.3", + "cbc", + "chacha20 0.9.1", + "cipher 0.4.4", + "ctr 0.9.2", + "poly1305 0.8.0", + "ssh-encoding 0.2.0", + "subtle", +] + +[[package]] +name = "ssh-cipher" +version = "0.3.0-rc.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "20540e2cbcf285a8e0172717b3ae77ccc2bbf63f3967263ea71e8048173b09ff" +dependencies = [ + "aes 0.9.0-rc.4", + "aes-gcm 0.11.0-rc.3", + "chacha20 
0.10.0", + "cipher 0.5.1", + "des", + "poly1305 0.9.0-rc.6", + "ssh-encoding 0.3.0-rc.8", + "zeroize", +] + +[[package]] +name = "ssh-encoding" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eb9242b9ef4108a78e8cd1a2c98e193ef372437f8c22be363075233321dd4a15" +dependencies = [ + "base64ct", + "bytes", + "pem-rfc7468 0.7.0", + "sha2 0.10.9", +] + +[[package]] +name = "ssh-encoding" +version = "0.3.0-rc.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af0ddb05d9c6034911bbdc541170b3068a2c019c7a10824e92921151563fb5af" +dependencies = [ + "base64ct", + "digest 0.11.2", + "pem-rfc7468 1.0.0", + "subtle", + "zeroize", +] + +[[package]] +name = "ssh-key" +version = "0.7.0-rc.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ae7221717f89c8629a83ba265a004cb864df267485656f790444fff1b69fa36" +dependencies = [ + "rand_core 0.10.0", + "sec1 0.8.1", + "sha2 0.11.0", + "signature 3.0.0-rc.10", + "ssh-cipher 0.3.0-rc.8", + "ssh-encoding 0.3.0-rc.8", + "subtle", + "zeroize", +] + +[[package]] +name = "stable_deref_trait" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ce2be8dc25455e1f91df71bfa12ad37d7af1092ae736f3a6cd0e37bc7810596" + +[[package]] +name = "static_assertions" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" + +[[package]] +name = "strength_reduce" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fe895eb47f22e2ddd4dabc02bce419d2e643c8e3b585c78158b349195bc24d82" + +[[package]] +name = "stringprep" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b4df3d392d81bd458a8a621b8bffbd2302a12ffe288a9d931670948749463b1" +dependencies = [ + "unicode-bidi", + "unicode-normalization", + "unicode-properties", +] 
+ +[[package]] +name = "strsim" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" + +[[package]] +name = "strum" +version = "0.27.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af23d6f6c1a224baef9d3f61e287d2761385a5b88fdab4eb4c6f11aeb54c4bcf" + +[[package]] +name = "subtle" +version = "2.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" + +[[package]] +name = "syn" +version = "1.0.109" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "syn" +version = "2.0.117" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e665b8803e7b1d2a727f4023456bbbbe74da67099c585258af0ad9c5013b9b99" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "sync_wrapper" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0bf256ce5efdfa370213c1dabab5935a12e49f2c58d15e9eac2870d3b4f27263" +dependencies = [ + "futures-core", +] + +[[package]] +name = "synstructure" +version = "0.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "728a70f3dbaf5bab7f0c4b1ac8d7ae5ea60a4b5549c8a5914361c99147a709d2" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.117", +] + +[[package]] +name = "sysinfo" +version = "0.38.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "92ab6a2f8bfe508deb3c6406578252e491d299cbbf3bc0529ecc3313aee4a52f" +dependencies = [ + "libc", + "memchr", + "ntapi", + "objc2-core-foundation", + "objc2-io-kit", + "windows", +] + +[[package]] +name = "tagptr" +version = "0.2.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b2093cf4c8eb1e67749a6762251bc9cd836b6fc171623bd0a9d324d37af2417" + +[[package]] +name = "tap" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" + +[[package]] +name = "tar" +version = "0.4.45" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "22692a6476a21fa75fdfc11d452fda482af402c008cdbaf3476414e122040973" +dependencies = [ + "filetime", + "libc", + "xattr", +] + +[[package]] +name = "tempfile" +version = "3.27.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32497e9a4c7b38532efcdebeef879707aa9f794296a4f0244f6f69e9bc8574bd" +dependencies = [ + "fastrand", + "getrandom 0.4.2", + "once_cell", + "rustix", + "windows-sys 0.61.2", +] + +[[package]] +name = "thiserror" +version = "1.0.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52" +dependencies = [ + "thiserror-impl 1.0.69", +] + +[[package]] +name = "thiserror" +version = "2.0.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4288b5bcbc7920c07a1149a35cf9590a2aa808e0bc1eafaade0b80947865fbc4" +dependencies = [ + "thiserror-impl 2.0.18", +] + +[[package]] +name = "thiserror-impl" +version = "1.0.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.117", +] + +[[package]] +name = "thiserror-impl" +version = "2.0.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ebc4ee7f67670e9b64d05fa4253e753e016c6c95ff35b89b7941d6b856dec1d5" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.117", +] + +[[package]] +name = "thread_local" +version = "1.1.9" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "f60246a4944f24f6e018aa17cdeffb7818b76356965d03b07d6a9886e8962185" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "tiff" +version = "0.11.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b63feaf3343d35b6ca4d50483f94843803b0f51634937cc2ec519fc32232bc52" +dependencies = [ + "fax", + "flate2", + "half", + "quick-error", + "weezl", + "zune-jpeg", +] + +[[package]] +name = "tiktoken-rs" +version = "0.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3a19830747d9034cd9da43a60eaa8e552dfda7712424aebf187b7a60126bae0d" +dependencies = [ + "anyhow", + "base64 0.22.1", + "bstr", + "fancy-regex", + "lazy_static", + "regex", + "rustc-hash 1.1.0", +] + +[[package]] +name = "time" +version = "0.3.47" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "743bd48c283afc0388f9b8827b976905fb217ad9e647fae3a379a9283c4def2c" +dependencies = [ + "deranged", + "itoa", + "js-sys", + "num-conv", + "powerfmt", + "serde_core", + "time-core", + "time-macros", +] + +[[package]] +name = "time-core" +version = "0.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7694e1cfe791f8d31026952abf09c69ca6f6fa4e1a1229e18988f06a04a12dca" + +[[package]] +name = "time-macros" +version = "0.2.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2e70e4c5a0e0a8a4823ad65dfe1a6930e4f4d756dcd9dd7939022b5e8c501215" +dependencies = [ + "num-conv", + "time-core", +] + +[[package]] +name = "tiny-keccak" +version = "2.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2c9d3793400a45f954c52e73d068316d76b6f4e36977e3fcebb13a2721e80237" +dependencies = [ + "crunchy", +] + +[[package]] +name = "tinystr" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42d3e9c45c09de15d06dd8acf5f4e0e399e85927b7f00711024eb7ae10fa4869" 
+dependencies = [ + "displaydoc", + "zerovec", +] + +[[package]] +name = "tinyvec" +version = "1.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3e61e67053d25a4e82c844e8424039d9745781b3fc4f32b8d55ed50f5f667ef3" +dependencies = [ + "tinyvec_macros", +] + +[[package]] +name = "tinyvec_macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" + +[[package]] +name = "tls_codec" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0de2e01245e2bb89d6f05801c564fa27624dbd7b1846859876c7dad82e90bf6b" +dependencies = [ + "tls_codec_derive", + "zeroize", +] + +[[package]] +name = "tls_codec_derive" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2d2e76690929402faae40aebdda620a2c0e25dd6d3b9afe48867dfd95991f4bd" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.117", +] + +[[package]] +name = "tokio" +version = "1.50.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "27ad5e34374e03cfffefc301becb44e9dc3c17584f414349ebe29ed26661822d" +dependencies = [ + "bytes", + "libc", + "mio", + "parking_lot", + "pin-project-lite", + "signal-hook-registry", + "socket2 0.6.3", + "tokio-macros", + "windows-sys 0.61.2", +] + +[[package]] +name = "tokio-macros" +version = "2.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c55a2eff8b69ce66c84f85e1da1c233edc36ceb85a2058d11b0d6a3c7e7569c" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.117", +] + +[[package]] +name = "tokio-rustls" +version = "0.26.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1729aa945f29d91ba541258c8df89027d5792d85a8841fb65e8bf0f4ede4ef61" +dependencies = [ + "rustls", + "tokio", +] + +[[package]] +name = "tokio-stream" +version = "0.1.18" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "32da49809aab5c3bc678af03902d4ccddea2a87d028d86392a4b1560c6906c70" +dependencies = [ + "futures-core", + "pin-project-lite", + "tokio", + "tokio-util", +] + +[[package]] +name = "tokio-util" +version = "0.7.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ae9cec805b01e8fc3fd2fe289f89149a9b66dd16786abd8b19cfa7b48cb0098" +dependencies = [ + "bytes", + "futures-core", + "futures-sink", + "futures-util", + "pin-project-lite", + "slab", + "tokio", +] + +[[package]] +name = "toml_datetime" +version = "1.1.0+spec-1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97251a7c317e03ad83774a8752a7e81fb6067740609f75ea2b585b569a59198f" +dependencies = [ + "serde_core", +] + +[[package]] +name = "toml_edit" +version = "0.25.8+spec-1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "16bff38f1d86c47f9ff0647e6838d7bb362522bdf44006c7068c2b1e606f1f3c" +dependencies = [ + "indexmap 2.13.0", + "toml_datetime", + "toml_parser", + "winnow", +] + +[[package]] +name = "toml_parser" +version = "1.1.0+spec-1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2334f11ee363607eb04df9b8fc8a13ca1715a72ba8662a26ac285c98aabb4011" +dependencies = [ + "winnow", +] + +[[package]] +name = "tonic" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "877c5b330756d856ffcc4553ab34a5684481ade925ecc54bcd1bf02b1d0d4d52" +dependencies = [ + "async-stream", + "async-trait", + "axum", + "base64 0.22.1", + "bytes", + "flate2", + "h2 0.4.13", + "http 1.4.0", + "http-body", + "http-body-util", + "hyper", + "hyper-timeout", + "hyper-util", + "percent-encoding", + "pin-project", + "prost", + "rustls-native-certs 0.8.3", + "rustls-pemfile", + "socket2 0.5.10", + "tokio", + "tokio-rustls", + "tokio-stream", + "tower 0.4.13", + "tower-layer", + "tower-service", + "tracing", 
+] + +[[package]] +name = "tower" +version = "0.4.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8fa9be0de6cf49e536ce1851f987bd21a43b771b09473c3549a6c853db37c1c" +dependencies = [ + "futures-core", + "futures-util", + "indexmap 1.9.3", + "pin-project", + "pin-project-lite", + "rand 0.8.5", + "slab", + "tokio", + "tokio-util", + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = "tower" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ebe5ef63511595f1344e2d5cfa636d973292adc0eec1f0ad45fae9f0851ab1d4" +dependencies = [ + "futures-core", + "futures-util", + "pin-project-lite", + "sync_wrapper", + "tokio", + "tokio-util", + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = "tower-http" +version = "0.6.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d4e6559d53cc268e5031cd8429d05415bc4cb4aefc4aa5d6cc35fbf5b924a1f8" +dependencies = [ + "base64 0.22.1", + "bitflags", + "bytes", + "futures-util", + "http 1.4.0", + "http-body", + "iri-string", + "mime", + "pin-project-lite", + "tower 0.5.3", + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = "tower-layer" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "121c2a6cda46980bb0fcd1647ffaf6cd3fc79a013de288782836f6df9c48780e" + +[[package]] +name = "tower-service" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3" + +[[package]] +name = "tracing" +version = "0.1.44" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "63e71662fa4b2a2c3a26f570f037eb95bb1f85397f3cd8076caed2f026a6d100" +dependencies = [ + "log", + "pin-project-lite", + "tracing-attributes", + "tracing-core", +] + +[[package]] +name = "tracing-attributes" +version = "0.1.31" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "7490cfa5ec963746568740651ac6781f701c9c5ea257c58e057f3ba8cf69e8da" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.117", +] + +[[package]] +name = "tracing-core" +version = "0.1.36" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "db97caf9d906fbde555dd62fa95ddba9eecfd14cb388e4f491a66d74cd5fb79a" +dependencies = [ + "once_cell", + "valuable", +] + +[[package]] +name = "tracing-log" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3" +dependencies = [ + "log", + "once_cell", + "tracing-core", +] + +[[package]] +name = "tracing-serde" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "704b1aeb7be0d0a84fc9828cae51dab5970fee5088f83d1dd7ee6f6246fc6ff1" +dependencies = [ + "serde", + "tracing-core", +] + +[[package]] +name = "tracing-subscriber" +version = "0.3.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cb7f578e5945fb242538965c2d0b04418d38ec25c79d160cd279bf0731c8d319" +dependencies = [ + "matchers", + "nu-ansi-term", + "once_cell", + "regex-automata", + "serde", + "serde_json", + "sharded-slab", + "smallvec", + "thread_local", + "tracing", + "tracing-core", + "tracing-log", + "tracing-serde", +] + +[[package]] +name = "transport" +version = "0.2.9" + +[[package]] +name = "transpose" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1ad61aed86bc3faea4300c7aee358b4c6d0c8d6ccc36524c96e4c92ccf26e77e" +dependencies = [ + "num-integer", + "strength_reduce", +] + +[[package]] +name = "try-lock" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" + +[[package]] +name = "ttf-parser" +version = "0.25.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "d2df906b07856748fa3f6e0ad0cbaa047052d4a7dd609e231c4f72cee8c36f31" + +[[package]] +name = "typed-path" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e28f89b80c87b8fb0cf04ab448d5dd0dd0ade2f8891bae878de66a75a28600e" + +[[package]] +name = "typenum" +version = "1.19.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "562d481066bde0658276a35467c4af00bdc6ee726305698a55b86e61d7ad82bb" + +[[package]] +name = "ucd-trie" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2896d95c02a80c6d6a5d6e953d479f5ddf2dfdb6a244441010e373ac0fb88971" + +[[package]] +name = "unicase" +version = "2.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dbc4bc3a9f746d862c45cb89d705aa10f187bb96c76001afab07a0d35ce60142" + +[[package]] +name = "unicode-bidi" +version = "0.3.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c1cb5db39152898a79168971543b1cb5020dff7fe43c8dc468b0885f5e29df5" + +[[package]] +name = "unicode-ident" +version = "1.0.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6e4313cd5fcd3dad5cafa179702e2b244f760991f45397d14d4ebf38247da75" + +[[package]] +name = "unicode-normalization" +version = "0.1.25" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5fd4f6878c9cb28d874b009da9e8d183b5abc80117c40bbd187a1fde336be6e8" +dependencies = [ + "tinyvec", +] + +[[package]] +name = "unicode-properties" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7df058c713841ad818f1dc5d3fd88063241cc61f49f5fbea4b951e8cf5a8d71d" + +[[package]] +name = "unicode-segmentation" +version = "1.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"9629274872b2bfaf8d66f5f15725007f635594914870f65218920345aa11aa8c" + +[[package]] +name = "unicode-xid" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ebc1c04c71510c7f702b52b7c350734c9ff1295c464a03335b00bb84fc54f853" + +[[package]] +name = "universal-hash" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc1de2c688dc15305988b563c3854064043356019f97a4b46276fe734c4f07ea" +dependencies = [ + "crypto-common 0.1.7", + "subtle", +] + +[[package]] +name = "universal-hash" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f4987bdc12753382e0bec4a65c50738ffaabc998b9cdd1f952fb5f39b0048a96" +dependencies = [ + "crypto-common 0.2.1", + "ctutils", +] + +[[package]] +name = "unsafe-libyaml" +version = "0.2.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "673aac59facbab8a9007c7f6108d11f63b603f7cabff99fabf650fea5c32b861" + +[[package]] +name = "untrusted" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" + +[[package]] +name = "url" +version = "2.5.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff67a8a4397373c3ef660812acab3268222035010ab8680ec4215f38ba3d0eed" +dependencies = [ + "form_urlencoded", + "idna", + "percent-encoding", + "serde", +] + +[[package]] +name = "utf8_iter" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be" + +[[package]] +name = "utf8parse" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" + +[[package]] +name = "utoipa" +version = "5.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum 
= "2fcc29c80c21c31608227e0912b2d7fddba57ad76b606890627ba8ee7964e993" +dependencies = [ + "indexmap 2.13.0", + "serde", + "serde_json", + "utoipa-gen", +] + +[[package]] +name = "utoipa-gen" +version = "5.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d79d08d92ab8af4c5e8a6da20c47ae3f61a0f1dabc1997cdf2d082b757ca08b" +dependencies = [ + "proc-macro2", + "quote", + "regex", + "syn 2.0.117", + "uuid", +] + +[[package]] +name = "uuid" +version = "1.23.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5ac8b6f42ead25368cf5b098aeb3dc8a1a2c05a3eee8a9a1a68c640edbfc79d9" +dependencies = [ + "getrandom 0.4.2", + "js-sys", + "serde_core", + "wasm-bindgen", +] + +[[package]] +name = "v_frame" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "666b7727c8875d6ab5db9533418d7c764233ac9c0cff1d469aec8fa127597be2" +dependencies = [ + "aligned-vec", + "num-traits", + "wasm-bindgen", +] + +[[package]] +name = "valuable" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba73ea9cf16a25df0c8caa16c51acb937d5712a8429db78a3ee29d5dcacd3a65" + +[[package]] +name = "vcpkg" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" + +[[package]] +name = "version_check" +version = "0.9.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" + +[[package]] +name = "want" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa7760aed19e106de2c7c0b581b509f2f25d3dacaf737cb82ac61bc6d760b0e" +dependencies = [ + "try-lock", +] + +[[package]] +name = "wasi" +version = "0.11.1+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"ccf3ec651a847eb01de73ccad15eb7d99f80485de043efb2f370cd654f4ea44b" + +[[package]] +name = "wasip2" +version = "1.0.2+wasi-0.2.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9517f9239f02c069db75e65f174b3da828fe5f5b945c4dd26bd25d89c03ebcf5" +dependencies = [ + "wit-bindgen", +] + +[[package]] +name = "wasip3" +version = "0.4.0+wasi-0.3.0-rc-2026-01-06" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5428f8bf88ea5ddc08faddef2ac4a67e390b88186c703ce6dbd955e1c145aca5" +dependencies = [ + "wit-bindgen", +] + +[[package]] +name = "wasite" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8dad83b4f25e74f184f64c43b150b91efe7647395b42289f38e50566d82855b" + +[[package]] +name = "wasm-bindgen" +version = "0.2.115" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6523d69017b7633e396a89c5efab138161ed5aafcbc8d3e5c5a42ae38f50495a" +dependencies = [ + "cfg-if", + "once_cell", + "rustversion", + "serde", + "wasm-bindgen-macro", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-futures" +version = "0.4.65" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2d1faf851e778dfa54db7cd438b70758eba9755cb47403f3496edd7c8fc212f0" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "wasm-bindgen-macro" +version = "0.2.115" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4e3a6c758eb2f701ed3d052ff5737f5bfe6614326ea7f3bbac7156192dc32e67" +dependencies = [ + "quote", + "wasm-bindgen-macro-support", +] + +[[package]] +name = "wasm-bindgen-macro-support" +version = "0.2.115" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "921de2737904886b52bcbb237301552d05969a6f9c40d261eb0533c8b055fedf" +dependencies = [ + "bumpalo", + "proc-macro2", + "quote", + "syn 2.0.117", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-shared" 
+version = "0.2.115" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a93e946af942b58934c604527337bad9ae33ba1d5c6900bbb41c2c07c2364a93" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "wasm-encoder" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "990065f2fe63003fe337b932cfb5e3b80e0b4d0f5ff650e6985b1048f62c8319" +dependencies = [ + "leb128fmt", + "wasmparser", +] + +[[package]] +name = "wasm-metadata" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bb0e353e6a2fbdc176932bbaab493762eb1255a7900fe0fea1a2f96c296cc909" +dependencies = [ + "anyhow", + "indexmap 2.13.0", + "wasm-encoder", + "wasmparser", +] + +[[package]] +name = "wasm-streams" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "15053d8d85c7eccdbefef60f06769760a563c7f0a9d6902a13d35c7800b0ad65" +dependencies = [ + "futures-util", + "js-sys", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", +] + +[[package]] +name = "wasmparser" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "47b807c72e1bac69382b3a6fb3dbe8ea4c0ed87ff5629b8685ae6b9a611028fe" +dependencies = [ + "bitflags", + "hashbrown 0.15.5", + "indexmap 2.13.0", + "semver", +] + +[[package]] +name = "web-sys" +version = "0.3.92" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "84cde8507f4d7cfcb1185b8cb5890c494ffea65edbe1ba82cfd63661c805ed94" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "web-time" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a6580f308b1fad9207618087a65c04e7a10bc77e02c8e84e9b00dd4b12fa0bb" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "webhook" +version = "0.2.9" + +[[package]] +name = "webpki-roots" +version = "1.0.6" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "22cfaf3c063993ff62e73cb4311efde4db1efb31ab78a3e5c457939ad5cc0bed" +dependencies = [ + "rustls-pki-types", +] + +[[package]] +name = "weezl" +version = "0.1.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a28ac98ddc8b9274cb41bb4d9d4d5c425b6020c50c46f25559911905610b4a88" + +[[package]] +name = "which" +version = "8.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "81995fafaaaf6ae47a7d0cc83c67caf92aeb7e5331650ae6ff856f7c0c60c459" +dependencies = [ + "libc", +] + +[[package]] +name = "whoami" +version = "1.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d4a4db5077702ca3015d3d02d74974948aba2ad9e12ab7df718ee64ccd7e97d" +dependencies = [ + "libredox", + "wasite", +] + +[[package]] +name = "wide" +version = "0.7.33" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ce5da8ecb62bcd8ec8b7ea19f69a51275e91299be594ea5cc6ef7819e16cd03" +dependencies = [ + "bytemuck", + "safe_arch", +] + +[[package]] +name = "winapi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" +dependencies = [ + "winapi-i686-pc-windows-gnu", + "winapi-x86_64-pc-windows-gnu", +] + +[[package]] +name = "winapi-i686-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" + +[[package]] +name = "winapi-x86_64-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" + +[[package]] +name = "windows" +version = "0.62.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"527fadee13e0c05939a6a05d5bd6eec6cd2e3dbd648b9f8e447c6518133d8580" +dependencies = [ + "windows-collections", + "windows-core", + "windows-future", + "windows-numerics", +] + +[[package]] +name = "windows-collections" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "23b2d95af1a8a14a3c7367e1ed4fc9c20e0a26e79551b1454d72583c97cc6610" +dependencies = [ + "windows-core", +] + +[[package]] +name = "windows-core" +version = "0.62.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8e83a14d34d0623b51dce9581199302a221863196a1dde71a7663a4c2be9deb" +dependencies = [ + "windows-implement", + "windows-interface", + "windows-link", + "windows-result", + "windows-strings", +] + +[[package]] +name = "windows-future" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e1d6f90251fe18a279739e78025bd6ddc52a7e22f921070ccdc67dde84c605cb" +dependencies = [ + "windows-core", + "windows-link", + "windows-threading", +] + +[[package]] +name = "windows-implement" +version = "0.60.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "053e2e040ab57b9dc951b72c264860db7eb3b0200ba345b4e4c3b14f67855ddf" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.117", +] + +[[package]] +name = "windows-interface" +version = "0.59.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f316c4a2570ba26bbec722032c4099d8c8bc095efccdc15688708623367e358" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.117", +] + +[[package]] +name = "windows-link" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5" + +[[package]] +name = "windows-numerics" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e2e40844ac143cdb44aead537bbf727de9b044e107a0f1220392177d15b0f26" 
+dependencies = [ + "windows-core", + "windows-link", +] + +[[package]] +name = "windows-result" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7781fa89eaf60850ac3d2da7af8e5242a5ea78d1a11c49bf2910bb5a73853eb5" +dependencies = [ + "windows-link", +] + +[[package]] +name = "windows-strings" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7837d08f69c77cf6b07689544538e017c1bfcf57e34b4c0ff58e6c2cd3b37091" +dependencies = [ + "windows-link", +] + +[[package]] +name = "windows-sys" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" +dependencies = [ + "windows-targets 0.48.5", +] + +[[package]] +name = "windows-sys" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" +dependencies = [ + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-sys" +version = "0.60.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f2f500e4d28234f72040990ec9d39e3a6b950f9f22d3dba18416c35882612bcb" +dependencies = [ + "windows-targets 0.53.5", +] + +[[package]] +name = "windows-sys" +version = "0.61.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ae137229bcbd6cdf0f7b80a31df61766145077ddf49416a728b02cb3921ff3fc" +dependencies = [ + "windows-link", +] + +[[package]] +name = "windows-targets" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c" +dependencies = [ + "windows_aarch64_gnullvm 0.48.5", + "windows_aarch64_msvc 0.48.5", + "windows_i686_gnu 0.48.5", + "windows_i686_msvc 0.48.5", + "windows_x86_64_gnu 0.48.5", + "windows_x86_64_gnullvm 0.48.5", + "windows_x86_64_msvc 0.48.5", +] + 
+[[package]] +name = "windows-targets" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" +dependencies = [ + "windows_aarch64_gnullvm 0.52.6", + "windows_aarch64_msvc 0.52.6", + "windows_i686_gnu 0.52.6", + "windows_i686_gnullvm 0.52.6", + "windows_i686_msvc 0.52.6", + "windows_x86_64_gnu 0.52.6", + "windows_x86_64_gnullvm 0.52.6", + "windows_x86_64_msvc 0.52.6", +] + +[[package]] +name = "windows-targets" +version = "0.53.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4945f9f551b88e0d65f3db0bc25c33b8acea4d9e41163edf90dcd0b19f9069f3" +dependencies = [ + "windows-link", + "windows_aarch64_gnullvm 0.53.1", + "windows_aarch64_msvc 0.53.1", + "windows_i686_gnu 0.53.1", + "windows_i686_gnullvm 0.53.1", + "windows_i686_msvc 0.53.1", + "windows_x86_64_gnu 0.53.1", + "windows_x86_64_gnullvm 0.53.1", + "windows_x86_64_msvc 0.53.1", +] + +[[package]] +name = "windows-threading" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3949bd5b99cafdf1c7ca86b43ca564028dfe27d66958f2470940f73d86d75b37" +dependencies = [ + "windows-link", +] + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a9d8416fa8b42f5c947f8482c43e7d89e73a173cead56d044f6a56104a6d1b53" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.48.5" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9d782e804c2f632e395708e99a94275910eb9100b2114651e04744e9b125006" + +[[package]] +name = "windows_i686_gnu" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" + +[[package]] +name = "windows_i686_gnu" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" + +[[package]] +name = "windows_i686_gnu" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "960e6da069d81e09becb0ca57a65220ddff016ff2d6af6a223cf372a506593a3" + +[[package]] +name = "windows_i686_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" + +[[package]] +name = "windows_i686_gnullvm" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fa7359d10048f68ab8b09fa71c3daccfb0e9b559aed648a8f95469c27057180c" + +[[package]] +name = "windows_i686_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" + +[[package]] +name = "windows_i686_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" + +[[package]] +name = "windows_i686_msvc" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e7ac75179f18232fe9c285163565a57ef8d3c89254a30685b57d83a38d326c2" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c3842cdd74a865a8066ab39c8a7a473c0778a3f29370b5fd6b4b9aa7df4a499" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ffa179e2d07eee8ad8f57493436566c7cc30ac536a3379fdf008f47f6bb7ae1" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" + +[[package]] +name = 
"windows_x86_64_msvc" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d6bbff5f0aada427a1e5a6da5f1f98158182f26556f345ac9e04d36d0ebed650" + +[[package]] +name = "winnow" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a90e88e4667264a994d34e6d1ab2d26d398dcdca8b7f52bec8668957517fc7d8" +dependencies = [ + "memchr", +] + +[[package]] +name = "wit-bindgen" +version = "0.51.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d7249219f66ced02969388cf2bb044a09756a083d0fab1e566056b04d9fbcaa5" +dependencies = [ + "wit-bindgen-rust-macro", +] + +[[package]] +name = "wit-bindgen-core" +version = "0.51.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ea61de684c3ea68cb082b7a88508a8b27fcc8b797d738bfc99a82facf1d752dc" +dependencies = [ + "anyhow", + "heck 0.5.0", + "wit-parser", +] + +[[package]] +name = "wit-bindgen-rust" +version = "0.51.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b7c566e0f4b284dd6561c786d9cb0142da491f46a9fbed79ea69cdad5db17f21" +dependencies = [ + "anyhow", + "heck 0.5.0", + "indexmap 2.13.0", + "prettyplease", + "syn 2.0.117", + "wasm-metadata", + "wit-bindgen-core", + "wit-component", +] + +[[package]] +name = "wit-bindgen-rust-macro" +version = "0.51.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c0f9bfd77e6a48eccf51359e3ae77140a7f50b1e2ebfe62422d8afdaffab17a" +dependencies = [ + "anyhow", + "prettyplease", + "proc-macro2", + "quote", + "syn 2.0.117", + "wit-bindgen-core", + "wit-bindgen-rust", +] + +[[package]] +name = "wit-component" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d66ea20e9553b30172b5e831994e35fbde2d165325bec84fc43dbf6f4eb9cb2" +dependencies = [ + "anyhow", + "bitflags", + "indexmap 2.13.0", + "log", + "serde", + "serde_derive", + "serde_json", + 
"wasm-encoder", + "wasm-metadata", + "wasmparser", + "wit-parser", +] + +[[package]] +name = "wit-parser" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ecc8ac4bc1dc3381b7f59c34f00b67e18f910c2c0f50015669dde7def656a736" +dependencies = [ + "anyhow", + "id-arena", + "indexmap 2.13.0", + "log", + "semver", + "serde", + "serde_derive", + "serde_json", + "unicode-xid", + "wasmparser", +] + +[[package]] +name = "writeable" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9edde0db4769d2dc68579893f2306b26c6ecfbe0ef499b013d731b7b9247e0b9" + +[[package]] +name = "wyz" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05f360fc0b24296329c78fda852a1e9ae82de9cf7b27dae4b7f62f118f77b9ed" +dependencies = [ + "tap", +] + +[[package]] +name = "xattr" +version = "1.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32e45ad4206f6d2479085147f02bc2ef834ac85886624a23575ae137c8aa8156" +dependencies = [ + "libc", + "rustix", +] + +[[package]] +name = "xxhash-rust" +version = "0.8.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fdd20c5420375476fbd4394763288da7eb0cc0b8c11deed431a91562af7335d3" + +[[package]] +name = "y4m" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a5a4b21e1a62b67a2970e6831bc091d7b87e119e7f9791aef9702e3bef04448" + +[[package]] +name = "yansi" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cfe53a6657fd280eaa890a3bc59152892ffa3e30101319d168b781ed6529b049" + +[[package]] +name = "yasna" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e17bb3549cc1321ae1296b9cdc2698e2b6cb1992adfa19a8c72e5b7a738f44cd" +dependencies = [ + "bit-vec", + "num-bigint", +] + +[[package]] +name = "yoke" +version = "0.8.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "72d6e5c6afb84d73944e5cedb052c4680d5657337201555f9f2a16b7406d4954" +dependencies = [ + "stable_deref_trait", + "yoke-derive", + "zerofrom", +] + +[[package]] +name = "yoke-derive" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b659052874eb698efe5b9e8cf382204678a0086ebf46982b79d6ca3182927e5d" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.117", + "synstructure", +] + +[[package]] +name = "zerocopy" +version = "0.8.48" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eed437bf9d6692032087e337407a86f04cd8d6a16a37199ed57949d415bd68e9" +dependencies = [ + "zerocopy-derive", +] + +[[package]] +name = "zerocopy-derive" +version = "0.8.48" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70e3cd084b1788766f53af483dd21f93881ff30d7320490ec3ef7526d203bad4" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.117", +] + +[[package]] +name = "zerofrom" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "50cc42e0333e05660c3587f3bf9d0478688e15d870fab3346451ce7f8c9fbea5" +dependencies = [ + "zerofrom-derive", +] + +[[package]] +name = "zerofrom-derive" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.117", + "synstructure", +] + +[[package]] +name = "zeroize" +version = "1.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b97154e67e32c85465826e8bcc1c59429aaaf107c1e4a9e53c8d8ccd5eff88d0" +dependencies = [ + "zeroize_derive", +] + +[[package]] +name = "zeroize_derive" +version = "1.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85a5b4158499876c763cb03bc4e49185d3cccbabb15b33c627f7884f43db852e" +dependencies = [ + 
"proc-macro2", + "quote", + "syn 2.0.117", +] + +[[package]] +name = "zerotrie" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2a59c17a5562d507e4b54960e8569ebee33bee890c70aa3fe7b97e85a9fd7851" +dependencies = [ + "displaydoc", + "yoke", + "zerofrom", +] + +[[package]] +name = "zerovec" +version = "0.11.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c28719294829477f525be0186d13efa9a3c602f7ec202ca9e353d310fb9a002" +dependencies = [ + "yoke", + "zerofrom", + "zerovec-derive", +] + +[[package]] +name = "zerovec-derive" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eadce39539ca5cb3985590102671f2567e659fca9666581ad3411d59207951f3" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.117", +] + +[[package]] +name = "zip" +version = "8.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7756d0206d058333667493c4014f545f4b9603c4330ccd6d9b3f86dcab59f7d9" +dependencies = [ + "aes 0.8.4", + "bzip2", + "constant_time_eq", + "crc32fast", + "deflate64", + "flate2", + "getrandom 0.4.2", + "hmac", + "indexmap 2.13.0", + "lzma-rust2", + "memchr", + "pbkdf2", + "ppmd-rust", + "sha1", + "time", + "typed-path", + "zeroize", + "zopfli", + "zstd", +] + +[[package]] +name = "zlib-rs" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3be3d40e40a133f9c916ee3f9f4fa2d9d63435b5fbe1bfc6d9dae0aa0ada1513" + +[[package]] +name = "zmij" +version = "1.0.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8848ee67ecc8aedbaf3e4122217aff892639231befc6a1b58d29fff4c2cabaa" + +[[package]] +name = "zopfli" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f05cd8797d63865425ff89b5c4a48804f35ba0ce8d125800027ad6017d2b5249" +dependencies = [ + "bumpalo", + "crc32fast", + "log", + "simd-adler32", +] + +[[package]] 
+name = "zstd" +version = "0.13.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e91ee311a569c327171651566e07972200e76fcfe2242a4fa446149a3881c08a" +dependencies = [ + "zstd-safe", +] + +[[package]] +name = "zstd-safe" +version = "7.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f49c4d5f0abb602a93fb8736af2a4f4dd9512e36f7f570d66e65ff867ed3b9d" +dependencies = [ + "zstd-sys", +] + +[[package]] +name = "zstd-sys" +version = "2.0.16+zstd.1.5.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91e19ebc2adc8f83e43039e79776e3fda8ca919132d68a1fed6a5faca2683748" +dependencies = [ + "cc", + "pkg-config", +] + +[[package]] +name = "zune-core" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cb8a0807f7c01457d0379ba880ba6322660448ddebc890ce29bb64da71fb40f9" + +[[package]] +name = "zune-inflate" +version = "0.2.54" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "73ab332fe2f6680068f3582b16a24f90ad7096d5d39b974d1c0aff0125116f02" +dependencies = [ + "simd-adler32", +] + +[[package]] +name = "zune-jpeg" +version = "0.5.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "27bc9d5b815bc103f142aa054f561d9187d191692ec7c2d1e2b4737f8dbd7296" +dependencies = [ + "zune-core", +] diff --git a/Cargo.toml b/Cargo.toml new file mode 100644 index 0000000..3eb6ae7 --- /dev/null +++ b/Cargo.toml @@ -0,0 +1,185 @@ +[workspace] +members = [ + "libs/models", + "libs/session", + "libs/git", + "libs/email", + "libs/queue", + "libs/room", + "libs/config", + "libs/service", + "libs/db", + "libs/api", + "libs/webhook", + "libs/transport", + "libs/rpc", + "libs/avatar", + "libs/agent", + "libs/migrate", + "libs/agent-tool-derive", + "apps/migrate", + "apps/app", + "apps/git-hook", + "apps/gitserver", + "apps/email", + "apps/operator", +] + +resolver = "3" + +[workspace.dependencies] +models = { path 
= "libs/models" } +session = { path = "libs/session" } +git = { path = "libs/git" } +email = { path = "libs/email" } +queue = { path = "libs/queue" } +room = { path = "libs/room" } +config = { path = "libs/config" } +service = { path = "libs/service" } +db = { path = "libs/db" } +api = { path = "libs/api" } +agent = { path = "libs/agent" } +webhook = { path = "libs/webhook" } +rpc = { path = "libs/rpc" } +avatar = { path = "libs/avatar" } +migrate = { path = "libs/migrate" } + +sea-query = "1.0.0-rc.31" + +actix-web = "4.13.0" +actix-files = "0.6.10" +actix-cors = "0.7.1" +actix-session = "0.11.0" +actix-ws = "0.4.0" +actix-multipart = "0.7.2" +actix-analytics = "1.2.1" +actix-jwt-session = "1.0.7" +actix-csrf = "0.8.0" +actix-rt = "2.11.0" +actix = "0.13" +async-stream = "0.3" +async-nats = "0.47.0" +actix-service = "2.0.3" +actix-utils = "3.0.1" +redis = "1.1.0" +anyhow = "1.0.102" +derive_more = "2.1.1" +blake3 = "1.8.3" +argon2 = "0.5.3" +thiserror = "2.0.18" +password-hash = "0.6.0" +awc = "3.8.2" +bstr = "1.12.1" +captcha-rs = "0.5.0" +deadpool-redis = "0.23.0" +deadpool = "0.13.0" +dotenv = "0.15.0" +env_logger = "0.11.10" +flate2 = "1.1.9" +git2 = "0.20.4" +slog = "2.8.2" +git2-ext = "1.0.0" +git2-hooks = "0.7.0" +futures = "0.3.32" +futures-util = "0.3.32" +globset = "0.4.18" +hex = "0.4.3" +lettre = { version = "0.11.19", default-features = false, features = ["tokio1-rustls-tls", "smtp-transport", "builder", "pool"] } +kube = { version = "0.98", features = ["derive", "runtime"] } +k8s-openapi = { version = "0.24", default-features = false, features = ["v1_28", "schemars"] } +mime = "0.3.17" +mime_guess2 = "2.3.1" +opentelemetry = "0.31.0" +opentelemetry-otlp = "0.31.0" +opentelemetry_sdk = "0.31.0" +opentelemetry-http = "0.31.0" +prost = "0.14.3" +prost-build = "0.14.3" +qdrant-client = "1.17.0" +rand = "0.10.0" +russh = { version = "0.55.0", default-features = false } +hmac = { version = "0.12.1", features = ["std"] } +sha1_smol = "1.0.1" +rsa = { 
version = "0.9.7", package = "rsa" } +reqwest = { version = "0.13.2", default-features = false } +dotenvy = "0.15.7" +aws-sdk-s3 = "1.127.0" +sea-orm = "2.0.0-rc.37" +sea-orm-migration = "2.0.0-rc.37" +sha1 = { version = "0.10.6", features = ["compress"] } +sha2 = "0.11.0-rc.5" +sysinfo = "0.38.4" +ssh-key = "0.7.0-rc.9" +tar = "0.4.45" +zip = "8.3.1" +tokenizer = "0.1.2" +tiktoken-rs = "0.9.1" +regex = "1.12.3" +jsonwebtoken = "10.3.0" +once_cell = "1.21.4" +async-trait = "0.1.89" +fs2 = "0.4.3" +image = "0.25.10" +tokio = "1.50.0" +tokio-util = "0.7.18" +tokio-stream = "0.1.18" +url = "2.5.8" +num_cpus = "1.17.0" +clap = "4.6.0" +time = "0.3.47" +chrono = "0.4.44" +tracing = "0.1.44" +tracing-subscriber = "0.3.23" +tracing-opentelemetry = "0.32.1" +tonic = "0.14.5" +tonic-build = "0.14.5" +uuid = "1.22.0" +async-openai = { version = "0.34.0", features = ["embedding", "chat-completion"] } +hostname = "0.4" +utoipa = { version = "5.4.0", features = ["chrono", "uuid"] } +rust_decimal = "1.40.0" +walkdir = "2.5.0" +moka = "0.12.15" +serde = "1.0.228" +serde_json = "1.0.149" +serde_yaml = "0.9.33" +serde_bytes = "0.11.19" +base64 = "0.22.1" + + + + +[workspace.package] +version = "0.2.9" +edition = "2024" +authors = [] +description = "" +repository = "" +readme = "" +homepage = "" +license = "" +keywords = [] +categories = [] +documentation = "" + +[workspace.lints.rust] +unsafe_code = "warn" + +[workspace.lints.clippy] +unwrap_used = "warn" +expect_used = "warn" + +[profile.dev] +debug = 1 +incremental = true +codegen-units = 256 + +[profile.release] +lto = "thin" +codegen-units = 1 +strip = true +opt-level = 3 + + +[profile.dev.package.num-bigint-dig] +opt-level = 3 \ No newline at end of file diff --git a/README.md b/README.md new file mode 100644 index 0000000..6cf9b6f --- /dev/null +++ b/README.md @@ -0,0 +1,263 @@ +# Code API + +> 一个现代化的代码协作与团队沟通平台,融合 GitHub 的代码管理体验与 Slack 的实时沟通功能。 + +## 项目概述 + +Code API 是一个全栈 monorepo 项目,采用 Rust 后端 + React 前端的技术栈。项目实现了类似 GitHub 
的 Issue 追踪、Pull Request 代码审查、Git 仓库管理,以及类似 Slack 的实时聊天 Room 功能。 + +### 核心功能 + +- **代码仓库管理** — Git 仓库浏览、分支管理、文件操作 +- **Issue 追踪** — 创建、分配、标签、评论 Issue +- **Pull Request** — 代码审查、Inline Comment、CI 状态检查 +- **实时聊天 (Room)** — 团队频道、消息回复、Thread 讨论 +- **通知系统** — 邮件通知、Webhook 集成 +- **用户系统** — 认证、会话管理、权限控制 + +## 技术栈 + +### 后端 (Rust) + +| 类别 | 技术 | +|------|------| +| 语言 | Rust 2024 Edition | +| Web 框架 | Actix-web | +| ORM | SeaORM | +| 数据库 | PostgreSQL | +| 缓存 | Redis | +| 实时通信 | WebSocket (actix-ws) | +| 消息队列 | NATS | +| 向量数据库 | Qdrant | +| Git 操作 | git2 / git2-ext | +| 认证 | JWT + Session | +| API 文档 | utoipa (OpenAPI) | + +### 前端 (TypeScript/React) + +| 类别 | 技术 | +|------|------| +| 语言 | TypeScript 5.9 | +| 框架 | React 19 | +| 路由 | React Router v7 | +| 构建工具 | Vite 8 + SWC | +| UI 组件 | shadcn/ui + Tailwind CSS 4 | +| 状态管理 | TanStack Query | +| HTTP 客户端 | Axios + OpenAPI 生成 | +| Markdown | react-markdown + Shiki | +| 拖拽 | dnd-kit | + +## 项目结构 + +``` +code/ +├── apps/ # 应用程序入口 +│ ├── app/ # 主 Web 应用 +│ ├── gitserver/ # Git HTTP/SSH 服务器 +│ ├── git-hook/ # Git Hook 处理服务 +│ ├── email/ # 邮件发送服务 +│ ├── migrate/ # 数据库迁移工具 +│ └── operator/ # Kubernetes 操作器 +├── libs/ # 共享库 +│ ├── api/ # REST API 路由与处理器 +│ ├── models/ # 数据库模型 (SeaORM) +│ ├── service/ # 业务逻辑层 +│ ├── db/ # 数据库连接池 +│ ├── config/ # 配置管理 +│ ├── session/ # 会话管理 +│ ├── git/ # Git 操作封装 +│ ├── room/ # 实时聊天服务 +│ ├── queue/ # 消息队列 +│ ├── webhook/ # Webhook 处理 +│ ├── rpc/ # RPC 服务 (gRPC/Tonic) +│ ├── email/ # 邮件发送 +│ ├── agent/ # AI Agent 集成 +│ ├── avatar/ # 头像处理 +│ ├── transport/ # 传输层 +│ └── migrate/ # 迁移脚本 +├── src/ # 前端源代码 +│ ├── app/ # 页面路由组件 +│ ├── components/ # 可复用组件 +│ ├── contexts/ # React Context +│ ├── client/ # API 客户端 (OpenAPI 生成) +│ ├── hooks/ # 自定义 Hooks +│ └── lib/ # 工具函数 +├── docker/ # Docker 配置 +├── scripts/ # 构建脚本 +├── openapi.json # OpenAPI 规范文件 +└── Cargo.toml # Rust Workspace 配置 +``` + +## 快速开始 + +### 环境要求 + +- **Rust**: 最新稳定版 (Edition 2024) +- **Node.js**: >= 20 +- **pnpm**: >= 10 +- **PostgreSQL**: >= 14 +- 
**Redis**: >= 6 + +### 安装步骤 + +1. **克隆仓库** + ```bash + git clone <repository-url> + cd code + ``` + +2. **配置环境变量** + ```bash + cp .env.example .env + # 编辑 .env 文件,配置数据库连接等信息 + ``` + +3. **启动数据库与 Redis** + ```bash + # 使用 Docker 启动(推荐) + docker compose -f docker/docker-compose.yml up -d + ``` + +4. **数据库迁移** + ```bash + cargo run -p migrate + ``` + +5. **启动后端服务** + ```bash + cargo run -p app + ``` + +6. **启动前端开发服务器** + ```bash + pnpm install + pnpm dev + ``` + +7. **访问应用** + - 前端: http://localhost:5173 + - 后端 API: http://localhost:8080 + +## 开发指南 + +### 后端开发 + +```bash +# 运行所有测试 +cargo test + +# 运行特定模块测试 +cargo test -p service + +# 检查代码质量 +cargo clippy --workspace + +# 格式化代码 +cargo fmt --workspace + +# 生成 OpenAPI 文档 +pnpm openapi:gen-json +``` + +### 前端开发 + +```bash +# 安装依赖 +pnpm install + +# 启动开发服务器 +pnpm dev + +# 构建生产版本 +pnpm build + +# 代码检查 +pnpm lint + +# 生成 OpenAPI 客户端 +pnpm openapi:gen +``` + +### 数据库迁移 + +```bash +# 创建新迁移 +cd libs/migrate && cargo run -- create <migration_name> + +# 执行迁移 +cargo run -p migrate +``` + +## 配置说明 + +### 必需配置项 + +| 变量名 | 说明 | 示例 | +|--------|------|------| +| `APP_DATABASE_URL` | PostgreSQL 连接 | `postgresql://user:pass@localhost/db` | +| `APP_REDIS_URL` | Redis 连接 | `redis://localhost:6379` | +| `APP_AI_API_KEY` | AI 服务 API Key | `sk-xxxxx` | +| `APP_SMTP_*` | SMTP 邮件配置 | 见 `.env.example` | + +### 可选配置项 + +| 变量名 | 默认值 | 说明 | +|--------|--------|------| +| `APP_DATABASE_MAX_CONNECTIONS` | 10 | 数据库连接池大小 | +| `APP_LOG_LEVEL` | info | 日志级别 | +| `APP_QDRANT_URL` | - | 向量数据库地址 | +| `APP_REPOS_ROOT` | /data/repos | Git 仓库存储路径 | + +完整配置请参考 `.env.example`。 + +## API 文档 + +启动服务后访问 http://localhost:8080/swagger-ui 查看完整的 API 文档。 + +## 架构设计 + +### 后端分层架构 + +``` +┌─────────────────────────────────────┐ +│ apps/app │ ← 应用入口 +├─────────────────────────────────────┤ +│ libs/api │ ← HTTP 路由/Handler +├─────────────────────────────────────┤ +│ libs/service │ ← 业务逻辑层 +├─────────────────────────────────────┤ +│ libs/models │ libs/db │ libs/git│ ← 数据访问层 
+├─────────────────────────────────────┤ +│ PostgreSQL │ Redis │ Qdrant │ ← 存储层 +└─────────────────────────────────────┘ +``` + +### 前端目录结构 + +``` +src/ +├── app/ # 页面级组件 (按功能模块组织) +│ ├── project/ # 项目相关页面 (Issue、Settings) +│ ├── repository/ # 仓库相关页面 (PR、代码浏览) +│ └── settings/ # 用户设置 +├── components/ # 可复用组件 +│ ├── ui/ # 基础 UI 组件 (shadcn) +│ ├── project/ # 项目相关组件 +│ ├── repository/ # 仓库相关组件 +│ └── room/ # 聊天相关组件 +├── contexts/ # React Context (用户、聊天室等) +├── client/ # OpenAPI 生成的客户端 +└── lib/ # 工具函数与 Hooks +``` + +## 任务清单 + +项目当前开发任务详见 [task.md](./task.md),按优先级分为: + +- **P0** — 阻塞性问题(核心流程不通) +- **P1** — 核心体验(关键功能) +- **P2** — 体验优化(增强功能) + +## 许可证 + +[待添加] diff --git a/apps/app/Cargo.toml b/apps/app/Cargo.toml new file mode 100644 index 0000000..83a12ec --- /dev/null +++ b/apps/app/Cargo.toml @@ -0,0 +1,34 @@ +[package] +name = "app" +version.workspace = true +edition.workspace = true +authors.workspace = true +description.workspace = true +repository.workspace = true +readme.workspace = true +homepage.workspace = true +license.workspace = true +keywords.workspace = true +categories.workspace = true +documentation.workspace = true + +[dependencies] +tokio = { workspace = true, features = ["full"] } +uuid = { workspace = true } +service = { workspace = true } +api = { workspace = true } +session = { workspace = true } +config = { workspace = true } +db = { workspace = true } +migrate = { workspace = true } +actix-web = { workspace = true } +actix-cors = { workspace = true } +futures = { workspace = true } +slog = "2" +anyhow = { workspace = true } +clap = { workspace = true } +sea-orm = { workspace = true } +serde_json = { workspace = true } +chrono = { workspace = true } +[lints] +workspace = true diff --git a/apps/app/src/args.rs b/apps/app/src/args.rs new file mode 100644 index 0000000..3433ce5 --- /dev/null +++ b/apps/app/src/args.rs @@ -0,0 +1,12 @@ +use clap::Parser; + +#[derive(Parser, Debug)] +#[command(name = "app")] +#[command(version)] +pub struct 
ServerArgs { + #[arg(long, short)] + pub bind: Option<String>, + + #[arg(long)] + pub workers: Option<usize>, +} diff --git a/apps/app/src/logging.rs b/apps/app/src/logging.rs new file mode 100644 index 0000000..26a1068 --- /dev/null +++ b/apps/app/src/logging.rs @@ -0,0 +1,126 @@ +//! Structured HTTP request logging middleware using slog. +//! +//! Logs every incoming request with method, path, status code, +//! response time, client IP, and authenticated user ID. + +use actix_web::dev::{Service, ServiceRequest, ServiceResponse, Transform}; +use futures::future::{LocalBoxFuture, Ready, ok}; +use session::SessionExt; +use slog::{error as slog_error, info as slog_info, warn as slog_warn}; +use std::sync::Arc; +use std::task::{Context, Poll}; +use std::time::Instant; +use uuid::Uuid; + +/// Default log format: `{method} {path} {status} {duration_ms}ms` +pub struct RequestLogger { + log: slog::Logger, +} + +impl RequestLogger { + pub fn new(log: slog::Logger) -> Self { + Self { log } + } +} + +impl<S, B> Transform<S, ServiceRequest> for RequestLogger +where + S: Service<ServiceRequest, Response = ServiceResponse<B>, Error = actix_web::Error> + 'static, + S::Future: 'static, + B: 'static, +{ + type Response = ServiceResponse<B>; + type Error = actix_web::Error; + type Transform = RequestLoggerMiddleware<S>; + type InitError = (); + type Future = Ready<Result<Self::Transform, Self::InitError>>; + + fn new_transform(&self, service: S) -> Self::Future { + ok(RequestLoggerMiddleware { + service: Arc::new(service), + log: self.log.clone(), + }) + } +} + +pub struct RequestLoggerMiddleware<S> { + service: Arc<S>, + log: slog::Logger, +} + +impl<S> Clone for RequestLoggerMiddleware<S> { + fn clone(&self) -> Self { + Self { + service: self.service.clone(), + log: self.log.clone(), + } + } +} + +impl<S, B> Service<ServiceRequest> for RequestLoggerMiddleware<S> +where + S: Service<ServiceRequest, Response = ServiceResponse<B>, Error = actix_web::Error> + 'static, + S::Future: 
'static, + B: 'static, +{ + type Response = ServiceResponse<B>; + type Error = actix_web::Error; + type Future = LocalBoxFuture<'static, Result<Self::Response, Self::Error>>; + + fn poll_ready(&self, cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> { + self.service.poll_ready(cx) + } + + fn call(&self, req: ServiceRequest) -> Self::Future { + let started = Instant::now(); + let log = self.log.clone(); + let method = req.method().to_string(); + let path = req.path().to_string(); + let query = req.query_string().to_string(); + let remote = req + .connection_info() + .realip_remote_addr() + .map(|s| s.to_string()) + .unwrap_or_else(|| "unknown".to_string()); + let user_id: Option<Uuid> = req.get_session().user(); + + let full_path = if query.is_empty() { + path.clone() + } else { + format!("{}?{}", path, query) + }; + + // Clone the Arc<S> so it can be moved into the async block + let service = self.service.clone(); + + Box::pin(async move { + let res = service.call(req).await?; + let elapsed = started.elapsed(); + let status = res.status(); + let status_code = status.as_u16(); + let is_health = path == "/health"; + + if !is_health { + let user_id_str = user_id + .map(|u: Uuid| u.to_string()) + .unwrap_or_else(|| "-".to_string()); + let log_message = format!( + "HTTP request | method={} | path={} | status={} | duration_ms={} | remote={} | user_id={}", + method, + full_path, + status_code, + elapsed.as_millis(), + remote, + user_id_str + ); + + match status_code { + 200..=299 => slog_info!(&log, "{}", log_message), + 400..=499 => slog_warn!(&log, "{}", log_message), + _ => slog_error!(&log, "{}", log_message), + } + } + Ok(res) + }) + } +} diff --git a/apps/app/src/main.rs b/apps/app/src/main.rs new file mode 100644 index 0000000..4087380 --- /dev/null +++ b/apps/app/src/main.rs @@ -0,0 +1,210 @@ +use actix_cors::Cors; +use actix_web::cookie::time::Duration; +use actix_web::middleware::Logger; +use actix_web::{App, HttpResponse, HttpServer, cookie::Key, web}; 
+use clap::Parser; +use db::cache::AppCache; +use db::database::AppDatabase; +use sea_orm::ConnectionTrait; +use service::AppService; +use session::SessionMiddleware; +use session::config::{PersistentSession, SessionLifecycle, TtlExtensionPolicy}; +use session::storage::RedisClusterSessionStore; +use slog::Drain; + +mod args; +mod logging; + +use args::ServerArgs; +use config::AppConfig; +use migrate::{Migrator, MigratorTrait}; + +#[derive(Clone)] +pub struct AppState { + pub db: AppDatabase, + pub cache: AppCache, +} + +fn build_slog_logger(level: &str) -> slog::Logger { + let level_filter = match level { + "trace" => 0usize, + "debug" => 1usize, + "info" => 2usize, + "warn" => 3usize, + "error" => 4usize, + _ => 2usize, + }; + + struct StderrDrain(usize); + + impl Drain for StderrDrain { + type Ok = (); + type Err = (); + #[inline] + fn log(&self, record: &slog::Record, _logger: &slog::OwnedKVList) -> Result<(), ()> { + let slog_level = match record.level() { + slog::Level::Trace => 0, + slog::Level::Debug => 1, + slog::Level::Info => 2, + slog::Level::Warning => 3, + slog::Level::Error => 4, + slog::Level::Critical => 5, + }; + if slog_level < self.0 { + return Ok(()); + } + let _ = eprintln!( + "{} [{}] {}:{} - {}", + chrono::Utc::now().format("%Y-%m-%dT%H:%M:%S%.3fZ"), + record.level().to_string(), + record + .file() + .rsplit_once('/') + .map(|(_, s)| s) + .unwrap_or(record.file()), + record.line(), + record.msg(), + ); + Ok(()) + } + } + + let drain = StderrDrain(level_filter); + let drain = std::sync::Mutex::new(drain); + let drain = slog::Fuse::new(drain); + slog::Logger::root(drain, slog::o!()) +} + +fn build_session_key(cfg: &AppConfig) -> anyhow::Result<Key> { + if let Some(secret) = cfg.env.get("APP_SESSION_SECRET") { + let bytes: Vec<u8> = secret.as_bytes().iter().cycle().take(64).copied().collect(); + return Ok(Key::from(&bytes)); + } + Ok(Key::generate()) +} + +#[tokio::main] +async fn main() -> anyhow::Result<()> { + let cfg = AppConfig::load(); + 
let log_level = cfg.log_level().unwrap_or_else(|_| "info".to_string()); + let log = build_slog_logger(&log_level); + slog::info!( + log, + "Starting {} {}", + cfg.app_name().unwrap_or_default(), + cfg.app_version().unwrap_or_default() + ); + let db = AppDatabase::init(&cfg).await?; + slog::info!(log, "Database connected"); + let redis_urls = cfg.redis_urls()?; + let store: RedisClusterSessionStore = RedisClusterSessionStore::new(redis_urls).await?; + slog::info!(log, "Redis connected"); + let cache = AppCache::init(&cfg).await?; + slog::info!(log, "Cache initialized"); + run_migrations(&db, &log).await?; + let session_key = build_session_key(&cfg)?; + let args = ServerArgs::parse(); + let service = AppService::new(cfg.clone()).await?; + slog::info!(log, "AppService initialized"); + + let (shutdown_tx, shutdown_rx) = tokio::sync::broadcast::channel::<()>(1); + let worker_service = service.clone(); + let log_for_http = log.clone(); + let log_for_worker = log.clone(); + let worker_handle = tokio::spawn(async move { + worker_service + .start_room_workers(shutdown_rx, log_for_worker) + .await + }); + + let bind_addr = args.bind.unwrap_or_else(|| "127.0.0.1:8080".to_string()); + slog::info!(log, "Listening on {}", bind_addr); + HttpServer::new(move || { + let cors = Cors::default() + .allow_any_origin() + .allow_any_method() + .allow_any_header() + .supports_credentials() + .max_age(3600); + + let session_mw = SessionMiddleware::builder(store.clone(), session_key.clone()) + .cookie_name("id".to_string()) + .cookie_path("/".to_string()) + .cookie_secure(false) + .cookie_http_only(true) + .session_lifecycle(SessionLifecycle::PersistentSession( + PersistentSession::default() + .session_ttl(Duration::days(30)) + .session_ttl_extension_policy(TtlExtensionPolicy::OnEveryRequest), + )) + .build(); + + App::new() + .wrap(cors) + .wrap(session_mw) + .wrap(Logger::default().exclude("/health")) + .app_data(web::Data::new(AppState { + db: db.clone(), + cache: cache.clone(), + })) + 
.app_data(web::Data::new(service.clone())) + .app_data(web::Data::new(cfg.clone())) + .app_data(web::Data::new(db.clone())) + .app_data(web::Data::new(cache.clone())) + .wrap(logging::RequestLogger::new(log_for_http.clone())) + .route("/health", web::get().to(health_check)) + .configure(api::route::init_routes) + }) + .bind(&bind_addr)? + .run() + .await?; + + slog::info!(log, "Server stopped, shutting down room workers"); + let _ = shutdown_tx.send(()); + let _ = worker_handle.await; + slog::info!(log, "Room workers stopped"); + + Ok(()) +} + +async fn run_migrations(db: &AppDatabase, log: &slog::Logger) -> anyhow::Result<()> { + slog::info!(log, "Running database migrations..."); + Migrator::up(db.writer(), None) + .await + .map_err(|e| anyhow::anyhow!("Migration failed: {:?}", e))?; + slog::info!(log, "Migrations completed"); + Ok(()) +} + +async fn health_check(state: web::Data<AppState>) -> HttpResponse { + let db_ok = db_ping(&state.db).await; + let cache_ok = cache_ping(&state.cache).await; + + let healthy = db_ok && cache_ok; + if healthy { + HttpResponse::Ok().json(serde_json::json!({ + "status": "ok", + "db": "ok", + "cache": "ok", + })) + } else { + HttpResponse::ServiceUnavailable().json(serde_json::json!({ + "status": "unhealthy", + "db": if db_ok { "ok" } else { "error" }, + "cache": if cache_ok { "ok" } else { "error" }, + })) + } +} + +async fn db_ping(db: &AppDatabase) -> bool { + db.query_one_raw(sea_orm::Statement::from_string( + sea_orm::DbBackend::Postgres, + "SELECT 1", + )) + .await + .is_ok() +} + +async fn cache_ping(cache: &AppCache) -> bool { + cache.conn().await.is_ok() +} diff --git a/apps/email/Cargo.toml b/apps/email/Cargo.toml new file mode 100644 index 0000000..ad888ad --- /dev/null +++ b/apps/email/Cargo.toml @@ -0,0 +1,30 @@ +[package] +name = "email-server" +version.workspace = true +edition.workspace = true +authors.workspace = true +description.workspace = true +repository.workspace = true +readme.workspace = true 
+homepage.workspace = true +license.workspace = true +keywords.workspace = true +categories.workspace = true +documentation.workspace = true + +[[bin]] +name = "email-worker" +path = "src/main.rs" + +[dependencies] +tokio = { workspace = true, features = ["full"] } +service = { workspace = true } +db = { workspace = true } +config = { workspace = true } +slog = { workspace = true } +anyhow = { workspace = true } +clap = { workspace = true, features = ["derive"] } +chrono = { workspace = true, features = ["serde"] } + +[lints] +workspace = true diff --git a/apps/email/src/main.rs b/apps/email/src/main.rs new file mode 100644 index 0000000..d08a6b1 --- /dev/null +++ b/apps/email/src/main.rs @@ -0,0 +1,84 @@ +use clap::Parser; +use config::AppConfig; +use service::AppService; +use slog::{Drain, OwnedKVList, Record}; + +#[derive(Parser, Debug)] +#[command(name = "email-worker")] +#[command(version)] +struct Args { + #[arg(long, default_value = "info")] + log_level: String, +} + +#[tokio::main] +async fn main() -> anyhow::Result<()> { + let args = Args::parse(); + let cfg = AppConfig::load(); + let log = build_logger(&args.log_level); + + slog::info!(log, "Starting email worker"); + let service = AppService::new(cfg).await?; + + let (shutdown_tx, shutdown_rx) = tokio::sync::broadcast::channel::<()>(1); + let log_for_signal = log.clone(); + tokio::spawn(async move { + tokio::signal::ctrl_c().await.ok(); + slog::info!(log_for_signal, "shutting down email worker"); + let _ = shutdown_tx.send(()); + }); + + service.start_email_workers(shutdown_rx).await?; + slog::info!(log, "email worker stopped"); + Ok(()) +} + +fn build_logger(level: &str) -> slog::Logger { + let level_filter = match level { + "trace" => 0usize, + "debug" => 1usize, + "info" => 2usize, + "warn" => 3usize, + "error" => 4usize, + _ => 2usize, + }; + + struct StderrDrain(usize); + + impl Drain for StderrDrain { + type Ok = (); + type Err = (); + #[inline] + fn log(&self, record: &Record, _logger: 
&OwnedKVList) -> Result<(), ()> { + let slog_level = match record.level() { + slog::Level::Trace => 0, + slog::Level::Debug => 1, + slog::Level::Info => 2, + slog::Level::Warning => 3, + slog::Level::Error => 4, + slog::Level::Critical => 5, + }; + if slog_level < self.0 { + return Ok(()); + } + let _ = eprintln!( + "{} [{}] {}:{} - {}", + chrono::Utc::now().format("%Y-%m-%dT%H:%M:%S%.3fZ"), + record.level().to_string(), + record + .file() + .rsplit_once('/') + .map(|(_, s)| s) + .unwrap_or(record.file()), + record.line(), + record.msg(), + ); + Ok(()) + } + } + + let drain = StderrDrain(level_filter); + let drain = std::sync::Mutex::new(drain); + let drain = slog::Fuse::new(drain); + slog::Logger::root(drain, slog::o!()) +} diff --git a/apps/git-hook/Cargo.toml b/apps/git-hook/Cargo.toml new file mode 100644 index 0000000..2cef8bb --- /dev/null +++ b/apps/git-hook/Cargo.toml @@ -0,0 +1,27 @@ +[package] +name = "git-hook" +version.workspace = true +edition.workspace = true +authors.workspace = true +description.workspace = true +repository.workspace = true +readme.workspace = true +homepage.workspace = true +license.workspace = true +keywords.workspace = true +categories.workspace = true +documentation.workspace = true + +[dependencies] +tokio = { workspace = true, features = ["full"] } +git = { workspace = true } +db = { workspace = true } +config = { workspace = true } +tracing = { workspace = true } +tracing-subscriber = { workspace = true, features = ["json"] } +anyhow = { workspace = true } +slog = { workspace = true } +clap = { workspace = true, features = ["derive"] } +tokio-util = { workspace = true } +chrono = { workspace = true, features = ["serde"] } +reqwest = { workspace = true } diff --git a/apps/git-hook/src/args.rs b/apps/git-hook/src/args.rs new file mode 100644 index 0000000..d6dd9b9 --- /dev/null +++ b/apps/git-hook/src/args.rs @@ -0,0 +1,10 @@ +use clap::Parser; + +#[derive(Parser, Debug)] +#[command(name = "git-hook")] +#[command(version)] +pub 
struct HookArgs { + /// Worker ID for this instance. Defaults to the HOOK_POOL_WORKER_ID env var or a generated UUID. + #[arg(long)] + pub worker_id: Option<String>, +} diff --git a/apps/git-hook/src/main.rs b/apps/git-hook/src/main.rs new file mode 100644 index 0000000..aaf762f --- /dev/null +++ b/apps/git-hook/src/main.rs @@ -0,0 +1,142 @@ +use clap::Parser; +use config::AppConfig; +use db::cache::AppCache; +use db::database::AppDatabase; +use git::hook::GitServiceHooks; +use slog::{Drain, OwnedKVList, Record}; +use tokio::signal; +use tokio_util::sync::CancellationToken; + +mod args; + +use args::HookArgs; + +#[tokio::main] +async fn main() -> anyhow::Result<()> { + // 1. Load configuration + let cfg = AppConfig::load(); + + // 2. Init slog logging + let log_level = cfg.log_level().unwrap_or_else(|_| "info".to_string()); + let log = build_slog_logger(&log_level); + + // 3. Connect to database + let db = AppDatabase::init(&cfg).await?; + slog::info!(log, "database connected"); + + // 4. Connect to Redis cache (also provides the cluster pool for hook queue) + let cache = AppCache::init(&cfg).await?; + slog::info!(log, "cache connected"); + + // 5. Parse CLI args + let args = HookArgs::parse(); + + slog::info!(log, "git-hook worker starting"; + "worker_id" => %args.worker_id.unwrap_or_else(|| "default".to_string()) + ); + + // 5. Build HTTP client for webhook delivery + let http = reqwest::Client::builder() + .user_agent("Code-Git-Hook/1.0") + .build() + .unwrap_or_else(|_| reqwest::Client::new()); + + // 6. 
Build and run git hook service + let hooks = GitServiceHooks::new( + db, + cache.clone(), + cache.redis_pool().clone(), + log.clone(), + cfg, + std::sync::Arc::new(http), + ); + + let cancel = CancellationToken::new(); + let cancel_clone = cancel.clone(); + + // Spawn signal handler + let log_clone = log.clone(); + tokio::spawn(async move { + let ctrl_c = async { + signal::ctrl_c() + .await + .expect("failed to install CTRL+C handler"); + }; + + #[cfg(unix)] + let term = async { + use tokio::signal::unix::{SignalKind, signal}; + let mut sig = + signal(SignalKind::terminate()).expect("failed to install SIGTERM handler"); + sig.recv().await; + }; + + #[cfg(not(unix))] + let term = std::future::pending::<()>(); + + tokio::select! { + _ = ctrl_c => { + slog::info!(log_clone, "received SIGINT, initiating shutdown"); + } + _ = term => { + slog::info!(log_clone, "received SIGTERM, initiating shutdown"); + } + } + cancel_clone.cancel(); + }); + + hooks.run(cancel).await?; + + slog::info!(log, "git-hook worker stopped"); + Ok(()) +} + +fn build_slog_logger(level: &str) -> slog::Logger { + let level_filter = match level { + "trace" => 0usize, + "debug" => 1usize, + "info" => 2usize, + "warn" => 3usize, + "error" => 4usize, + _ => 2usize, + }; + + struct StderrDrain(usize); + + impl Drain for StderrDrain { + type Ok = (); + type Err = (); + #[inline] + fn log(&self, record: &Record, _logger: &OwnedKVList) -> Result<(), ()> { + let slog_level = match record.level() { + slog::Level::Trace => 0, + slog::Level::Debug => 1, + slog::Level::Info => 2, + slog::Level::Warning => 3, + slog::Level::Error => 4, + slog::Level::Critical => 5, + }; + if slog_level < self.0 { + return Ok(()); + } + let _ = eprintln!( + "{} [{}] {}:{} - {}", + chrono::Utc::now().format("%Y-%m-%dT%H:%M:%S%.3fZ"), + record.level().to_string(), + record + .file() + .rsplit_once('/') + .map(|(_, s)| s) + .unwrap_or(record.file()), + record.line(), + record.msg(), + ); + Ok(()) + } + } + + let drain = 
StderrDrain(level_filter); + let drain = std::sync::Mutex::new(drain); + let drain = slog::Fuse::new(drain); + slog::Logger::root(drain, slog::o!()) +} diff --git a/apps/gitserver/Cargo.toml b/apps/gitserver/Cargo.toml new file mode 100644 index 0000000..5b11284 --- /dev/null +++ b/apps/gitserver/Cargo.toml @@ -0,0 +1,30 @@ +[package] +name = "gitserver" +version.workspace = true +edition.workspace = true +authors.workspace = true +description.workspace = true +repository.workspace = true +readme.workspace = true +homepage.workspace = true +license.workspace = true +keywords.workspace = true +categories.workspace = true +documentation.workspace = true + +[[bin]] +name = "gitserver" +path = "src/main.rs" + +[dependencies] +tokio = { workspace = true, features = ["full"] } +git = { workspace = true } +db = { workspace = true } +config = { workspace = true } +slog = { workspace = true } +anyhow = { workspace = true } +clap = { workspace = true, features = ["derive"] } +chrono = { workspace = true, features = ["serde"] } + +[lints] +workspace = true diff --git a/apps/gitserver/src/main.rs b/apps/gitserver/src/main.rs new file mode 100644 index 0000000..17c077f --- /dev/null +++ b/apps/gitserver/src/main.rs @@ -0,0 +1,94 @@ +use clap::Parser; +use config::AppConfig; +use slog::{Drain, OwnedKVList, Record}; + +#[derive(Parser, Debug)] +#[command(name = "gitserver")] +#[command(version)] +struct Args { + #[arg(long, default_value = "info")] + log_level: String, +} + +#[tokio::main] +async fn main() -> anyhow::Result<()> { + let args = Args::parse(); + let cfg = AppConfig::load(); + let log = build_logger(&args.log_level); + + let http_handle = tokio::spawn(git::http::run_http(cfg.clone(), log.clone())); + let ssh_handle = tokio::spawn(git::ssh::run_ssh(cfg, log.clone())); + + tokio::select! 
{ + result = http_handle => { + match result { + Ok(Ok(())) => slog::info!(log, "HTTP server stopped"), + Ok(Err(e)) => slog::error!(log, "HTTP server error: {}", e), + Err(e) => slog::error!(log, "HTTP server task panicked: {}", e), + } + } + result = ssh_handle => { + match result { + Ok(Ok(())) => slog::info!(log, "SSH server stopped"), + Ok(Err(e)) => slog::error!(log, "SSH server error: {}", e), + Err(e) => slog::error!(log, "SSH server task panicked: {}", e), + } + } + _ = tokio::signal::ctrl_c() => { + slog::info!(log, "received shutdown signal"); + } + } + + slog::info!(log, "shutting down"); + Ok(()) +} + +fn build_logger(level: &str) -> slog::Logger { + let level_filter = match level { + "trace" => 0usize, + "debug" => 1usize, + "info" => 2usize, + "warn" => 3usize, + "error" => 4usize, + _ => 2usize, + }; + + struct StderrDrain(usize); + + impl Drain for StderrDrain { + type Ok = (); + type Err = (); + #[inline] + fn log(&self, record: &Record, _logger: &OwnedKVList) -> Result<(), ()> { + let slog_level = match record.level() { + slog::Level::Trace => 0, + slog::Level::Debug => 1, + slog::Level::Info => 2, + slog::Level::Warning => 3, + slog::Level::Error => 4, + slog::Level::Critical => 5, + }; + if slog_level < self.0 { + return Ok(()); + } + let _ = eprintln!( + "{} [{}] {}:{} - {}", + chrono::Utc::now().format("%Y-%m-%dT%H:%M:%S%.3fZ"), + record.level().to_string(), + record + .file() + .rsplit_once('/') + .map(|(_, s)| s) + .unwrap_or(record.file()), + record.line(), + record.msg(), + ); + Ok(()) + } + } + + let drain = StderrDrain(level_filter); + let drain = std::sync::Mutex::new(drain); + let drain = slog::Fuse::new(drain); + slog::Logger::root(drain, slog::o!()) +} diff --git a/apps/migrate/Cargo.toml b/apps/migrate/Cargo.toml new file mode 100644 index 0000000..3fe1bbe --- /dev/null +++ b/apps/migrate/Cargo.toml @@ -0,0 +1,13 @@ +[package] +name = "migrate-cli" +version.workspace = true +edition.workspace = true + +[dependencies] 
+migrate.workspace = true +sea-orm = { workspace = true, features = ["sqlx-all", "runtime-tokio"] } +tokio = { workspace = true, features = ["rt-multi-thread", "macros"] } +anyhow.workspace = true +clap.workspace = true +dotenvy.workspace = true +config = { workspace = true } \ No newline at end of file diff --git a/apps/migrate/src/main.rs b/apps/migrate/src/main.rs new file mode 100644 index 0000000..126c359 --- /dev/null +++ b/apps/migrate/src/main.rs @@ -0,0 +1,102 @@ +use anyhow::Context; +use clap::Command; +use migrate::MigratorTrait; +use sea_orm::{Database, DatabaseConnection}; + +#[tokio::main] +async fn main() -> anyhow::Result<()> { + dotenvy::dotenv().ok(); + config::AppConfig::load(); + + let cmd = Command::new("migrate") + .about("Database migration CLI") + .arg( + clap::Arg::new("steps") + .help("Number of migrations (for up/down)") + .required(false) + .index(1), + ) + .subcommand(Command::new("up").about("Apply pending migrations")) + .subcommand(Command::new("down").about("Revert applied migrations")) + .subcommand(Command::new("fresh").about("Drop all tables and re-apply")) + .subcommand(Command::new("refresh").about("Revert all then re-apply")) + .subcommand(Command::new("reset").about("Revert all applied migrations")) + .subcommand(Command::new("status").about("Show migration status")) + .try_get_matches() + .map_err(|e| anyhow::anyhow!("{}", e))?; + + let db_url = config::AppConfig::load().database_url()?; + + let db: DatabaseConnection = Database::connect(&db_url).await?; + + match cmd.subcommand_name() { + Some("up") => { + let steps = cmd + .get_one::<String>("steps") + .and_then(|s| s.parse().ok()) + .unwrap_or(0); + run_up(&db, steps).await?; + } + Some("down") => { + let steps = cmd + .get_one::<String>("steps") + .and_then(|s| s.parse().ok()) + .unwrap_or(1); + run_down(&db, steps).await?; + } + Some("fresh") => run_fresh(&db).await?, + Some("refresh") => run_refresh(&db).await?, + Some("reset") => run_reset(&db).await?, + 
Some("status") => run_status(&db).await?, + _ => { + eprintln!( + "Usage: migrate <command>\nCommands: up, down, fresh, refresh, reset, status" + ); + std::process::exit(1); + } + } + + Ok(()) +} + +async fn run_up(db: &DatabaseConnection, steps: u32) -> anyhow::Result<()> { + migrate::Migrator::up(db, if steps == 0 { None } else { Some(steps) }) + .await + .context("failed to run migrations up")?; + Ok(()) +} + +async fn run_down(db: &DatabaseConnection, steps: u32) -> anyhow::Result<()> { + migrate::Migrator::down(db, Some(steps)) + .await + .context("failed to run migrations down")?; + Ok(()) +} + +async fn run_fresh(db: &DatabaseConnection) -> anyhow::Result<()> { + migrate::Migrator::fresh(db) + .await + .context("failed to run migrations fresh")?; + Ok(()) +} + +async fn run_refresh(db: &DatabaseConnection) -> anyhow::Result<()> { + migrate::Migrator::refresh(db) + .await + .context("failed to run migrations refresh")?; + Ok(()) +} + +async fn run_reset(db: &DatabaseConnection) -> anyhow::Result<()> { + migrate::Migrator::reset(db) + .await + .context("failed to run migrations reset")?; + Ok(()) +} + +async fn run_status(db: &DatabaseConnection) -> anyhow::Result<()> { + migrate::Migrator::status(db) + .await + .context("failed to get migration status")?; + Ok(()) +} diff --git a/apps/operator/Cargo.toml b/apps/operator/Cargo.toml new file mode 100644 index 0000000..211f759 --- /dev/null +++ b/apps/operator/Cargo.toml @@ -0,0 +1,30 @@ +[package] +name = "operator" +version.workspace = true +edition.workspace = true +authors.workspace = true +description.workspace = true +repository.workspace = true +readme.workspace = true +homepage.workspace = true +license.workspace = true +keywords.workspace = true +categories.workspace = true +documentation.workspace = true + +[dependencies] +kube = { workspace = true } +k8s-openapi = { workspace = true } +serde = { workspace = true } +serde_json.workspace = true +serde_yaml = { workspace = true } +tokio = { workspace = 
true, features = ["rt-multi-thread", "macros", "sync"] } +anyhow.workspace = true +futures.workspace = true +tracing.workspace = true +tracing-subscriber.workspace = true +chrono = { workspace = true } +uuid = { workspace = true, features = ["v4"] } + +[lints] +workspace = true diff --git a/apps/operator/src/context.rs b/apps/operator/src/context.rs new file mode 100644 index 0000000..ca782a1 --- /dev/null +++ b/apps/operator/src/context.rs @@ -0,0 +1,44 @@ +//! Shared reconcile context. + +use kube::Client; + +/// Context passed to every reconcile call. +#[derive(Clone)] +pub struct ReconcileCtx { + pub client: Client, + /// Default image registry prefix (e.g. "myapp/"). + pub image_prefix: String, + /// Operator's own namespace. + pub operator_namespace: String, +} + +impl ReconcileCtx { + pub async fn from_env() -> anyhow::Result<Self> { + let client = Client::try_default().await?; + let ns = std::env::var("POD_NAMESPACE").unwrap_or_else(|_| "default".to_string()); + let prefix = + std::env::var("OPERATOR_IMAGE_PREFIX").unwrap_or_else(|_| "myapp/".to_string()); + + Ok(Self { + client, + image_prefix: prefix, + operator_namespace: ns, + }) + } + + /// Prepend image_prefix to an unqualified image name. + /// E.g. "app:latest" → "myapp/app:latest" + pub fn resolve_image(&self, image: &str) -> String { + // If it already has a registry/domain component, leave it alone. + if image.contains('/') && !image.starts_with(&self.image_prefix) { + image.to_string() + } else if image.starts_with(&self.image_prefix) { + image.to_string() + } else { + // Unqualified name: prepend prefix. + format!("{}{}", self.image_prefix, image) + } + } +} + +pub type ReconcileState = ReconcileCtx; diff --git a/apps/operator/src/controller/app.rs b/apps/operator/src/controller/app.rs new file mode 100644 index 0000000..a1d2731 --- /dev/null +++ b/apps/operator/src/controller/app.rs @@ -0,0 +1,221 @@ +//! Controller for the `App` CRD — manages Deployment + Service. 
+ +use crate::context::ReconcileState; +use crate::controller::helpers::{ + child_meta, env_var_to_json, merge_env, owner_ref, query_deployment_status, std_labels, +}; +use crate::crd::{App, AppSpec}; +use serde_json::{Value, json}; +use std::sync::Arc; +use tracing::info; + +/// Reconcile an App resource: create/update Deployment + Service. +pub async fn reconcile(app: Arc<App>, ctx: Arc<ReconcileState>) -> Result<(), kube::Error> { + let ns = app.metadata.namespace.as_deref().unwrap_or("default"); + let name = app.metadata.name.as_deref().unwrap_or(""); + let spec = &app.spec; + let client = &ctx.client; + + let or = owner_ref(&app.metadata, &app.api_version, &app.kind); + let labels = std_labels(); + + // ---- Deployment ---- + let deployment = build_deployment(ns, name, spec, &or, &labels); + apply_deployment(client, ns, name, &deployment).await?; + + // ---- Service ---- + let service = build_service(ns, name, &or, &labels); + apply_service(client, ns, name, &service).await?; + + // ---- Status patch ---- + let (ready_replicas, phase) = query_deployment_status(client, ns, name).await?; + let status = json!({ + "status": { + "readyReplicas": ready_replicas, + "phase": phase + } + }); + patch_status::<App>(client, ns, name, &status).await?; + + Ok(()) +} + +fn build_deployment( + ns: &str, + name: &str, + spec: &AppSpec, + or: &crate::crd::OwnerReference, + labels: &std::collections::BTreeMap<String, String>, +) -> Value { + let env = merge_env(&[], &spec.env); + let image = if spec.image.is_empty() { + "myapp/app:latest".to_string() + } else { + spec.image.clone() + }; + let pull = if spec.image_pull_policy.is_empty() { + "IfNotPresent".to_string() + } else { + spec.image_pull_policy.clone() + }; + + let resources = build_resources(&spec.resources); + let liveness = spec.liveness_probe.as_ref().map(|p| { + json!({ + "httpGet": { "path": p.path, "port": p.port }, + "initialDelaySeconds": p.initial_delay_seconds, + "periodSeconds": 10, + }) + }); + let readiness 
= spec.readiness_probe.as_ref().map(|p| { + json!({ + "httpGet": { "path": p.path, "port": p.port }, + "initialDelaySeconds": p.initial_delay_seconds, + "periodSeconds": 5, + }) + }); + + json!({ + "metadata": child_meta(name, ns, or, labels.clone()), + "spec": { + "replicas": spec.replicas, + "selector": { "matchLabels": labels }, + "strategy": { + "type": "RollingUpdate", + "rollingUpdate": { "maxSurge": 1, "maxUnavailable": 0 } + }, + "template": { + "metadata": { "labels": labels.clone() }, + "spec": { + "containers": [{ + "name": "app", + "image": image, + "ports": [{ "containerPort": 8080 }], + "env": env.iter().map(env_var_to_json).collect::<Vec<_>>(), + "imagePullPolicy": pull, + "resources": resources, + "livenessProbe": liveness, + "readinessProbe": readiness, + }] + } + } + } + }) +} + +fn build_service( + ns: &str, + name: &str, + or: &crate::crd::OwnerReference, + labels: &std::collections::BTreeMap<String, String>, +) -> Value { + json!({ + "metadata": child_meta(name, ns, or, labels.clone()), + "spec": { + "ports": [{ "port": 80, "targetPort": 8080, "name": "http" }], + "selector": labels.clone(), + "type": "ClusterIP" + } + }) +} + +pub(crate) fn build_resources(res: &Option<crate::crd::ResourceRequirements>) -> Value { + match res { + Some(r) => { + let mut out = serde_json::Map::new(); + if let Some(ref req) = r.requests { + let mut req_map = serde_json::Map::new(); + if let Some(ref cpu) = req.cpu { + req_map.insert("cpu".to_string(), json!(cpu)); + } + if let Some(ref mem) = req.memory { + req_map.insert("memory".to_string(), json!(mem)); + } + if !req_map.is_empty() { + out.insert("requests".to_string(), Value::Object(req_map)); + } + } + if let Some(ref lim) = r.limits { + let mut lim_map = serde_json::Map::new(); + if let Some(ref cpu) = lim.cpu { + lim_map.insert("cpu".to_string(), json!(cpu)); + } + if let Some(ref mem) = lim.memory { + lim_map.insert("memory".to_string(), json!(mem)); + } + if !lim_map.is_empty() { + 
out.insert("limits".to_string(), Value::Object(lim_map)); + } + } + if out.is_empty() { + json!({}) + } else { + Value::Object(out) + } + } + None => json!({}), + } +} + +pub(crate) async fn apply_deployment( + client: &kube::Client, + ns: &str, + name: &str, + body: &Value, +) -> Result<(), kube::Error> { + let api: kube::Api<crate::crd::JsonResource> = kube::Api::namespaced(client.clone(), ns); + let jr = crate::crd::JsonResource::new(Default::default(), body.clone()); + match api.get(name).await { + Ok(_) => { + info!(name, ns, "updating app deployment"); + let _ = api + .replace(name, &kube::api::PostParams::default(), &jr) + .await?; + } + Err(kube::Error::Api(e)) if e.code == 404 => { + info!(name, ns, "creating app deployment"); + let _ = api.create(&kube::api::PostParams::default(), &jr).await?; + } + Err(e) => return Err(e), + } + Ok(()) +} + +pub(crate) async fn apply_service( + client: &kube::Client, + ns: &str, + name: &str, + body: &Value, +) -> Result<(), kube::Error> { + let api: kube::Api<crate::crd::JsonResource> = kube::Api::namespaced(client.clone(), ns); + let jr = crate::crd::JsonResource::new(Default::default(), body.clone()); + match api.get(name).await { + Ok(_) => { + let _ = api + .replace(name, &kube::api::PostParams::default(), &jr) + .await?; + } + Err(kube::Error::Api(e)) if e.code == 404 => { + let _ = api.create(&kube::api::PostParams::default(), &jr).await?; + } + Err(e) => return Err(e), + } + Ok(()) +} + +pub(crate) async fn patch_status<T: Clone + serde::de::DeserializeOwned + std::fmt::Debug>( + client: &kube::Client, + ns: &str, + name: &str, + body: &Value, +) -> Result<(), kube::Error> { + let api: kube::Api<crate::crd::JsonResource> = kube::Api::namespaced(client.clone(), ns); + let _ = api + .patch_status( + name, + &kube::api::PatchParams::default(), + &kube::api::Patch::Merge(body), + ) + .await?; + Ok(()) +} diff --git a/apps/operator/src/controller/email_worker.rs b/apps/operator/src/controller/email_worker.rs new file 
mode 100644 index 0000000..e05a8e2 --- /dev/null +++ b/apps/operator/src/controller/email_worker.rs @@ -0,0 +1,68 @@ +//! Controller for the `EmailWorker` CRD — Deployment only. + +use crate::context::ReconcileState; +use crate::controller::app::{apply_deployment, patch_status}; +use crate::controller::helpers::{child_meta, env_var_to_json, merge_env, owner_ref, query_deployment_status, std_labels}; +use crate::crd::{EmailWorker, EmailWorkerSpec}; +use serde_json::{Value, json}; +use std::sync::Arc; + +pub async fn reconcile(ew: Arc<EmailWorker>, ctx: Arc<ReconcileState>) -> Result<(), kube::Error> { + let ns = ew.metadata.namespace.as_deref().unwrap_or("default"); + let name = ew.metadata.name.as_deref().unwrap_or(""); + let spec = &ew.spec; + let client = &ctx.client; + + let or = owner_ref(&ew.metadata, &ew.api_version, &ew.kind); + let labels = std_labels(); + + let deployment = build_deployment(ns, name, spec, &or, &labels); + apply_deployment(client, ns, name, &deployment).await?; + + let (ready_replicas, phase) = query_deployment_status(client, ns, name).await?; + let status = json!({ "status": { "readyReplicas": ready_replicas, "phase": phase } }); + patch_status::<EmailWorker>(client, ns, name, &status).await?; + + Ok(()) +} + +fn build_deployment( + ns: &str, + name: &str, + spec: &EmailWorkerSpec, + or: &crate::crd::OwnerReference, + labels: &std::collections::BTreeMap<String, String>, +) -> Value { + let env = merge_env(&[], &spec.env); + let image = if spec.image.is_empty() { + "myapp/email-worker:latest".to_string() + } else { + spec.image.clone() + }; + let pull = if spec.image_pull_policy.is_empty() { + "IfNotPresent".to_string() + } else { + spec.image_pull_policy.clone() + }; + let resources = super::app::build_resources(&spec.resources); + + json!({ + "metadata": child_meta(name, ns, or, labels.clone()), + "spec": { + "replicas": 1, + "selector": { "matchLabels": labels }, + "template": { + "metadata": { "labels": labels.clone() }, + "spec": { + 
"containers": [{ + "name": "email-worker", + "image": image, + "env": env.iter().map(env_var_to_json).collect::<Vec<_>>(), + "imagePullPolicy": pull, + "resources": resources, + }] + } + } + } + }) +} diff --git a/apps/operator/src/controller/git_hook.rs b/apps/operator/src/controller/git_hook.rs new file mode 100644 index 0000000..312a59a --- /dev/null +++ b/apps/operator/src/controller/git_hook.rs @@ -0,0 +1,137 @@ +//! Controller for the `GitHook` CRD — Deployment + ConfigMap. + +use crate::context::ReconcileState; +use crate::controller::app::{apply_deployment, patch_status}; +use crate::controller::helpers::{child_meta, env_var_to_json, merge_env, owner_ref, query_deployment_status, std_labels}; +use crate::crd::{GitHook, GitHookSpec, JsonResource}; +use serde_json::{Value, json}; +use std::sync::Arc; +use tracing::info; + +pub async fn reconcile(gh: Arc<GitHook>, ctx: Arc<ReconcileState>) -> Result<(), kube::Error> { + let ns = gh.metadata.namespace.as_deref().unwrap_or("default"); + let name = gh.metadata.name.as_deref().unwrap_or(""); + let spec = &gh.spec; + let client = &ctx.client; + + let or = owner_ref(&gh.metadata, &gh.api_version, &gh.kind); + let labels = std_labels(); + let cm_name = format!("{}-config", name); + + // ---- ConfigMap ---- + let configmap = build_configmap(ns, &cm_name, &or, &labels); + apply_configmap(client, ns, &cm_name, &configmap).await?; + + // ---- Deployment ---- + let deployment = build_deployment(ns, name, &cm_name, spec, &or, &labels); + apply_deployment(client, ns, name, &deployment).await?; + + let (ready_replicas, phase) = query_deployment_status(client, ns, name).await?; + let status = json!({ "status": { "readyReplicas": ready_replicas, "phase": phase } }); + patch_status::<GitHook>(client, ns, name, &status).await?; + + Ok(()) +} + +fn build_configmap( + ns: &str, + cm_name: &str, + or: &crate::crd::OwnerReference, + labels: &std::collections::BTreeMap<String, String>, +) -> Value { + let pool_config = 
serde_yaml::to_string(&serde_json::json!({ + "max_concurrent": 8, + "cpu_threshold": 80.0, + "redis_list_prefix": "{hook}", + "redis_log_channel": "hook:logs", + "redis_block_timeout_secs": 5, + "redis_max_retries": 3, + })) + .unwrap_or_default(); + + json!({ + "metadata": child_meta(cm_name, ns, or, labels.clone()), + "data": { + "pool.yaml": pool_config + } + }) +} + +fn build_deployment( + ns: &str, + name: &str, + cm_name: &str, + spec: &GitHookSpec, + or: &crate::crd::OwnerReference, + labels: &std::collections::BTreeMap<String, String>, +) -> Value { + let env = merge_env(&[], &spec.env); + let image = if spec.image.is_empty() { + "myapp/git-hook:latest".to_string() + } else { + spec.image.clone() + }; + let pull = if spec.image_pull_policy.is_empty() { + "IfNotPresent".to_string() + } else { + spec.image_pull_policy.clone() + }; + let resources = super::app::build_resources(&spec.resources); + + // Add WORKER_ID env + let worker_id = spec + .worker_id + .clone() + .unwrap_or_else(|| uuid::Uuid::new_v4().to_string()); + let mut env_vars: Vec<serde_json::Value> = env.iter().map(env_var_to_json).collect(); + env_vars.push(json!({ "name": "HOOK_POOL_WORKER_ID", "value": worker_id })); + + json!({ + "metadata": child_meta(name, ns, or, labels.clone()), + "spec": { + "replicas": 1, + "selector": { "matchLabels": labels }, + "template": { + "metadata": { "labels": labels.clone() }, + "spec": { + "containers": [{ + "name": "git-hook", + "image": image, + "env": env_vars, + "imagePullPolicy": pull, + "resources": resources, + "volumeMounts": [{ "name": "hook-config", "mountPath": "/config" }] + }], + "volumes": [{ + "name": "hook-config", + "configMap": { "name": cm_name } + }] + } + } + } + }) +} + +async fn apply_configmap( + client: &kube::Client, + ns: &str, + name: &str, + body: &Value, +) -> Result<(), kube::Error> { + let api: kube::Api<JsonResource> = kube::Api::namespaced(client.clone(), ns); + let jr = JsonResource::new(Default::default(), body.clone()); + 
match api.get(name).await { + Ok(_) => { + let _ = api + .replace(name, &kube::api::PostParams::default(), &jr) + .await?; + Ok(()) + } + Err(kube::Error::Api(e)) if e.code == 404 => { + info!(name, ns, "creating git-hook configmap"); + let _ = api.create(&kube::api::PostParams::default(), &jr).await?; + Ok(()) + } + Err(e) => Err(e), + } +} diff --git a/apps/operator/src/controller/gitserver.rs b/apps/operator/src/controller/gitserver.rs new file mode 100644 index 0000000..976bab8 --- /dev/null +++ b/apps/operator/src/controller/gitserver.rs @@ -0,0 +1,164 @@ +//! Controller for the `GitServer` CRD — Deployment + HTTP Svc + SSH Svc + PVC. + +use crate::context::ReconcileState; +use crate::controller::app::{apply_deployment, apply_service, patch_status}; +use crate::controller::helpers::{child_meta, env_var_to_json, merge_env, owner_ref, query_deployment_status, std_labels}; +use crate::crd::{GitServer, GitServerSpec}; +use serde_json::{Value, json}; +use std::sync::Arc; +use tracing::info; + +pub async fn reconcile(gs: Arc<GitServer>, ctx: Arc<ReconcileState>) -> Result<(), kube::Error> { + let ns = gs.metadata.namespace.as_deref().unwrap_or("default"); + let name = gs.metadata.name.as_deref().unwrap_or(""); + let spec = &gs.spec; + let client = &ctx.client; + + let or = owner_ref(&gs.metadata, &gs.api_version, &gs.kind); + let labels = std_labels(); + + // ---- PVC ---- + let pvc = build_pvc(ns, name, spec, &or, &labels); + apply_pvc(client, ns, &format!("{}-repos", name), &pvc).await?; + + // ---- Deployment ---- + let deployment = build_deployment(ns, name, spec, &or, &labels); + apply_deployment(client, ns, name, &deployment).await?; + + // ---- HTTP Service ---- + let http_svc = build_http_service(ns, name, spec, &or, &labels); + apply_service(client, ns, &format!("{}-http", name), &http_svc).await?; + + // ---- SSH Service ---- + let ssh_svc = build_ssh_service(ns, name, spec, &or, &labels); + apply_service(client, ns, &format!("{}-ssh", name), 
&ssh_svc).await?; + + // ---- Status ---- + let (ready_replicas, phase) = query_deployment_status(client, ns, name).await?; + let status = json!({ "status": { "readyReplicas": ready_replicas, "phase": phase } }); + patch_status::<GitServer>(client, ns, name, &status).await?; + + Ok(()) +} + +fn build_deployment( + ns: &str, + name: &str, + spec: &GitServerSpec, + or: &crate::crd::OwnerReference, + labels: &std::collections::BTreeMap<String, String>, +) -> Value { + let env = merge_env(&[], &spec.env); + let image = if spec.image.is_empty() { + "myapp/gitserver:latest".to_string() + } else { + spec.image.clone() + }; + let pull = if spec.image_pull_policy.is_empty() { + "IfNotPresent".to_string() + } else { + spec.image_pull_policy.clone() + }; + let resources = super::app::build_resources(&spec.resources); + + json!({ + "metadata": child_meta(name, ns, or, labels.clone()), + "spec": { + "replicas": 1, + "selector": { "matchLabels": labels }, + "template": { + "metadata": { "labels": labels.clone() }, + "spec": { + "containers": [{ + "name": "gitserver", + "image": image, + "ports": [ + { "name": "http", "containerPort": spec.http_port }, + { "name": "ssh", "containerPort": spec.ssh_port } + ], + "env": env.iter().map(env_var_to_json).collect::<Vec<_>>(), + "imagePullPolicy": pull, + "resources": resources, + "volumeMounts": [{ "name": "git-repos", "mountPath": "/data/repos" }] + }], + "volumes": [{ + "name": "git-repos", + "persistentVolumeClaim": { "claimName": format!("{}-repos", name) } + }] + } + } + } + }) +} + +fn build_http_service( + ns: &str, + name: &str, + spec: &GitServerSpec, + or: &crate::crd::OwnerReference, + labels: &std::collections::BTreeMap<String, String>, +) -> Value { + json!({ + "metadata": child_meta(&format!("{}-http", name), ns, or, labels.clone()), + "spec": { + "ports": [{ "port": spec.http_port, "targetPort": spec.http_port, "name": "http" }], + "selector": labels.clone(), + "type": "ClusterIP" + } + }) +} + +fn build_ssh_service( + 
ns: &str, + name: &str, + spec: &GitServerSpec, + or: &crate::crd::OwnerReference, + labels: &std::collections::BTreeMap<String, String>, +) -> Value { + json!({ + "metadata": child_meta(&format!("{}-ssh", name), ns, or, labels.clone()), + "spec": { + "ports": [{ "port": spec.ssh_port, "targetPort": spec.ssh_port, "name": "ssh" }], + "selector": labels.clone(), + "type": spec.ssh_service_type + } + }) +} + +fn build_pvc( + ns: &str, + name: &str, + spec: &GitServerSpec, + or: &crate::crd::OwnerReference, + labels: &std::collections::BTreeMap<String, String>, +) -> Value { + json!({ + "metadata": child_meta(&format!("{}-repos", name), ns, or, labels.clone()), + "spec": { + "accessModes": ["ReadWriteOnce"], + "resources": { "requests": { "storage": spec.storage_size } } + } + }) +} + +async fn apply_pvc( + client: &kube::Client, + ns: &str, + name: &str, + body: &Value, +) -> Result<(), kube::Error> { + let api: kube::Api<crate::crd::JsonResource> = kube::Api::namespaced(client.clone(), ns); + let jr = crate::crd::JsonResource::new(Default::default(), body.clone()); + match api.get(name).await { + Ok(_) => { + /* already exists, don't replace PVC */ + Ok(()) + } + Err(kube::Error::Api(e)) if e.code == 404 => { + info!(name, ns, "creating gitserver pvc"); + let _ = api.create(&kube::api::PostParams::default(), &jr).await?; + Ok(()) + } + Err(e) => Err(e), + } +} diff --git a/apps/operator/src/controller/helpers.rs b/apps/operator/src/controller/helpers.rs new file mode 100644 index 0000000..2ac6d6a --- /dev/null +++ b/apps/operator/src/controller/helpers.rs @@ -0,0 +1,96 @@ +//! Shared helpers for building Kubernetes child resources as JSON objects. + +use crate::crd::{EnvVar, K8sObjectMeta, OwnerReference}; + +/// Query a Deployment's actual status and derive the CR's phase. 
+pub async fn query_deployment_status( + client: &kube::Client, + ns: &str, + name: &str, +) -> Result<(i32, String), kube::Error> { + use k8s_openapi::api::apps::v1::Deployment; + + let api: kube::Api<Deployment> = kube::Api::namespaced(client.clone(), ns); + match api.get(name).await { + Ok(d) => { + let ready = d.status.as_ref().and_then(|s| s.ready_replicas).unwrap_or(0); + let phase = if ready > 0 { "Running" } else { "Pending" }; + Ok((ready, phase.to_string())) + } + Err(kube::Error::Api(e)) if e.code == 404 => Ok((0, "Pending".to_string())), + Err(e) => Err(e), + } +} + +/// Labels applied to every child resource. +pub fn std_labels() -> std::collections::BTreeMap<String, String> { + let mut m = std::collections::BTreeMap::new(); + m.insert( + "app.kubernetes.io/managed-by".to_string(), + "code-operator".to_string(), + ); + m.insert( + "app.kubernetes.io/part-of".to_string(), + "code-system".to_string(), + ); + m +} + +pub fn child_meta( + name: &str, + namespace: &str, + owner: &OwnerReference, + labels: std::collections::BTreeMap<String, String>, +) -> K8sObjectMeta { + K8sObjectMeta { + name: Some(name.to_string()), + namespace: Some(namespace.to_string()), + labels: Some(labels), + owner_references: Some(vec![owner.clone().into()]), + ..Default::default() + } +} + +pub fn owner_ref(parent: &K8sObjectMeta, api_version: &str, kind: &str) -> OwnerReference { + OwnerReference { + api_version: api_version.to_string(), + kind: kind.to_string(), + name: parent.name.clone().unwrap_or_default(), + uid: parent.uid.clone().unwrap_or_default(), + controller: Some(true), + block_owner_deletion: Some(true), + } +} + +/// Merge env vars (global first, then local overrides). 
+pub fn merge_env(global: &[EnvVar], local: &[EnvVar]) -> Vec<EnvVar> { + use std::collections::BTreeMap; + let mut map: BTreeMap<String, EnvVar> = global + .iter() + .cloned() + .map(|e| (e.name.clone(), e)) + .collect(); + for e in local { + map.insert(e.name.clone(), e.clone()); + } + map.into_values().collect() +} + +pub fn env_var_to_json(e: &EnvVar) -> serde_json::Value { + use serde_json::json; + let mut m = json!({ "name": e.name }); + if let Some(ref v) = e.value { + m["value"] = json!(v); + } + if let Some(ref src) = e.value_from { + if let Some(ref sr) = src.secret_ref { + m["valueFrom"] = json!({ + "secretRef": { + "name": sr.secret_name, + "key": sr.secret_key, + } + }); + } + } + m +} diff --git a/apps/operator/src/controller/migrate.rs b/apps/operator/src/controller/migrate.rs new file mode 100644 index 0000000..47ce299 --- /dev/null +++ b/apps/operator/src/controller/migrate.rs @@ -0,0 +1,171 @@ +//! Controller for the `Migrate` CRD — creates a one-shot Job on reconcile. +//! +//! The Job is re-created on every reconcile (idempotent). Once the Job +//! succeeds, the Migrate status is patched to "Completed". 
+ +use crate::context::ReconcileState; +use crate::controller::helpers::{child_meta, env_var_to_json, merge_env, owner_ref, std_labels}; +use crate::crd::{JsonResource, K8sObjectMeta, Migrate, MigrateSpec}; +use chrono::Utc; +use serde_json::{Value, json}; +use std::sync::Arc; +use tracing::info; + +pub async fn reconcile(mig: Arc<Migrate>, ctx: Arc<ReconcileState>) -> Result<(), kube::Error> { + let ns = mig.metadata.namespace.as_deref().unwrap_or("default"); + let name = mig.metadata.name.as_deref().unwrap_or(""); + let spec = &mig.spec; + let client = &ctx.client; + + let or = owner_ref(&mig.metadata, &mig.api_version, &mig.kind); + let labels = std_labels(); + + let job_meta = child_meta(name, ns, &or, labels.clone()); + let job = build_job(spec, job_meta, &labels); + + // Use JsonResource for Job create/replace (spec part) + let jobs_api: kube::Api<JsonResource> = kube::Api::namespaced(client.clone(), ns); + match jobs_api.get(name).await { + Ok(_) => { + info!(name, ns, "replacing migrate job"); + let _ = jobs_api + .replace(name, &kube::api::PostParams::default(), &job) + .await?; + } + Err(kube::Error::Api(e)) if e.code == 404 => { + info!(name, ns, "creating migrate job"); + let _ = jobs_api.create(&kube::api::PostParams::default(), &job).await?; + } + Err(e) => return Err(e), + } + + // Query real Job status via k8s-openapi (reads status subresource) + let job_status = query_job_status(client, ns, name).await?; + patch_migrate_status_from_job(client, ns, name, &job_status).await?; + + Ok(()) +} + +/// Query actual Job status and derive Migrate phase + timestamps. 
+async fn query_job_status( + client: &kube::Client, + ns: &str, + name: &str, +) -> Result<JobStatusResult, kube::Error> { + use k8s_openapi::api::batch::v1::Job; + let api: kube::Api<Job> = kube::Api::namespaced(client.clone(), ns); + match api.get(name).await { + Ok(job) => { + let status = job.status.as_ref(); + let succeeded = status.and_then(|s| s.succeeded).unwrap_or(0); + let failed = status.and_then(|s| s.failed).unwrap_or(0); + let active = status.and_then(|s| s.active).unwrap_or(0); + + let phase = if succeeded > 0 { + "Completed" + } else if failed > 0 { + "Failed" + } else if active > 0 { + "Running" + } else { + "Pending" + }; + + let start_time = status.and_then(|s| s.start_time.as_ref()).map(|t| t.to_string()); + let completion_time = status.and_then(|s| s.completion_time.as_ref()).map(|t| t.to_string()); + + Ok(JobStatusResult { phase, start_time, completion_time }) + } + Err(kube::Error::Api(e)) if e.code == 404 => { + Ok(JobStatusResult { phase: "Pending".to_string(), start_time: None, completion_time: None }) + } + Err(e) => Err(e), + } +} + +struct JobStatusResult { + phase: String, + start_time: Option<String>, + completion_time: Option<String>, +} + +async fn patch_migrate_status_from_job( + client: &kube::Client, + ns: &str, + name: &str, + job: &JobStatusResult, +) -> Result<(), kube::Error> { + let api: kube::Api<JsonResource> = kube::Api::namespaced(client.clone(), ns); + let mut status_obj = json!({ "phase": job.phase }); + if let Some(ref st) = job.start_time { + status_obj["startTime"] = json!(st); + } + if let Some(ref ct) = job.completion_time { + status_obj["completionTime"] = json!(ct); + } + let patch = json!({ "status": status_obj }); + let _ = api + .patch_status( + name, + &kube::api::PatchParams::default(), + &kube::api::Patch::Merge(&patch), + ) + .await?; + Ok(()) +} + +fn build_job( + spec: &MigrateSpec, + meta: K8sObjectMeta, + labels: &std::collections::BTreeMap<String, String>, +) -> JsonResource { + let image = if 
spec.image.is_empty() { + "myapp/migrate:latest".to_string() + } else { + spec.image.clone() + }; + + let env = merge_env(&[], &spec.env); + let env_vars: Vec<Value> = env.iter().map(env_var_to_json).collect(); + + let cmd_parts: Vec<&str> = spec.command.split_whitespace().collect(); + let cmd: Vec<&str> = if cmd_parts.is_empty() { + vec!["up"] + } else { + cmd_parts + }; + + let now = Utc::now().to_rfc3339(); + + let mut meta_with_anno = meta.clone(); + meta_with_anno.annotations = Some(std::collections::BTreeMap::from([( + "code.dev/last-migrate".to_string(), + now, + )])); + + let body = json!({ + "metadata": meta_with_anno, + "spec": { + "backoffLimit": spec.backoff_limit, + "ttlSecondsAfterFinished": 300, + "template": { + "metadata": { + "labels": labels.clone() + }, + "spec": { + "restartPolicy": "Never", + "containers": [{ + "name": "migrate", + "image": image, + "command": ["/app/migrate"], + "args": cmd, + "env": env_vars, + "imagePullPolicy": "IfNotPresent" + }] + } + } + } + }); + + JsonResource::new(meta, body) +} diff --git a/apps/operator/src/controller/mod.rs b/apps/operator/src/controller/mod.rs new file mode 100644 index 0000000..bac5dd6 --- /dev/null +++ b/apps/operator/src/controller/mod.rs @@ -0,0 +1,188 @@ +//! Kubernetes Controllers — one per CRD type. + +pub mod app; +pub mod email_worker; +pub mod git_hook; +pub mod gitserver; +pub mod helpers; +pub mod migrate; + +use crate::context::ReconcileCtx; +use crate::crd::{App, EmailWorker, GitHook, GitServer, Migrate}; +use futures::StreamExt; +use kube::runtime::{Controller, controller::Action}; +use std::sync::Arc; + +fn error_policy<K: std::fmt::Debug>( + obj: Arc<K>, + err: &kube::Error, + _: Arc<ReconcileCtx>, +) -> Action { + tracing::error!(?obj, %err, "reconcile error"); + Action::await_change() +} + +/// Start the App controller. 
+pub async fn start_app(client: kube::Client, ctx: Arc<ReconcileCtx>) -> anyhow::Result<()> { + Controller::new(kube::Api::<App>::all(client.clone()), Default::default()) + .owns::<k8s_openapi::api::apps::v1::Deployment>( + kube::Api::all(client.clone()), + Default::default(), + ) + .owns::<k8s_openapi::api::core::v1::Service>( + kube::Api::all(client.clone()), + Default::default(), + ) + .run( + |o, c| { + let c = c.clone(); + async move { + app::reconcile(o, c).await?; + Ok::<_, kube::Error>(Action::await_change()) + } + }, + error_policy, + ctx.clone(), + ) + .for_each(|r| async move { + if let Err(e) = r { + tracing::error!(%e, "app controller stream error"); + } + }) + .await; + + Ok(()) +} + +/// Start the GitServer controller. +pub async fn start_gitserver(client: kube::Client, ctx: Arc<ReconcileCtx>) -> anyhow::Result<()> { + Controller::new( + kube::Api::<GitServer>::all(client.clone()), + Default::default(), + ) + .owns::<k8s_openapi::api::apps::v1::Deployment>( + kube::Api::all(client.clone()), + Default::default(), + ) + .owns::<k8s_openapi::api::core::v1::Service>(kube::Api::all(client.clone()), Default::default()) + .owns::<k8s_openapi::api::core::v1::PersistentVolumeClaim>( + kube::Api::all(client.clone()), + Default::default(), + ) + .run( + |o, c| { + let c = c.clone(); + async move { + gitserver::reconcile(o, c).await?; + Ok::<_, kube::Error>(Action::await_change()) + } + }, + error_policy, + ctx.clone(), + ) + .for_each(|r| async move { + if let Err(e) = r { + tracing::error!(%e, "gitserver controller stream error"); + } + }) + .await; + + Ok(()) +} + +/// Start the EmailWorker controller. 
+pub async fn start_email_worker( + client: kube::Client, + ctx: Arc<ReconcileCtx>, +) -> anyhow::Result<()> { + Controller::new( + kube::Api::<EmailWorker>::all(client.clone()), + Default::default(), + ) + .owns::<k8s_openapi::api::apps::v1::Deployment>( + kube::Api::all(client.clone()), + Default::default(), + ) + .run( + |o, c| { + let c = c.clone(); + async move { + email_worker::reconcile(o, c).await?; + Ok::<_, kube::Error>(Action::await_change()) + } + }, + error_policy, + ctx.clone(), + ) + .for_each(|r| async move { + if let Err(e) = r { + tracing::error!(%e, "email_worker controller stream error"); + } + }) + .await; + + Ok(()) +} + +/// Start the GitHook controller. +pub async fn start_git_hook(client: kube::Client, ctx: Arc<ReconcileCtx>) -> anyhow::Result<()> { + Controller::new( + kube::Api::<GitHook>::all(client.clone()), + Default::default(), + ) + .owns::<k8s_openapi::api::apps::v1::Deployment>( + kube::Api::all(client.clone()), + Default::default(), + ) + .owns::<k8s_openapi::api::core::v1::ConfigMap>( + kube::Api::all(client.clone()), + Default::default(), + ) + .run( + |o, c| { + let c = c.clone(); + async move { + git_hook::reconcile(o, c).await?; + Ok::<_, kube::Error>(Action::await_change()) + } + }, + error_policy, + ctx.clone(), + ) + .for_each(|r| async move { + if let Err(e) = r { + tracing::error!(%e, "git_hook controller stream error"); + } + }) + .await; + + Ok(()) +} + +/// Start the Migrate controller. 
+pub async fn start_migrate(client: kube::Client, ctx: Arc<ReconcileCtx>) -> anyhow::Result<()> { + Controller::new( + kube::Api::<Migrate>::all(client.clone()), + Default::default(), + ) + .owns::<k8s_openapi::api::batch::v1::Job>(kube::Api::all(client.clone()), Default::default()) + .run( + |o, c| { + let c = c.clone(); + async move { + migrate::reconcile(o, c).await?; + Ok::<_, kube::Error>(Action::await_change()) + } + }, + error_policy, + ctx.clone(), + ) + .for_each(|r| async move { + if let Err(e) = r { + tracing::error!(%e, "migrate controller stream error"); + } + }) + .await; + + Ok(()) +} diff --git a/apps/operator/src/crd.rs b/apps/operator/src/crd.rs new file mode 100644 index 0000000..518b3d9 --- /dev/null +++ b/apps/operator/src/crd.rs @@ -0,0 +1,581 @@ +//! Custom Resource Definitions (CRDs) — plain serde types. +//! +//! API Group: `code.dev` +//! +//! The operator watches these resources using `kube::Api::<MyCrd>::all(client)`. +//! Reconcile is triggered on every change to any instance of these types. + +use k8s_openapi::apimachinery::pkg::apis::meta::v1::{ + ObjectMeta, OwnerReference as K8sOwnerReference, +}; +use kube::Resource; +use serde::{Deserialize, Serialize}; +use std::borrow::Cow; + +// --------------------------------------------------------------------------- +// A dynamic Resource impl for serde_json::Value — lets us use kube::Api<Value> +// --------------------------------------------------------------------------- + +/// JsonResource wraps serde_json::Value and implements Resource so we can use +/// `kube::Api<JsonResource>` for arbitrary child-resource API calls. +/// The metadata field is kept separate to satisfy the Resource::meta() bound. 
+#[derive(Clone, Debug, Default)] +pub struct JsonResource { + meta: ObjectMeta, + body: serde_json::Value, +} + +impl JsonResource { + pub fn new(meta: ObjectMeta, body: serde_json::Value) -> Self { + JsonResource { meta, body } + } +} + +impl std::ops::Deref for JsonResource { + type Target = serde_json::Value; + fn deref(&self) -> &serde_json::Value { + &self.body + } +} + +impl serde::Serialize for JsonResource { + fn serialize<S: serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> { + self.body.serialize(s) + } +} + +impl<'de> serde::Deserialize<'de> for JsonResource { + fn deserialize<D: serde::Deserializer<'de>>(d: D) -> Result<Self, D::Error> { + let body = serde_json::Value::deserialize(d)?; + let meta = body + .get("metadata") + .and_then(|m| serde_json::from_value(m.clone()).ok()) + .unwrap_or_default(); + Ok(JsonResource { meta, body }) + } +} + +impl Resource for JsonResource { + type DynamicType = (); + type Scope = k8s_openapi::NamespaceResourceScope; + fn kind(_: &()) -> Cow<'_, str> { + Cow::Borrowed("Object") + } + fn group(_: &()) -> Cow<'_, str> { + Cow::Borrowed("") + } + fn version(_: &()) -> Cow<'_, str> { + Cow::Borrowed("v1") + } + fn plural(_: &()) -> Cow<'_, str> { + Cow::Borrowed("objects") + } + fn meta(&self) -> &ObjectMeta { + &self.meta + } + fn meta_mut(&mut self) -> &mut ObjectMeta { + &mut self.meta + } +} + +// --------------------------------------------------------------------------- +// Shared types +// --------------------------------------------------------------------------- + +/// EnvVar with optional secret reference. 
+#[derive(Debug, Clone, Serialize, Deserialize, Default)] +pub struct EnvVar { + pub name: String, + #[serde(skip_serializing_if = "Option::is_none")] + pub value: Option<String>, + #[serde(skip_serializing_if = "Option::is_none")] + pub value_from: Option<EnvVarSource>, +} + +#[derive(Debug, Clone, Serialize, Deserialize, Default)] +pub struct EnvVarSource { + #[serde(skip_serializing_if = "Option::is_none")] + pub secret_ref: Option<SecretEnvVar>, +} + +#[derive(Debug, Clone, Serialize, Deserialize, Default)] +pub struct SecretEnvVar { + pub name: String, + pub secret_name: String, + pub secret_key: String, +} + +#[derive(Debug, Clone, Serialize, Deserialize, Default)] +pub struct ResourceRequirements { + #[serde(skip_serializing_if = "Option::is_none")] + pub requests: Option<ResourceList>, + #[serde(skip_serializing_if = "Option::is_none")] + pub limits: Option<ResourceList>, +} + +#[derive(Debug, Clone, Serialize, Deserialize, Default)] +pub struct ResourceList { + #[serde(skip_serializing_if = "Option::is_none")] + pub cpu: Option<String>, + #[serde(skip_serializing_if = "Option::is_none")] + pub memory: Option<String>, +} + +#[derive(Debug, Clone, Serialize, Deserialize, Default)] +pub struct Probe { + #[serde(default = "default_port")] + pub port: i32, + #[serde(default = "default_path")] + pub path: String, + #[serde(default = "default_initial_delay")] + pub initial_delay_seconds: i32, +} + +fn default_port() -> i32 { + 8080 +} +fn default_path() -> String { + "/health".to_string() +} +fn default_initial_delay() -> i32 { + 5 +} + +// --------------------------------------------------------------------------- +// App CRD +// --------------------------------------------------------------------------- + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct AppSpec { + #[serde(default = "default_app_image")] + pub image: String, + #[serde(default = "default_replicas")] + pub replicas: i32, + #[serde(default, skip_serializing_if = "Vec::is_empty")] + pub 
env: Vec<EnvVar>, + #[serde(skip_serializing_if = "Option::is_none")] + pub resources: Option<ResourceRequirements>, + #[serde(skip_serializing_if = "Option::is_none")] + pub liveness_probe: Option<Probe>, + #[serde(skip_serializing_if = "Option::is_none")] + pub readiness_probe: Option<Probe>, + #[serde(default)] + pub image_pull_policy: String, +} + +fn default_app_image() -> String { + "myapp/app:latest".to_string() +} +fn default_replicas() -> i32 { + 3 +} + +#[derive(Debug, Clone, Serialize, Deserialize, Default)] +pub struct AppStatus { + #[serde(skip_serializing_if = "Option::is_none")] + pub ready_replicas: Option<i32>, + #[serde(skip_serializing_if = "Option::is_none")] + pub phase: Option<String>, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct App { + pub api_version: String, + pub kind: String, + pub metadata: K8sObjectMeta, + pub spec: AppSpec, + #[serde(skip_serializing_if = "Option::is_none")] + pub status: Option<AppStatus>, +} + +impl App { + pub fn api_group() -> &'static str { + "code.dev" + } + pub fn version() -> &'static str { + "v1" + } + pub fn plural() -> &'static str { + "apps" + } +} + +impl Resource for App { + type DynamicType = (); + type Scope = k8s_openapi::NamespaceResourceScope; + fn kind(_: &Self::DynamicType) -> Cow<'_, str> { + Cow::Borrowed("App") + } + fn group(_: &Self::DynamicType) -> Cow<'_, str> { + Cow::Borrowed("code.dev") + } + fn version(_: &Self::DynamicType) -> Cow<'_, str> { + Cow::Borrowed("v1") + } + fn plural(_: &Self::DynamicType) -> Cow<'_, str> { + Cow::Borrowed("apps") + } + fn meta(&self) -> &ObjectMeta { + &self.metadata + } + fn meta_mut(&mut self) -> &mut ObjectMeta { + &mut self.metadata + } +} + +// --------------------------------------------------------------------------- +// GitServer CRD +// --------------------------------------------------------------------------- + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct GitServerSpec { + #[serde(default = 
"default_gitserver_image")] + pub image: String, + #[serde(default, skip_serializing_if = "Vec::is_empty")] + pub env: Vec<EnvVar>, + #[serde(skip_serializing_if = "Option::is_none")] + pub resources: Option<ResourceRequirements>, + #[serde(default = "default_ssh_service_type")] + pub ssh_service_type: String, + #[serde(default = "default_storage_size")] + pub storage_size: String, + #[serde(default)] + pub image_pull_policy: String, + #[serde(skip_serializing_if = "Option::is_none")] + pub ssh_domain: Option<String>, + #[serde(default = "default_ssh_port")] + pub ssh_port: i32, + #[serde(default = "default_http_port")] + pub http_port: i32, +} + +fn default_gitserver_image() -> String { + "myapp/gitserver:latest".to_string() +} +fn default_ssh_service_type() -> String { + "NodePort".to_string() +} +fn default_storage_size() -> String { + "10Gi".to_string() +} +fn default_ssh_port() -> i32 { + 22 +} +fn default_http_port() -> i32 { + 8022 +} + +#[derive(Debug, Clone, Serialize, Deserialize, Default)] +pub struct GitServerStatus { + #[serde(skip_serializing_if = "Option::is_none")] + pub ready_replicas: Option<i32>, + #[serde(skip_serializing_if = "Option::is_none")] + pub phase: Option<String>, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct GitServer { + pub api_version: String, + pub kind: String, + pub metadata: K8sObjectMeta, + pub spec: GitServerSpec, + #[serde(skip_serializing_if = "Option::is_none")] + pub status: Option<GitServerStatus>, +} + +impl GitServer { + pub fn api_group() -> &'static str { + "code.dev" + } + pub fn version() -> &'static str { + "v1" + } + pub fn plural() -> &'static str { + "gitservers" + } +} + +impl Resource for GitServer { + type DynamicType = (); + type Scope = k8s_openapi::NamespaceResourceScope; + fn kind(_: &Self::DynamicType) -> Cow<'_, str> { + Cow::Borrowed("GitServer") + } + fn group(_: &Self::DynamicType) -> Cow<'_, str> { + Cow::Borrowed("code.dev") + } + fn version(_: &Self::DynamicType) -> Cow<'_, 
str> { + Cow::Borrowed("v1") + } + fn plural(_: &Self::DynamicType) -> Cow<'_, str> { + Cow::Borrowed("gitservers") + } + fn meta(&self) -> &ObjectMeta { + &self.metadata + } + fn meta_mut(&mut self) -> &mut ObjectMeta { + &mut self.metadata + } +} + +// --------------------------------------------------------------------------- +// EmailWorker CRD +// --------------------------------------------------------------------------- + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct EmailWorkerSpec { + #[serde(default = "default_email_image")] + pub image: String, + #[serde(default, skip_serializing_if = "Vec::is_empty")] + pub env: Vec<EnvVar>, + #[serde(skip_serializing_if = "Option::is_none")] + pub resources: Option<ResourceRequirements>, + #[serde(default)] + pub image_pull_policy: String, +} + +fn default_email_image() -> String { + "myapp/email-worker:latest".to_string() +} + +#[derive(Debug, Clone, Serialize, Deserialize, Default)] +pub struct EmailWorkerStatus { + #[serde(skip_serializing_if = "Option::is_none")] + pub ready_replicas: Option<i32>, + #[serde(skip_serializing_if = "Option::is_none")] + pub phase: Option<String>, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct EmailWorker { + pub api_version: String, + pub kind: String, + pub metadata: K8sObjectMeta, + pub spec: EmailWorkerSpec, + #[serde(skip_serializing_if = "Option::is_none")] + pub status: Option<EmailWorkerStatus>, +} + +impl EmailWorker { + pub fn api_group() -> &'static str { + "code.dev" + } + pub fn version() -> &'static str { + "v1" + } + pub fn plural() -> &'static str { + "emailworkers" + } +} + +impl Resource for EmailWorker { + type DynamicType = (); + type Scope = k8s_openapi::NamespaceResourceScope; + fn kind(_: &Self::DynamicType) -> Cow<'_, str> { + Cow::Borrowed("EmailWorker") + } + fn group(_: &Self::DynamicType) -> Cow<'_, str> { + Cow::Borrowed("code.dev") + } + fn version(_: &Self::DynamicType) -> Cow<'_, str> { + Cow::Borrowed("v1") + } + fn 
plural(_: &Self::DynamicType) -> Cow<'_, str> { + Cow::Borrowed("emailworkers") + } + fn meta(&self) -> &ObjectMeta { + &self.metadata + } + fn meta_mut(&mut self) -> &mut ObjectMeta { + &mut self.metadata + } +} + +// --------------------------------------------------------------------------- +// GitHook CRD +// --------------------------------------------------------------------------- + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct GitHookSpec { + #[serde(default = "default_githook_image")] + pub image: String, + #[serde(default, skip_serializing_if = "Vec::is_empty")] + pub env: Vec<EnvVar>, + #[serde(skip_serializing_if = "Option::is_none")] + pub resources: Option<ResourceRequirements>, + #[serde(default)] + pub image_pull_policy: String, + #[serde(skip_serializing_if = "Option::is_none")] + pub worker_id: Option<String>, +} + +fn default_githook_image() -> String { + "myapp/git-hook:latest".to_string() +} + +#[derive(Debug, Clone, Serialize, Deserialize, Default)] +pub struct GitHookStatus { + #[serde(skip_serializing_if = "Option::is_none")] + pub ready_replicas: Option<i32>, + #[serde(skip_serializing_if = "Option::is_none")] + pub phase: Option<String>, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct GitHook { + pub api_version: String, + pub kind: String, + pub metadata: K8sObjectMeta, + pub spec: GitHookSpec, + #[serde(skip_serializing_if = "Option::is_none")] + pub status: Option<GitHookStatus>, +} + +impl GitHook { + pub fn api_group() -> &'static str { + "code.dev" + } + pub fn version() -> &'static str { + "v1" + } + pub fn plural() -> &'static str { + "githooks" + } +} + +impl Resource for GitHook { + type DynamicType = (); + type Scope = k8s_openapi::NamespaceResourceScope; + fn kind(_: &Self::DynamicType) -> Cow<'_, str> { + Cow::Borrowed("GitHook") + } + fn group(_: &Self::DynamicType) -> Cow<'_, str> { + Cow::Borrowed("code.dev") + } + fn version(_: &Self::DynamicType) -> Cow<'_, str> { + Cow::Borrowed("v1") + } 
+ fn plural(_: &Self::DynamicType) -> Cow<'_, str> { + Cow::Borrowed("githooks") + } + fn meta(&self) -> &ObjectMeta { + &self.metadata + } + fn meta_mut(&mut self) -> &mut ObjectMeta { + &mut self.metadata + } +} + +// --------------------------------------------------------------------------- +// Migrate CRD +// --------------------------------------------------------------------------- + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct MigrateSpec { + #[serde(default = "default_migrate_image")] + pub image: String, + #[serde(default, skip_serializing_if = "Vec::is_empty")] + pub env: Vec<EnvVar>, + #[serde(default = "default_migrate_cmd")] + pub command: String, + #[serde(default = "default_backoff_limit")] + pub backoff_limit: i32, +} + +fn default_migrate_image() -> String { + "myapp/migrate:latest".to_string() +} +fn default_migrate_cmd() -> String { + "up".to_string() +} +fn default_backoff_limit() -> i32 { + 3 +} + +#[derive(Debug, Clone, Serialize, Deserialize, Default)] +pub struct MigrateStatus { + #[serde(skip_serializing_if = "Option::is_none")] + pub phase: Option<String>, + #[serde(skip_serializing_if = "Option::is_none")] + pub start_time: Option<String>, + #[serde(skip_serializing_if = "Option::is_none")] + pub completion_time: Option<String>, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct Migrate { + pub api_version: String, + pub kind: String, + pub metadata: K8sObjectMeta, + pub spec: MigrateSpec, + #[serde(skip_serializing_if = "Option::is_none")] + pub status: Option<MigrateStatus>, +} + +impl Migrate { + pub fn api_group() -> &'static str { + "code.dev" + } + pub fn version() -> &'static str { + "v1" + } + pub fn plural() -> &'static str { + "migrates" + } +} + +impl Resource for Migrate { + type DynamicType = (); + type Scope = k8s_openapi::NamespaceResourceScope; + fn kind(_: &Self::DynamicType) -> Cow<'_, str> { + Cow::Borrowed("Migrate") + } + fn group(_: &Self::DynamicType) -> Cow<'_, str> { + 
Cow::Borrowed("code.dev") + } + fn version(_: &Self::DynamicType) -> Cow<'_, str> { + Cow::Borrowed("v1") + } + fn plural(_: &Self::DynamicType) -> Cow<'_, str> { + Cow::Borrowed("migrates") + } + fn meta(&self) -> &ObjectMeta { + &self.metadata + } + fn meta_mut(&mut self) -> &mut ObjectMeta { + &mut self.metadata + } +} + +// --------------------------------------------------------------------------- +// Shared K8s types — aligned with k8s-openapi for Resource trait compatibility +// --------------------------------------------------------------------------- + +/// Type alias so K8sObjectMeta satisfies Resource::meta() -> &k8s_openapi::...::ObjectMeta. +pub type K8sObjectMeta = ObjectMeta; + +/// OwnerReference compatible with k8s-openapi. +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct OwnerReference { + pub api_version: String, + pub kind: String, + pub name: String, + pub uid: String, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub controller: Option<bool>, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub block_owner_deletion: Option<bool>, +} + +impl From<OwnerReference> for K8sOwnerReference { + fn from(o: OwnerReference) -> Self { + K8sOwnerReference { + api_version: o.api_version, + kind: o.kind, + name: o.name, + uid: o.uid, + controller: o.controller, + block_owner_deletion: o.block_owner_deletion, + } + } +} diff --git a/apps/operator/src/lib.rs b/apps/operator/src/lib.rs new file mode 100644 index 0000000..2282475 --- /dev/null +++ b/apps/operator/src/lib.rs @@ -0,0 +1,3 @@ +pub mod context; +pub mod controller; +pub mod crd; diff --git a/apps/operator/src/main.rs b/apps/operator/src/main.rs new file mode 100644 index 0000000..8f6eae0 --- /dev/null +++ b/apps/operator/src/main.rs @@ -0,0 +1,100 @@ +//! Code System Kubernetes Operator +//! +//! Manages the lifecycle of: App, GitServer, EmailWorker, GitHook, Migrate CRDs. 
+ +use operator::context::ReconcileCtx; +use std::sync::Arc; +use tracing::{Level, error, info}; +use tracing_subscriber::FmtSubscriber; + +#[tokio::main] +async fn main() -> anyhow::Result<()> { + // ---- Logging ---- + let log_level = std::env::var("OPERATOR_LOG_LEVEL").unwrap_or_else(|_| "info".to_string()); + let level = match log_level.to_lowercase().as_str() { + "trace" => Level::TRACE, + "debug" => Level::DEBUG, + "info" => Level::INFO, + "warn" => Level::WARN, + "error" => Level::ERROR, + _ => Level::INFO, + }; + FmtSubscriber::builder() + .with_max_level(level) + .with_target(false) + .with_thread_ids(false) + .with_file(true) + .with_line_number(true) + .compact() + .init(); + + let ctx = Arc::new(ReconcileCtx::from_env().await?); + info!( + namespace = ctx.operator_namespace, + image_prefix = ctx.image_prefix, + "code-operator starting" + ); + + // ---- Spawn all 5 controllers ---- + let app_handle = tokio::spawn({ + let ctx = ctx.clone(); + let client = ctx.client.clone(); + async move { + use operator::controller; + if let Err(e) = controller::start_app(client, ctx).await { + error!(%e, "app controller stopped"); + } + } + }); + + let gs_handle = tokio::spawn({ + let ctx = ctx.clone(); + let client = ctx.client.clone(); + async move { + use operator::controller; + if let Err(e) = controller::start_gitserver(client, ctx).await { + error!(%e, "gitserver controller stopped"); + } + } + }); + + let ew_handle = tokio::spawn({ + let ctx = ctx.clone(); + let client = ctx.client.clone(); + async move { + use operator::controller; + if let Err(e) = controller::start_email_worker(client, ctx).await { + error!(%e, "email_worker controller stopped"); + } + } + }); + + let gh_handle = tokio::spawn({ + let ctx = ctx.clone(); + let client = ctx.client.clone(); + async move { + use operator::controller; + if let Err(e) = controller::start_git_hook(client, ctx).await { + error!(%e, "git_hook controller stopped"); + } + } + }); + + let mig_handle = tokio::spawn({ + let 
ctx = ctx.clone(); + let client = ctx.client.clone(); + async move { + use operator::controller; + if let Err(e) = controller::start_migrate(client, ctx).await { + error!(%e, "migrate controller stopped"); + } + } + }); + + // ---- Graceful shutdown on SIGINT / SIGTERM ---- + tokio::signal::ctrl_c().await.ok(); + + info!("code-operator stopped"); + let _ = tokio::join!(app_handle, gs_handle, ew_handle, gh_handle, mig_handle,); + Ok(()) +} diff --git a/components.json b/components.json new file mode 100644 index 0000000..15addee --- /dev/null +++ b/components.json @@ -0,0 +1,25 @@ +{ + "$schema": "https://ui.shadcn.com/schema.json", + "style": "base-nova", + "rsc": false, + "tsx": true, + "tailwind": { + "config": "", + "css": "src/index.css", + "baseColor": "neutral", + "cssVariables": true, + "prefix": "" + }, + "iconLibrary": "lucide", + "rtl": false, + "aliases": { + "components": "@/components", + "utils": "@/lib/utils", + "ui": "@/components/ui", + "lib": "@/lib", + "hooks": "@/hooks" + }, + "menuColor": "default", + "menuAccent": "subtle", + "registries": {} +} diff --git a/deploy/Chart.yaml b/deploy/Chart.yaml new file mode 100644 index 0000000..3f14379 --- /dev/null +++ b/deploy/Chart.yaml @@ -0,0 +1,13 @@ +apiVersion: v2 +name: c-----code +description: Self-hosted GitHub + Slack alternative platform +type: application +version: 0.1.0 +appVersion: "0.1.0" +keywords: + - git + - collaboration + - self-hosted +maintainers: + - name: C-----code Team + email: team@c.dev diff --git a/deploy/templates/NOTES.txt b/deploy/templates/NOTES.txt new file mode 100644 index 0000000..c044cee --- /dev/null +++ b/deploy/templates/NOTES.txt @@ -0,0 +1,35 @@ +{{/* Helm NOTES.txt – shown after install/upgrade */}} +{{- if .Release.IsInstall }} +🎉 {{ .Chart.Name }} {{ .Chart.Version }} installed in namespace {{ .Release.Namespace }}. + +⚠️ Prerequisites you must fulfil before the app starts: + + 1. PostgreSQL database is reachable. + 2. Redis is reachable. + 3. 
(Optional) NATS if HOOK_POOL is enabled. + 4. (Optional) Qdrant if AI embeddings are used. + +📋 Required Secret "{{ .Release.Name }}-secrets" (create manually or via external secrets): + + apiVersion: v1 + kind: Secret + metadata: + name: {{ .Release.Name }}-secrets + namespace: {{ .Release.Namespace }} + type: Opaque + stringData: + APP_DATABASE_URL: postgresql://user:password@postgres:5432/db + APP_REDIS_URL: redis://redis:6379 + # APP_SMTP_PASSWORD: ... + # APP_QDRANT_API_KEY: ... + + Or set .Values.secrets in values.yaml. + +🔄 To run database migrations: + helm upgrade {{ .Release.Name }} ./c-----code -n {{ .Release.Namespace }} \ + --set migrate.enabled=true + +📖 Useful commands: + kubectl get pods -n {{ .Release.Namespace }} + kubectl logs -n {{ .Release.Namespace }} -l app.kubernetes.io/name={{ .Chart.Name }} +{{- end }} diff --git a/deploy/templates/_helpers.tpl b/deploy/templates/_helpers.tpl new file mode 100644 index 0000000..4f7e72b --- /dev/null +++ b/deploy/templates/_helpers.tpl @@ -0,0 +1,44 @@ +{{/* ============================================================================= + Common helpers + ============================================================================= */}} + +{{- define "c-----code.fullname" -}} +{{- .Release.Name -}} +{{- end -}} + +{{- define "c-----code.namespace" -}} +{{- .Values.namespace | default .Release.Namespace -}} +{{- end -}} + +{{- define "c-----code.image" -}} +{{- $registry := .Values.image.registry -}} +{{- $pullPolicy := .Values.image.pullPolicy -}} +{{- printf "%s/%s:%s" $registry .image.repository .image.tag -}} +{{- end -}} + +{{/* Inject image pull policy into sub-chart image dict */}} +{{- define "c-----code.mergeImage" -}} +{{- $merged := dict "pullPolicy" $.Values.image.pullPolicy -}} +{{- $merged = merge $merged .image -}} +{{- printf "%s/%s:%s" $.Values.image.registry $merged.repository $merged.tag -}} +{{- end -}} + +{{/* Build a key-value env var list, optionally reading from a Secret */}} +{{- 
define "c-----code.envFromSecret" -}} +{{- $secretName := .existingSecret -}} +{{- $keys := .secretKeys -}} +{{- $result := list -}} +{{- range $envName, $secretKey := $keys -}} +{{- $item := dict "name" $envName "valueFrom" (dict "secretKeyRef" (dict "name" $secretName "key" $secretKey)) -}} +{{- $result = append $result $item -}} +{{- end -}} +{{- $result | toJson | fromJson -}} +{{- end -}} + +{{/* Merge two env lists (extra env over auto-injected) */}} +{{- define "c-----code.mergeEnv" -}} +{{- $auto := .auto -}} +{{- $extra := .extra | default list -}} +{{- $merged := append $auto $extra | toJson | fromJson -}} +{{- $merged | toYaml -}} +{{- end -}} diff --git a/deploy/templates/app-deployment.yaml b/deploy/templates/app-deployment.yaml new file mode 100644 index 0000000..d2eb191 --- /dev/null +++ b/deploy/templates/app-deployment.yaml @@ -0,0 +1,111 @@ +{{- if .Values.app.enabled -}} +apiVersion: apps/v1 +kind: Deployment +metadata: + name: {{ include "c-----code.fullname" . }}-app + namespace: {{ include "c-----code.namespace" . }} + labels: + app.kubernetes.io/name: {{ include "c-----code.fullname" . }}-app + app.kubernetes.io/instance: {{ .Release.Name }} + app.kubernetes.io/version: {{ .Chart.AppVersion }} +spec: + replicas: {{ .Values.app.replicaCount }} + selector: + matchLabels: + app.kubernetes.io/name: {{ include "c-----code.fullname" . }}-app + app.kubernetes.io/instance: {{ .Release.Name }} + template: + metadata: + labels: + app.kubernetes.io/name: {{ include "c-----code.fullname" . 
}}-app + app.kubernetes.io/instance: {{ .Release.Name }} + spec: + containers: + - name: app + image: "{{ .Values.image.registry }}/{{ .Values.app.image.repository }}:{{ .Values.app.image.tag }}" + imagePullPolicy: {{ .Values.app.image.pullPolicy | default .Values.image.pullPolicy }} + ports: + - name: http + containerPort: {{ .Values.app.service.port }} + protocol: TCP + env: + - name: APP_DATABASE_URL + valueFrom: + secretKeyRef: + name: {{ .Values.database.existingSecret | default (printf "%s-secrets" (include "c-----code.fullname" .)) }} + key: {{ .Values.database.secretKeys.url }} + optional: true + - name: APP_REDIS_URL + valueFrom: + secretKeyRef: + name: {{ .Values.redis.existingSecret | default (printf "%s-secrets" (include "c-----code.fullname" .)) }} + key: {{ .Values.redis.secretKeys.url }} + optional: true + {{- if .Values.nats.enabled }} + - name: HOOK_POOL_REDIS_LIST_PREFIX + value: "{hook}" + - name: HOOK_POOL_REDIS_LOG_CHANNEL + value: "hook:logs" + {{- end }} + {{- if .Values.qdrant.enabled }} + - name: APP_QDRANT_URL + value: {{ .Values.qdrant.url }} + {{- if and .Values.qdrant.existingSecret .Values.qdrant.secretKeys.apiKey }} + - name: APP_QDRANT_API_KEY + valueFrom: + secretKeyRef: + name: {{ .Values.qdrant.existingSecret }} + key: {{ .Values.qdrant.secretKeys.apiKey }} + optional: true + {{- end }} + {{- end }} + {{- range .Values.app.env }} + - name: {{ .name }} + value: {{ .value | quote }} + {{- end }} + livenessProbe: + httpGet: + path: {{ .Values.app.livenessProbe.path }} + port: {{ .Values.app.livenessProbe.port }} + initialDelaySeconds: {{ .Values.app.livenessProbe.initialDelaySeconds }} + periodSeconds: {{ .Values.app.livenessProbe.periodSeconds }} + readinessProbe: + httpGet: + path: {{ .Values.app.readinessProbe.path }} + port: {{ .Values.app.readinessProbe.port }} + initialDelaySeconds: {{ .Values.app.readinessProbe.initialDelaySeconds }} + periodSeconds: {{ .Values.app.readinessProbe.periodSeconds }} + resources: + {{- toYaml 
.Values.app.resources | nindent 10 }} + {{- with .Values.app.nodeSelector }} + nodeSelector: + {{- toYaml . | nindent 8 }} + {{- end }} + {{- with .Values.app.affinity }} + affinity: + {{- toYaml . | nindent 8 }} + {{- end }} + {{- with .Values.app.tolerations }} + tolerations: + {{- toYaml . | nindent 8 }} + {{- end }} +--- +apiVersion: v1 +kind: Service +metadata: + name: {{ include "c-----code.fullname" . }}-app + namespace: {{ include "c-----code.namespace" . }} + labels: + app.kubernetes.io/name: {{ include "c-----code.fullname" . }}-app + app.kubernetes.io/instance: {{ .Release.Name }} +spec: + type: {{ .Values.app.service.type }} + ports: + - port: {{ .Values.app.service.port }} + targetPort: http + protocol: TCP + name: http + selector: + app.kubernetes.io/name: {{ include "c-----code.fullname" . }}-app + app.kubernetes.io/instance: {{ .Release.Name }} +{{- end }} diff --git a/deploy/templates/configmap.yaml b/deploy/templates/configmap.yaml new file mode 100644 index 0000000..02add79 --- /dev/null +++ b/deploy/templates/configmap.yaml @@ -0,0 +1,15 @@ +apiVersion: v1 +kind: ConfigMap +metadata: + name: {{ include "c-----code.fullname" . }}-config + namespace: {{ include "c-----code.namespace" . }} + labels: + app.kubernetes.io/name: {{ .Chart.Name }} + app.kubernetes.io/instance: {{ .Release.Name }} + app.kubernetes.io/version: {{ .Chart.AppVersion }} +data: +{{- if .Values.app.config }} +{{- range $key, $value := .Values.app.config }} + {{ $key }}: {{ $value | quote }} +{{- end }} +{{- end }} diff --git a/deploy/templates/email-worker-deployment.yaml b/deploy/templates/email-worker-deployment.yaml new file mode 100644 index 0000000..173c4a0 --- /dev/null +++ b/deploy/templates/email-worker-deployment.yaml @@ -0,0 +1,58 @@ +{{- if .Values.emailWorker.enabled -}} +apiVersion: apps/v1 +kind: Deployment +metadata: + name: {{ include "c-----code.fullname" . }}-email-worker + namespace: {{ include "c-----code.namespace" . 
}} + labels: + app.kubernetes.io/name: {{ include "c-----code.fullname" . }}-email-worker + app.kubernetes.io/instance: {{ .Release.Name }} + app.kubernetes.io/version: {{ .Chart.AppVersion }} +spec: + replicas: 1 + selector: + matchLabels: + app.kubernetes.io/name: {{ include "c-----code.fullname" . }}-email-worker + app.kubernetes.io/instance: {{ .Release.Name }} + template: + metadata: + labels: + app.kubernetes.io/name: {{ include "c-----code.fullname" . }}-email-worker + app.kubernetes.io/instance: {{ .Release.Name }} + spec: + containers: + - name: email-worker + image: "{{ .Values.image.registry }}/{{ .Values.emailWorker.image.repository }}:{{ .Values.emailWorker.image.tag }}" + imagePullPolicy: {{ .Values.emailWorker.image.pullPolicy | default .Values.image.pullPolicy }} + env: + - name: APP_DATABASE_URL + valueFrom: + secretKeyRef: + name: {{ .Values.database.existingSecret | default (printf "%s-secrets" (include "c-----code.fullname" .)) }} + key: {{ .Values.database.secretKeys.url }} + optional: true + - name: APP_REDIS_URL + valueFrom: + secretKeyRef: + name: {{ .Values.redis.existingSecret | default (printf "%s-secrets" (include "c-----code.fullname" .)) }} + key: {{ .Values.redis.secretKeys.url }} + optional: true + {{- range .Values.emailWorker.env }} + - name: {{ .name }} + value: {{ .value | quote }} + {{- end }} + resources: + {{- toYaml .Values.emailWorker.resources | nindent 10 }} + {{- with .Values.emailWorker.nodeSelector }} + nodeSelector: + {{- toYaml . | nindent 8 }} + {{- end }} + {{- with .Values.emailWorker.affinity }} + affinity: + {{- toYaml . | nindent 8 }} + {{- end }} + {{- with .Values.emailWorker.tolerations }} + tolerations: + {{- toYaml . 
| nindent 8 }} + {{- end }} +{{- end }} diff --git a/deploy/templates/git-hook-deployment.yaml b/deploy/templates/git-hook-deployment.yaml new file mode 100644 index 0000000..17107ce --- /dev/null +++ b/deploy/templates/git-hook-deployment.yaml @@ -0,0 +1,64 @@ +{{- if .Values.gitHook.enabled -}} +apiVersion: apps/v1 +kind: Deployment +metadata: + name: {{ include "c-----code.fullname" . }}-git-hook + namespace: {{ include "c-----code.namespace" . }} + labels: + app.kubernetes.io/name: {{ include "c-----code.fullname" . }}-git-hook + app.kubernetes.io/instance: {{ .Release.Name }} + app.kubernetes.io/version: {{ .Chart.AppVersion }} +spec: + replicas: {{ .Values.gitHook.replicaCount | default 2 }} + selector: + matchLabels: + app.kubernetes.io/name: {{ include "c-----code.fullname" . }}-git-hook + app.kubernetes.io/instance: {{ .Release.Name }} + template: + metadata: + labels: + app.kubernetes.io/name: {{ include "c-----code.fullname" . }}-git-hook + app.kubernetes.io/instance: {{ .Release.Name }} + spec: + containers: + - name: git-hook + image: "{{ .Values.image.registry }}/{{ .Values.gitHook.image.repository }}:{{ .Values.gitHook.image.tag }}" + imagePullPolicy: {{ .Values.gitHook.image.pullPolicy | default .Values.image.pullPolicy }} + env: + - name: APP_DATABASE_URL + valueFrom: + secretKeyRef: + name: {{ .Values.database.existingSecret | default (printf "%s-secrets" (include "c-----code.fullname" .)) }} + key: {{ .Values.database.secretKeys.url }} + optional: true + - name: APP_REDIS_URL + valueFrom: + secretKeyRef: + name: {{ .Values.redis.existingSecret | default (printf "%s-secrets" (include "c-----code.fullname" .)) }} + key: {{ .Values.redis.secretKeys.url }} + optional: true + {{- if .Values.nats.enabled }} + - name: HOOK_POOL_REDIS_LIST_PREFIX + value: "{hook}" + - name: HOOK_POOL_REDIS_LOG_CHANNEL + value: "hook:logs" + {{- end }} + {{- range .Values.gitHook.env }} + - name: {{ .name }} + value: {{ .value | quote }} + {{- end }} + resources: + {{- 
toYaml .Values.gitHook.resources | nindent 10 }} + {{- with .Values.gitHook.nodeSelector }} + nodeSelector: + {{- toYaml . | nindent 8 }} + {{- end }} + {{- with .Values.gitHook.affinity }} + affinity: + {{- toYaml . | nindent 8 }} + {{- end }} + {{- with .Values.gitHook.tolerations }} + tolerations: + {{- toYaml . | nindent 8 }} + {{- end }} +{{- end }} diff --git a/deploy/templates/gitserver-deployment.yaml b/deploy/templates/gitserver-deployment.yaml new file mode 100644 index 0000000..388f381 --- /dev/null +++ b/deploy/templates/gitserver-deployment.yaml @@ -0,0 +1,162 @@ +{{- if .Values.gitserver.enabled -}} +{{- $fullName := include "c-----code.fullname" . -}} +{{- $ns := include "c-----code.namespace" . -}} +{{- $svc := .Values.gitserver -}} + +{{/* PersistentVolumeClaim for git repositories */}} +{{- if $svc.persistence.enabled }} +--- +apiVersion: v1 +kind: PersistentVolumeClaim +metadata: + name: {{ $fullName }}-repos + namespace: {{ $ns }} + labels: + app.kubernetes.io/name: {{ $fullName }}-gitserver + app.kubernetes.io/instance: {{ $.Release.Name }} +spec: + accessModes: + - {{ $svc.persistence.accessMode | default "ReadWriteOnce" }} + resources: + requests: + storage: {{ $svc.persistence.size }} + {{- if $svc.persistence.storageClass }} + storageClassName: {{ $svc.persistence.storageClass }} + {{- end }} +{{- end }} + +--- +apiVersion: apps/v1 +kind: Deployment +metadata: + name: {{ $fullName }}-gitserver + namespace: {{ $ns }} + labels: + app.kubernetes.io/name: {{ $fullName }}-gitserver + app.kubernetes.io/instance: {{ $.Release.Name }} + app.kubernetes.io/version: {{ $.Chart.AppVersion }} +spec: + replicas: {{ $svc.replicaCount }} + selector: + matchLabels: + app.kubernetes.io/name: {{ $fullName }}-gitserver + app.kubernetes.io/instance: {{ $.Release.Name }} + template: + metadata: + labels: + app.kubernetes.io/name: {{ $fullName }}-gitserver + app.kubernetes.io/instance: {{ $.Release.Name }} + spec: + containers: + - name: gitserver + image: "{{ 
$.Values.image.registry }}/{{ $svc.image.repository }}:{{ $svc.image.tag }}" + imagePullPolicy: {{ $svc.image.pullPolicy | default $.Values.image.pullPolicy }} + ports: + - name: http + containerPort: {{ $svc.service.http.port }} + protocol: TCP + - name: ssh + containerPort: {{ $svc.ssh.port }} + protocol: TCP + env: + - name: APP_REPOS_ROOT + value: /data/repos + - name: APP_DATABASE_URL + valueFrom: + secretKeyRef: + name: {{ $.Values.database.existingSecret | default (printf "%s-secrets" $fullName) }} + key: {{ $.Values.database.secretKeys.url }} + optional: true + - name: APP_REDIS_URL + valueFrom: + secretKeyRef: + name: {{ $.Values.redis.existingSecret | default (printf "%s-secrets" $fullName) }} + key: {{ $.Values.redis.secretKeys.url }} + optional: true + {{- if $svc.ssh.domain }} + - name: APP_SSH_DOMAIN + value: {{ $svc.ssh.domain }} + {{- end }} + {{- if $svc.ssh.port }} + - name: APP_SSH_PORT + value: {{ $svc.ssh.port | quote }} + {{- end }} + {{- range $svc.env }} + - name: {{ .name }} + value: {{ .value | quote }} + {{- end }} + resources: + {{- toYaml $svc.resources | nindent 10 }} + volumeMounts: + {{- if $svc.persistence.enabled }} + - name: repos + mountPath: /data/repos + {{- end }} + volumes: + {{- if $svc.persistence.enabled }} + - name: repos + persistentVolumeClaim: + claimName: {{ $fullName }}-repos + {{- end }} + {{- with $svc.nodeSelector }} + nodeSelector: + {{- toYaml . | nindent 8 }} + {{- end }} + {{- with $svc.affinity }} + affinity: + {{- toYaml . | nindent 8 }} + {{- end }} + {{- with $svc.tolerations }} + tolerations: + {{- toYaml . 
| nindent 8 }} + {{- end }} + +--- +# HTTP service (git smart HTTP) +apiVersion: v1 +kind: Service +metadata: + name: {{ $fullName }}-gitserver-http + namespace: {{ $ns }} + labels: + app.kubernetes.io/name: {{ $fullName }}-gitserver + app.kubernetes.io/instance: {{ $.Release.Name }} +spec: + type: {{ $svc.service.http.type }} + ports: + - name: http + port: {{ $svc.service.http.port }} + targetPort: http + protocol: TCP + selector: + app.kubernetes.io/name: {{ $fullName }}-gitserver + app.kubernetes.io/instance: {{ $.Release.Name }} + +--- +# SSH service (git over SSH) +apiVersion: v1 +kind: Service +metadata: + name: {{ $fullName }}-gitserver-ssh + namespace: {{ $ns }} + labels: + app.kubernetes.io/name: {{ $fullName }}-gitserver + app.kubernetes.io/instance: {{ $.Release.Name }} +spec: + type: {{ $svc.service.ssh.type }} + {{- if eq $svc.service.ssh.type "NodePort" }} + ports: + - name: ssh + port: {{ $svc.ssh.port }} + targetPort: ssh + nodePort: {{ $svc.service.ssh.nodePort }} + {{- else }} + ports: + - name: ssh + port: {{ $svc.ssh.port }} + targetPort: ssh + {{- end }} + selector: + app.kubernetes.io/name: {{ $fullName }}-gitserver + app.kubernetes.io/instance: {{ $.Release.Name }} +{{- end }} diff --git a/deploy/templates/ingress.yaml b/deploy/templates/ingress.yaml new file mode 100644 index 0000000..6031323 --- /dev/null +++ b/deploy/templates/ingress.yaml @@ -0,0 +1,46 @@ +{{- if .Values.app.ingress.enabled -}} +{{- $svcName := printf "%s-app" (include "c-----code.fullname" .) -}} +{{- $ns := include "c-----code.namespace" . -}} +{{- $ing := .Values.app.ingress -}} +apiVersion: networking.k8s.io/v1 +kind: Ingress +metadata: + name: {{ include "c-----code.fullname" . }}-ingress + namespace: {{ $ns }} + labels: + app.kubernetes.io/name: {{ include "c-----code.fullname" . }}-app + app.kubernetes.io/instance: {{ .Release.Name }} + {{- with $ing.annotations }} + annotations: + {{- toYaml . 
| nindent 4 }} + {{- end }} +spec: + {{- if $ing.className }} + ingressClassName: {{ $ing.className }} + {{- end }} + {{- if $ing.tls }} + tls: + {{- range $ing.tls }} + - hosts: + {{- range .hosts }} + - {{ . | quote }} + {{- end }} + secretName: {{ .secretName }} + {{- end }} + {{- end }} + rules: + {{- range $ing.hosts }} + - host: {{ .host | quote }} + http: + paths: + {{- range .paths }} + - path: {{ .path }} + pathType: {{ .pathType | default "Prefix" }} + backend: + service: + name: {{ $svcName }} + port: + number: {{ $.Values.app.service.port }} + {{- end }} + {{- end }} +{{- end }} diff --git a/deploy/templates/migrate-job.yaml b/deploy/templates/migrate-job.yaml new file mode 100644 index 0000000..4023783 --- /dev/null +++ b/deploy/templates/migrate-job.yaml @@ -0,0 +1,42 @@ +{{- if .Values.migrate.enabled -}} +apiVersion: batch/v1 +kind: Job +metadata: + name: {{ include "c-----code.fullname" . }}-migrate + namespace: {{ include "c-----code.namespace" . }} + labels: + app.kubernetes.io/name: {{ include "c-----code.fullname" . }}-migrate + app.kubernetes.io/instance: {{ .Release.Name }} + app.kubernetes.io/version: {{ .Chart.AppVersion }} + helm.sh/hook: post-install,post-upgrade + helm.sh/hook-delete-policy: before-hook-creation +spec: + backoffLimit: {{ .Values.migrate.backoffLimit }} + template: + metadata: + labels: + app.kubernetes.io/name: {{ include "c-----code.fullname" . 
}}-migrate + app.kubernetes.io/instance: {{ .Release.Name }} + spec: + restartPolicy: OnFailure + containers: + - name: migrate + image: "{{ .Values.image.registry }}/{{ .Values.migrate.image.repository }}:{{ .Values.migrate.image.tag }}" + imagePullPolicy: {{ .Values.migrate.image.pullPolicy | default .Values.image.pullPolicy }} + args: + {{- if .Values.migrate.command }} + - {{ .Values.migrate.command }} + {{- else }} + - up + {{- end }} + env: + - name: APP_DATABASE_URL + valueFrom: + secretKeyRef: + name: {{ .Values.database.existingSecret | default (printf "%s-secrets" (include "c-----code.fullname" .)) }} + key: {{ .Values.database.secretKeys.url }} + {{- range .Values.migrate.env }} + - name: {{ .name }} + value: {{ .value | quote }} + {{- end }} +{{- end }} diff --git a/deploy/templates/operator-deployment.yaml b/deploy/templates/operator-deployment.yaml new file mode 100644 index 0000000..19968a5 --- /dev/null +++ b/deploy/templates/operator-deployment.yaml @@ -0,0 +1,52 @@ +{{- if .Values.operator.enabled -}} +apiVersion: apps/v1 +kind: Deployment +metadata: + name: {{ include "c-----code.fullname" . }}-operator + namespace: {{ include "c-----code.namespace" . }} + labels: + app.kubernetes.io/name: {{ include "c-----code.fullname" . }}-operator + app.kubernetes.io/instance: {{ .Release.Name }} + app.kubernetes.io/version: {{ .Chart.AppVersion }} +spec: + replicas: 1 + selector: + matchLabels: + app.kubernetes.io/name: {{ include "c-----code.fullname" . }}-operator + app.kubernetes.io/instance: {{ .Release.Name }} + template: + metadata: + labels: + app.kubernetes.io/name: {{ include "c-----code.fullname" . }}-operator + app.kubernetes.io/instance: {{ .Release.Name }} + spec: + serviceAccountName: {{ include "c-----code.fullname" . 
}}-operator + containers: + - name: operator + image: "{{ .Values.image.registry }}/{{ .Values.operator.image.repository }}:{{ .Values.operator.image.tag }}" + imagePullPolicy: {{ .Values.operator.image.pullPolicy | default .Values.image.pullPolicy }} + resources: + {{- toYaml .Values.operator.resources | nindent 10 }} + {{- with .Values.operator.nodeSelector }} + nodeSelector: + {{- toYaml . | nindent 8 }} + {{- end }} + {{- with .Values.operator.affinity }} + affinity: + {{- toYaml . | nindent 8 }} + {{- end }} + {{- with .Values.operator.tolerations }} + tolerations: + {{- toYaml . | nindent 8 }} + {{- end }} + +--- +apiVersion: v1 +kind: ServiceAccount +metadata: + name: {{ include "c-----code.fullname" . }}-operator + namespace: {{ include "c-----code.namespace" . }} + labels: + app.kubernetes.io/name: {{ include "c-----code.fullname" . }}-operator + app.kubernetes.io/instance: {{ .Release.Name }} +{{- end }} diff --git a/deploy/templates/secret.yaml b/deploy/templates/secret.yaml new file mode 100644 index 0000000..b1b354e --- /dev/null +++ b/deploy/templates/secret.yaml @@ -0,0 +1,17 @@ +{{- /* Template for bootstrap secrets – replace with external secret manager in prod */ -}} +{{- if .Values.secrets }} +apiVersion: v1 +kind: Secret +metadata: + name: {{ include "c-----code.fullname" . }}-secrets + namespace: {{ include "c-----code.namespace" . 
}} + labels: + app.kubernetes.io/name: {{ .Chart.Name }} + app.kubernetes.io/instance: {{ .Release.Name }} + app.kubernetes.io/version: {{ .Chart.AppVersion }} +type: Opaque +stringData: +{{- range $key, $value := .Values.secrets }} + {{ $key }}: {{ $value | quote }} +{{- end }} +{{- end }} diff --git a/deploy/values.yaml b/deploy/values.yaml new file mode 100644 index 0000000..441064d --- /dev/null +++ b/deploy/values.yaml @@ -0,0 +1,262 @@ +# ============================================================================= +# Global / common settings +# ============================================================================= +namespace: c-----code +releaseName: c-----code + +image: + registry: harbor.gitdata.me/gta_team + pullPolicy: IfNotPresent + +# PostgreSQL (required) – set connection string via secret or values +database: + existingSecret: "" + secretKeys: + url: APP_DATABASE_URL + +# Redis (required) +redis: + existingSecret: "" + secretKeys: + url: APP_REDIS_URL + +# NATS (optional – required only if HOOK_POOL is enabled) +nats: + enabled: false + url: nats://nats:4222 + +# Qdrant (optional – required only if AI embeddings are used) +qdrant: + enabled: false + url: http://qdrant:6333 + existingSecret: "" + secretKeys: + apiKey: APP_QDRANT_API_KEY + +# ============================================================================= +# App – main web/API service +# ============================================================================= +app: + enabled: true + replicaCount: 3 + + image: + repository: app + tag: latest + + service: + type: ClusterIP + port: 8080 + + ingress: + enabled: false + className: cilium # Cilium Ingress (or envoy for EnvoyGateway) + annotations: {} + hosts: + - host: c-----.local + paths: + - path: / + pathType: Prefix + tls: [] + + resources: + requests: + cpu: 100m + memory: 256Mi + limits: + cpu: 1000m + memory: 1Gi + + livenessProbe: + path: /health + port: 8080 + initialDelaySeconds: 5 + periodSeconds: 10 + + readinessProbe: 
+ path: /health + port: 8080 + initialDelaySeconds: 5 + periodSeconds: 5 + + # Extra env vars (merge with auto-injected ones) + env: [] + + nodeSelector: {} + tolerations: [] + affinity: {} + +# ============================================================================= +# Gitserver – git daemon / SSH + HTTP server +# ============================================================================= +gitserver: + enabled: true + replicaCount: 1 + + image: + repository: gitserver + tag: latest + + service: + http: + type: ClusterIP + port: 8022 + ssh: + type: NodePort + nodePort: 30222 + + resources: + requests: + cpu: 100m + memory: 128Mi + limits: + cpu: 500m + memory: 512Mi + + # Storage for git repos + persistence: + enabled: true + storageClass: "" + size: 50Gi + accessMode: ReadWriteOnce + + ssh: + domain: "" + port: 22 + + env: [] + + nodeSelector: {} + tolerations: [] + affinity: {} + +# ============================================================================= +# Email worker – processes outgoing email queue +# ============================================================================= +emailWorker: + enabled: true + + image: + repository: email-worker + tag: latest + + resources: + requests: + cpu: 50m + memory: 64Mi + limits: + cpu: 200m + memory: 256Mi + + env: [] + + nodeSelector: {} + tolerations: [] + affinity: {} + +# ============================================================================= +# Git hook pool – handles pre-receive / post-receive hooks +# ============================================================================= +gitHook: + enabled: true + + image: + repository: git-hook + tag: latest + + replicaCount: 2 + + resources: + requests: + cpu: 50m + memory: 64Mi + limits: + cpu: 200m + memory: 256Mi + + env: [] + + nodeSelector: {} + tolerations: [] + affinity: {} + +# ============================================================================= +# Migrate – database migration Job (runOnce) +# 
============================================================================= +migrate: + enabled: false # Set true to run migrations on upgrade + + image: + repository: migrate + tag: latest + + command: up + backoffLimit: 3 + + env: [] + +# ============================================================================= +# Operator – Kubernetes operator (manages custom App/GitServer CRDs) +# ============================================================================= +operator: + enabled: false # Enable only if running the custom operator + + image: + repository: operator + tag: latest + + resources: + requests: + cpu: 50m + memory: 64Mi + limits: + cpu: 200m + memory: 256Mi + + nodeSelector: {} + tolerations: [] + affinity: {} + +# ============================================================================= +# Act Runner – Gitea Actions self-hosted runner +# ============================================================================= +actRunner: + enabled: false + + image: + repository: act-runner + tag: latest + + replicaCount: 2 + + # Concurrency per runner instance + capacity: 2 + + # Runner labels (must match workflow `runs-on`) + labels: + - gitea + - docker + + logLevel: info + + cache: + enabled: true + dir: /tmp/actions-cache + + resources: + requests: + cpu: 500m + memory: 1Gi + limits: + cpu: 2000m + memory: 4Gi + + env: [] + + nodeSelector: {} + tolerations: + - key: "runner" + operator: "Equal" + value: "true" + effect: "NoSchedule" + affinity: {} diff --git a/docker/app.Dockerfile b/docker/app.Dockerfile new file mode 100644 index 0000000..9a446b8 --- /dev/null +++ b/docker/app.Dockerfile @@ -0,0 +1,41 @@ +# ---- Stage 1: Build ---- +FROM rust:1.94-bookworm AS builder + +ARG BUILD_TARGET=x86_64-unknown-linux-gnu +ENV TARGET=${BUILD_TARGET} + +# Build dependencies: OpenSSL, libgit2, zlib, clang for sea-orm codegen +RUN apt-get update && apt-get install -y --no-install-recommends \ + pkg-config libssl-dev libclang-dev \ + gcc g++ make \ + && rm -rf 
/var/lib/apt/lists/* + +WORKDIR /build + +# Copy workspace manifests +COPY Cargo.toml Cargo.lock ./ +COPY libs/ libs/ +COPY apps/app/ apps/app/ + +# Pre-build dependencies only +RUN cargo fetch + +# Build the binary +RUN --mount=type=cache,target=/usr/local/cargo/registry \ + --mount=type=cache,target=/usr/local/cargo/git \ + --mount=type=cache,target=target \ + cargo build --release --package app --target ${TARGET} + +# ---- Stage 2: Runtime ---- +FROM debian:bookworm-slim AS runtime + +RUN apt-get update && apt-get install -y --no-install-recommends \ + ca-certificates libssl3 \ + && rm -rf /var/lib/apt/lists/* + +WORKDIR /app +COPY --from=builder /build/target/${TARGET}/release/app /app/app + +# All config via environment variables (APP_* prefix) +ENV APP_LOG_LEVEL=info +ENTRYPOINT ["/app/app"] diff --git a/docker/build.md b/docker/build.md new file mode 100644 index 0000000..ff9b0de --- /dev/null +++ b/docker/build.md @@ -0,0 +1,171 @@ +# Docker 构建指南 + +## 前提条件 + +- Docker 20.10+ +- Cargo.lock 已存在(`cargo generate-lockfile`) +- 网络能够访问 crates.io + +## 快速开始 + +```bash +# 构建全部镜像(默认 registry=myapp, tag=latest) +./docker/build.sh + +# 构建指定镜像 +./docker/build.sh app +./docker/build.sh gitserver email-worker + +# 指定 registry 和 tag +REGISTRY=myregistry TAG=v1.0.0 ./docker/build.sh +``` + +## 镜像列表 + +| 镜像 | Dockerfile | 二进制 | 实例类型 | 说明 | +|---|---|---|---|---| +| `myapp/app:latest` | `app.Dockerfile` | `app` | 多实例 | 主 Web 服务(API + WS) | +| `myapp/gitserver:latest` | `gitserver.Dockerfile` | `gitserver` | 单实例 | Git HTTP + SSH 服务 | +| `myapp/email-worker:latest` | `email-worker.Dockerfile` | `email-worker` | 单实例 | 邮件发送 Worker | +| `myapp/git-hook:latest` | `git-hook.Dockerfile` | `git-hook` | 单实例 | Git Hook 事件处理 | +| `myapp/migrate:latest` | `migrate.Dockerfile` | `migrate` | Job/InitContainer | 数据库迁移 CLI | + +## 部署架构 + +``` + ┌─ NATS ─┐ + │ │ +┌─────────┐ ┌──────────────┐ ┌─────────────────┐ +│ LB/ │───▶│ app (×N) │ │ git-hook │ +│ nginx │ │ (stateless) │ │ (单实例) │ 
+└─────────┘ └──────────────┘ └─────────────────┘ + ┌──────────────┐ + │ gitserver │ + │ (单实例) │ ┌─────────────────┐ + │ HTTP :8022 │───▶│ email-worker │ + │ SSH :2222 │ │ (单实例) │ + └──────────────┘ └─────────────────┘ +``` + +## 环境变量 + +所有配置通过环境变量注入,无需修改镜像: + +| 变量 | 示例 | 说明 | +|---|---|---| +| `APP_DATABASE_URL` | `postgres://user:pass@host:5432/db` | 数据库连接 | +| `APP_REDIS_URLS` | `redis://host:6379` | Redis(多实例用逗号分隔) | +| `APP_SMTP_HOST` | `smtp.example.com` | SMTP 服务器 | +| `APP_SMTP_USERNAME` | `noreply@example.com` | SMTP 用户名 | +| `APP_SMTP_PASSWORD` | `xxx` | SMTP 密码 | +| `APP_SMTP_FROM` | `noreply@example.com` | 发件人地址 | +| `APP_AI_BASIC_URL` | `https://api.openai.com/v1` | AI API 地址 | +| `APP_AI_API_KEY` | `sk-xxx` | AI API Key | +| `APP_DOMAIN_URL` | `https://example.com` | 主域名 | +| `APP_LOG_LEVEL` | `info` | 日志级别: trace/debug/info/warn/error | +| `APP_SSH_DOMAIN` | `git.example.com` | Git SSH 域名 | +| `APP_REPOS_ROOT` | `/data/repos` | Git 仓库存储路径 | +| `NATS_URL` | `nats://localhost:4222` | NATS 服务器地址 | + +## 数据库迁移 + +镜像启动前先运行迁移: + +```bash +# 方式一:直接运行 +docker run --rm \ + --env-file .env \ + myapp/migrate:latest up + +# 方式二:Kubernetes InitContainer +# 见下方 K8s 示例 +``` + +## Kubernetes 部署示例 + +```yaml +apiVersion: apps/v1 +kind: Deployment +metadata: + name: app +spec: + replicas: 3 + template: + spec: + containers: + - name: app + image: myapp/app:latest + envFrom: + - secretRef: + name: app-secrets +--- +apiVersion: apps/v1 +kind: Deployment +metadata: + name: gitserver +spec: + replicas: 1 + template: + spec: + containers: + - name: gitserver + image: myapp/gitserver:latest + ports: + - containerPort: 8022 # HTTP + - containerPort: 2222 # SSH + envFrom: + - secretRef: + name: app-secrets + volumeMounts: + - name: repos + mountPath: /data/repos + volumes: + - name: repos + persistentVolumeClaim: + claimName: git-repos +--- +apiVersion: batch/v1 +kind: Job +metadata: + name: migrate +spec: + template: + spec: + containers: + - name: migrate + image: 
myapp/migrate:latest + envFrom: + - secretRef: + name: app-secrets + args: ["up"] + restartPolicy: Never +``` + +## 构建缓存 + +使用 Docker BuildKit 的构建缓存: +- `--mount=type=cache,target=/usr/local/cargo/registry` — crates.io 依赖 +- `--mount=type=cache,target=/usr/local/cargo/git` — git 依赖 +- `--mount=type=cache,target=target` — 编译产物 + +建议挂载持久化缓存卷以加速增量构建: + +```bash +docker buildx create --use +docker buildx build \ + --cache-from=type=local,src=/tmp/cargo-cache \ + --cache-to=type=local,dest=/tmp/cargo-cache \ + -f docker/app.Dockerfile -t myapp/app . +``` + +## 跨平台构建 + +默认构建 x86_64 Linux 可执行文件。构建其他平台: + +```bash +# ARM64 +BUILD_TARGET=aarch64-unknown-linux-gnu ./docker/build.sh + +# 需先安装对应 target: +rustup target add aarch64-unknown-linux-gnu +``` diff --git a/docker/build.sh b/docker/build.sh new file mode 100644 index 0000000..fedf3ef --- /dev/null +++ b/docker/build.sh @@ -0,0 +1,52 @@ +#!/bin/bash +set -e + +REGISTRY="${REGISTRY:-harbor.gitdata.me/gta_team}" +TAG="${TAG:-latest}" +BUILD_TARGET="${BUILD_TARGET:-x86_64-unknown-linux-gnu}" + +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +cd "$SCRIPT_DIR/.." + +# All images: (dockerfile, image-name) +declare -A ALL_IMAGES=( + [app]="docker/app.Dockerfile" + [gitserver]="docker/gitserver.Dockerfile" + [email-worker]="docker/email-worker.Dockerfile" + [git-hook]="docker/git-hook.Dockerfile" + [migrate]="docker/migrate.Dockerfile" + [operator]="docker/operator.Dockerfile" +) + +# Filter by first argument if provided +TARGETS=("$@") +if [[ ${#TARGETS[@]} -eq 0 ]] || [[ "${TARGETS[0]}" == "all" ]]; then + TARGETS=("${!ALL_IMAGES[@]}") +fi + +for name in "${TARGETS[@]}"; do + df="${ALL_IMAGES[$name]}" + if [[ -z "$df" ]]; then + echo "ERROR: unknown image '$name'" + echo "Available: ${!ALL_IMAGES[@]}" + exit 1 + fi + if [[ ! 
-f "$df" ]]; then + echo "ERROR: $df not found" + exit 1 + fi + image="${REGISTRY}/${name}:${TAG}" + echo "==> Building $image" + docker build \ + --build-arg BUILD_TARGET="${BUILD_TARGET}" \ + -f "$df" \ + -t "$image" \ + . + echo "==> $image done" + echo "" +done + +echo "==> All images built:" +for name in "${TARGETS[@]}"; do + echo " ${REGISTRY}/${name}:${TAG}" +done diff --git a/docker/crd/app-crd.yaml b/docker/crd/app-crd.yaml new file mode 100644 index 0000000..94addce --- /dev/null +++ b/docker/crd/app-crd.yaml @@ -0,0 +1,127 @@ +apiVersion: apiextensions.k8s.io/v1 +kind: CustomResourceDefinition +metadata: + name: apps.code.dev + annotations: + controller-gen.kubebuilder.io/version: v0.16.0 +spec: + group: code.dev + names: + kind: App + listKind: AppList + plural: apps + singular: app + shortNames: + - app + scope: Namespaced + versions: + - name: v1 + served: true + storage: true + subresources: + status: {} + additionalPrinterColumns: + - name: Replicas + jsonPath: .spec.replicas + type: integer + - name: Ready + jsonPath: .status.phase + type: string + - name: Age + jsonPath: .metadata.creationTimestamp + type: date + schema: + openAPIV3Schema: + type: object + required: [spec] + properties: + apiVersion: + type: string + kind: + type: string + metadata: + type: object + spec: + type: object + required: [] + properties: + image: + type: string + default: myapp/app:latest + replicas: + type: integer + default: 3 + env: + type: array + items: + type: object + required: [name] + properties: + name: + type: string + value: + type: string + valueFrom: + type: object + properties: + secretRef: + type: object + required: [name, secretName, secretKey] + properties: + name: + type: string + secretName: + type: string + secretKey: + type: string + resources: + type: object + properties: + requests: + type: object + properties: + cpu: + type: string + memory: + type: string + limits: + type: object + properties: + cpu: + type: string + memory: + type: string + 
livenessProbe: + type: object + properties: + port: + type: integer + default: 8080 + path: + type: string + default: /health + initialDelaySeconds: + type: integer + default: 5 + readinessProbe: + type: object + properties: + port: + type: integer + default: 8080 + path: + type: string + default: /health + initialDelaySeconds: + type: integer + default: 5 + imagePullPolicy: + type: string + default: IfNotPresent + status: + type: object + properties: + readyReplicas: + type: integer + phase: + type: string diff --git a/docker/crd/email-worker-crd.yaml b/docker/crd/email-worker-crd.yaml new file mode 100644 index 0000000..ba22be2 --- /dev/null +++ b/docker/crd/email-worker-crd.yaml @@ -0,0 +1,94 @@ +apiVersion: apiextensions.k8s.io/v1 +kind: CustomResourceDefinition +metadata: + name: emailworkers.code.dev + annotations: + controller-gen.kubebuilder.io/version: v0.16.0 +spec: + group: code.dev + names: + kind: EmailWorker + listKind: EmailWorkerList + plural: emailworkers + singular: emailworker + shortNames: + - ew + scope: Namespaced + versions: + - name: v1 + served: true + storage: true + subresources: + status: {} + additionalPrinterColumns: + - name: Age + jsonPath: .metadata.creationTimestamp + type: date + schema: + openAPIV3Schema: + type: object + required: [spec] + properties: + apiVersion: + type: string + kind: + type: string + metadata: + type: object + spec: + type: object + required: [] + properties: + image: + type: string + default: myapp/email-worker:latest + env: + type: array + items: + type: object + required: [name] + properties: + name: + type: string + value: + type: string + valueFrom: + type: object + properties: + secretRef: + type: object + required: [name, secretName, secretKey] + properties: + name: + type: string + secretName: + type: string + secretKey: + type: string + resources: + type: object + properties: + requests: + type: object + properties: + cpu: + type: string + memory: + type: string + limits: + type: object + 
properties: + cpu: + type: string + memory: + type: string + imagePullPolicy: + type: string + default: IfNotPresent + status: + type: object + properties: + readyReplicas: + type: integer + phase: + type: string diff --git a/docker/crd/git-hook-crd.yaml b/docker/crd/git-hook-crd.yaml new file mode 100644 index 0000000..7123c85 --- /dev/null +++ b/docker/crd/git-hook-crd.yaml @@ -0,0 +1,96 @@ +apiVersion: apiextensions.k8s.io/v1 +kind: CustomResourceDefinition +metadata: + name: githooks.code.dev + annotations: + controller-gen.kubebuilder.io/version: v0.16.0 +spec: + group: code.dev + names: + kind: GitHook + listKind: GitHookList + plural: githooks + singular: githook + shortNames: + - ghk + scope: Namespaced + versions: + - name: v1 + served: true + storage: true + subresources: + status: {} + additionalPrinterColumns: + - name: Age + jsonPath: .metadata.creationTimestamp + type: date + schema: + openAPIV3Schema: + type: object + required: [spec] + properties: + apiVersion: + type: string + kind: + type: string + metadata: + type: object + spec: + type: object + required: [] + properties: + image: + type: string + default: myapp/git-hook:latest + env: + type: array + items: + type: object + required: [name] + properties: + name: + type: string + value: + type: string + valueFrom: + type: object + properties: + secretRef: + type: object + required: [name, secretName, secretKey] + properties: + name: + type: string + secretName: + type: string + secretKey: + type: string + resources: + type: object + properties: + requests: + type: object + properties: + cpu: + type: string + memory: + type: string + limits: + type: object + properties: + cpu: + type: string + memory: + type: string + imagePullPolicy: + type: string + default: IfNotPresent + workerId: + type: string + status: + type: object + properties: + readyReplicas: + type: integer + phase: + type: string diff --git a/docker/crd/gitserver-crd.yaml b/docker/crd/gitserver-crd.yaml new file mode 100644 index 
0000000..fa60874 --- /dev/null +++ b/docker/crd/gitserver-crd.yaml @@ -0,0 +1,108 @@ +apiVersion: apiextensions.k8s.io/v1 +kind: CustomResourceDefinition +metadata: + name: gitservers.code.dev + annotations: + controller-gen.kubebuilder.io/version: v0.16.0 +spec: + group: code.dev + names: + kind: GitServer + listKind: GitServerList + plural: gitservers + singular: gitserver + shortNames: + - gs + scope: Namespaced + versions: + - name: v1 + served: true + storage: true + subresources: + status: {} + additionalPrinterColumns: + - name: Age + jsonPath: .metadata.creationTimestamp + type: date + schema: + openAPIV3Schema: + type: object + required: [spec] + properties: + apiVersion: + type: string + kind: + type: string + metadata: + type: object + spec: + type: object + required: [] + properties: + image: + type: string + default: myapp/gitserver:latest + env: + type: array + items: + type: object + required: [name] + properties: + name: + type: string + value: + type: string + valueFrom: + type: object + properties: + secretRef: + type: object + required: [name, secretName, secretKey] + properties: + name: + type: string + secretName: + type: string + secretKey: + type: string + resources: + type: object + properties: + requests: + type: object + properties: + cpu: + type: string + memory: + type: string + limits: + type: object + properties: + cpu: + type: string + memory: + type: string + sshServiceType: + type: string + default: NodePort + storageSize: + type: string + default: 10Gi + imagePullPolicy: + type: string + default: IfNotPresent + sshDomain: + type: string + sshPort: + type: integer + default: 22 + httpPort: + type: integer + default: 8022 + status: + type: object + properties: + readyReplicas: + type: integer + phase: + type: string diff --git a/docker/crd/migrate-crd.yaml b/docker/crd/migrate-crd.yaml new file mode 100644 index 0000000..255d150 --- /dev/null +++ b/docker/crd/migrate-crd.yaml @@ -0,0 +1,87 @@ +apiVersion: apiextensions.k8s.io/v1 
+kind: CustomResourceDefinition +metadata: + name: migrates.code.dev + annotations: + controller-gen.kubebuilder.io/version: v0.16.0 +spec: + group: code.dev + names: + kind: Migrate + listKind: MigrateList + plural: migrates + singular: migrate + shortNames: + - mig + scope: Namespaced + versions: + - name: v1 + served: true + storage: true + subresources: + status: {} + additionalPrinterColumns: + - name: Status + jsonPath: .status.phase + type: string + - name: Age + jsonPath: .metadata.creationTimestamp + type: date + schema: + openAPIV3Schema: + type: object + required: [spec] + properties: + apiVersion: + type: string + kind: + type: string + metadata: + type: object + spec: + type: object + required: [] + properties: + image: + type: string + default: myapp/migrate:latest + env: + type: array + description: "Must include APP_DATABASE_URL" + items: + type: object + required: [name] + properties: + name: + type: string + value: + type: string + valueFrom: + type: object + properties: + secretRef: + type: object + required: [name, secretName, secretKey] + properties: + name: + type: string + secretName: + type: string + secretKey: + type: string + command: + type: string + default: up + description: "Migration command: up, down, fresh, refresh, reset" + backoffLimit: + type: integer + default: 3 + status: + type: object + properties: + phase: + type: string + startTime: + type: string + completionTime: + type: string diff --git a/docker/email-worker.Dockerfile b/docker/email-worker.Dockerfile new file mode 100644 index 0000000..62f8de0 --- /dev/null +++ b/docker/email-worker.Dockerfile @@ -0,0 +1,36 @@ +# ---- Stage 1: Build ---- +FROM rust:1.94-bookworm AS builder + +ARG BUILD_TARGET=x86_64-unknown-linux-gnu +ENV TARGET=${BUILD_TARGET} + +RUN apt-get update && apt-get install -y --no-install-recommends \ + pkg-config libssl-dev libclang-dev \ + gcc g++ make \ + && rm -rf /var/lib/apt/lists/* + +WORKDIR /build + +COPY Cargo.toml Cargo.lock ./ +COPY libs/ libs/ 
+COPY apps/email/ apps/email/ + +RUN cargo fetch + +RUN --mount=type=cache,target=/usr/local/cargo/registry \ + --mount=type=cache,target=/usr/local/cargo/git \ + --mount=type=cache,target=target \ + cargo build --release --package email-server --target ${TARGET} + +# ---- Stage 2: Runtime ---- +FROM debian:bookworm-slim + +RUN apt-get update && apt-get install -y --no-install-recommends \ + ca-certificates libssl3 \ + && rm -rf /var/lib/apt/lists/* + +WORKDIR /app +COPY --from=builder /build/target/${TARGET}/release/email-server /app/email-worker + +ENV APP_LOG_LEVEL=info +ENTRYPOINT ["/app/email-worker"] diff --git a/docker/git-hook.Dockerfile b/docker/git-hook.Dockerfile new file mode 100644 index 0000000..2dc885a --- /dev/null +++ b/docker/git-hook.Dockerfile @@ -0,0 +1,36 @@ +# ---- Stage 1: Build ---- +FROM rust:1.94-bookworm AS builder + +ARG BUILD_TARGET=x86_64-unknown-linux-gnu +ENV TARGET=${BUILD_TARGET} + +RUN apt-get update && apt-get install -y --no-install-recommends \ + pkg-config libssl-dev libgit2-dev zlib1g-dev libclang-dev \ + gcc g++ make \ + && rm -rf /var/lib/apt/lists/* + +WORKDIR /build + +COPY Cargo.toml Cargo.lock ./ +COPY libs/ libs/ +COPY apps/git-hook/ apps/git-hook/ + +RUN cargo fetch + +RUN --mount=type=cache,target=/usr/local/cargo/registry \ + --mount=type=cache,target=/usr/local/cargo/git \ + --mount=type=cache,target=target \ + cargo build --release --package git-hook --target ${TARGET} + +# ---- Stage 2: Runtime ---- +FROM debian:bookworm-slim + +RUN apt-get update && apt-get install -y --no-install-recommends \ + ca-certificates libssl3 openssh-client \ + && rm -rf /var/lib/apt/lists/* + +WORKDIR /app +COPY --from=builder /build/target/${TARGET}/release/git-hook /app/git-hook + +ENV APP_LOG_LEVEL=info +ENTRYPOINT ["/app/git-hook"] diff --git a/docker/gitserver.Dockerfile b/docker/gitserver.Dockerfile new file mode 100644 index 0000000..9ef0e71 --- /dev/null +++ b/docker/gitserver.Dockerfile @@ -0,0 +1,41 @@ +# ---- Stage 1: 
Build ---- +FROM rust:1.94-bookworm AS builder + +ARG BUILD_TARGET=x86_64-unknown-linux-gnu +ENV TARGET=${BUILD_TARGET} + +RUN apt-get update && apt-get install -y --no-install-recommends \ + pkg-config libssl-dev libgit2-dev zlib1g-dev libclang-dev \ + gcc g++ make \ + && rm -rf /var/lib/apt/lists/* + +WORKDIR /build + +COPY Cargo.toml Cargo.lock ./ +COPY libs/ libs/ +COPY apps/gitserver/ apps/gitserver/ + +RUN cargo fetch + +RUN --mount=type=cache,target=/usr/local/cargo/registry \ + --mount=type=cache,target=/usr/local/cargo/git \ + --mount=type=cache,target=target \ + cargo build --release --package gitserver --target ${TARGET} + +# ---- Stage 2: Runtime ---- +FROM debian:bookworm-slim AS runtime + +RUN apt-get update && apt-get install -y --no-install-recommends \ + ca-certificates libssl3 openssh-server \ + && rm -rf /var/lib/apt/lists/* + +# SSH requires host keys and proper permissions +RUN mkdir -p /run/sshd && \ + ssh-keygen -A && \ + chmod 755 /etc/ssh + +WORKDIR /app +COPY --from=builder /build/target/${TARGET}/release/gitserver /app/gitserver + +ENV APP_LOG_LEVEL=info +ENTRYPOINT ["/app/gitserver"] diff --git a/docker/migrate.Dockerfile b/docker/migrate.Dockerfile new file mode 100644 index 0000000..03da474 --- /dev/null +++ b/docker/migrate.Dockerfile @@ -0,0 +1,36 @@ +# ---- Stage 1: Build ---- +FROM rust:1.94-bookworm AS builder + +ARG BUILD_TARGET=x86_64-unknown-linux-gnu +ENV TARGET=${BUILD_TARGET} + +RUN apt-get update && apt-get install -y --no-install-recommends \ + pkg-config libssl-dev libclang-dev \ + gcc g++ make \ + && rm -rf /var/lib/apt/lists/* + +WORKDIR /build + +COPY Cargo.toml Cargo.lock ./ +COPY libs/ libs/ +COPY apps/migrate/ apps/migrate/ + +RUN cargo fetch + +RUN --mount=type=cache,target=/usr/local/cargo/registry \ + --mount=type=cache,target=/usr/local/cargo/git \ + --mount=type=cache,target=target \ + cargo build --release --package migrate-cli --target ${TARGET} + +# ---- Stage 2: Runtime ---- +FROM debian:bookworm-slim + 
+RUN apt-get update && apt-get install -y --no-install-recommends \ + ca-certificates libssl3 \ + && rm -rf /var/lib/apt/lists/* + +WORKDIR /app +COPY --from=builder /build/target/${TARGET}/release/migrate /app/migrate + +# Run migrations via: docker run --rm myapp/migrate up +ENTRYPOINT ["/app/migrate"] diff --git a/docker/operator.Dockerfile b/docker/operator.Dockerfile new file mode 100644 index 0000000..6e0b898 --- /dev/null +++ b/docker/operator.Dockerfile @@ -0,0 +1,39 @@ +# ---- Stage 1: Build ---- +FROM rust:1.94-bookworm AS builder + +ARG BUILD_TARGET=x86_64-unknown-linux-gnu +ENV TARGET=${BUILD_TARGET} + +RUN apt-get update && apt-get install -y --no-install-recommends \ + pkg-config libssl-dev libclang-dev \ + gcc g++ make \ + && rm -rf /var/lib/apt/lists/* + +WORKDIR /build + +COPY Cargo.toml Cargo.lock ./ +COPY libs/config/ libs/config/ +COPY apps/operator/ apps/operator/ + +RUN cargo fetch + +RUN --mount=type=cache,target=/usr/local/cargo/registry \ + --mount=type=cache,target=/usr/local/cargo/git \ + --mount=type=cache,target=target \ + cargo build --release --package operator --target ${TARGET} + +# ---- Stage 2: Runtime ---- +FROM debian:bookworm-slim + +RUN apt-get update && apt-get install -y --no-install-recommends \ + ca-certificates libssl3 \ + && rm -rf /var/lib/apt/lists/* + +WORKDIR /app +COPY --from=builder /build/target/${TARGET}/release/operator /app/operator + +# The operator reads POD_NAMESPACE and OPERATOR_IMAGE_PREFIX from env. +# It connects to the in-cluster Kubernetes API via the service account token. +# All child resources are created in the operator's own namespace. 
+ENV OPERATOR_LOG_LEVEL=info +ENTRYPOINT ["/app/operator"] diff --git a/docker/operator/deployment.yaml b/docker/operator/deployment.yaml new file mode 100644 index 0000000..aed54d1 --- /dev/null +++ b/docker/operator/deployment.yaml @@ -0,0 +1,128 @@ +# ---- Namespace ---- +apiVersion: v1 +kind: Namespace +metadata: + name: code-system +--- +# ---- ServiceAccount ---- +apiVersion: v1 +kind: ServiceAccount +metadata: + name: code-operator + namespace: code-system +--- +# ---- RBAC: Role ---- +apiVersion: rbac.authorization.k8s.io/v1 +kind: Role +metadata: + name: code-operator + namespace: code-system +rules: + # CRDs we manage + - apiGroups: ["code.dev"] + resources: ["apps", "gitservers", "emailworkers", "githooks", "migrates"] + verbs: ["get", "list", "watch", "create", "update", "patch", "delete"] + + # Status subresources + - apiGroups: ["code.dev"] + resources: ["apps/status", "gitservers/status", "emailworkers/status", "githooks/status", "migrates/status"] + verbs: ["get", "patch", "update"] + + # Child resources managed by App + - apiGroups: ["apps"] + resources: ["deployments"] + verbs: ["get", "list", "watch", "create", "update", "patch", "delete"] + - apiGroups: [""] + resources: ["services"] + verbs: ["get", "list", "watch", "create", "update", "patch", "delete"] + + # Child resources managed by GitServer + - apiGroups: [""] + resources: ["persistentvolumeclaims"] + verbs: ["get", "list", "watch", "create", "update", "patch", "delete"] + + # Child resources managed by GitHook + - apiGroups: [""] + resources: ["configmaps"] + verbs: ["get", "list", "watch", "create", "update", "patch", "delete"] + + # Child resources managed by Migrate + - apiGroups: ["batch"] + resources: ["jobs"] + verbs: ["get", "list", "watch", "create", "update", "patch", "delete", "deletecollection"] + + # Secrets (read-only for env var resolution) + - apiGroups: [""] + resources: ["secrets"] + verbs: ["get", "list", "watch"] +--- +# ---- RBAC: RoleBinding ---- +apiVersion: 
rbac.authorization.k8s.io/v1 +kind: RoleBinding +metadata: + name: code-operator + namespace: code-system +subjects: + - kind: ServiceAccount + name: code-operator + namespace: code-system +roleRef: + kind: Role + name: code-operator + apiGroup: rbac.authorization.k8s.io +--- +# ---- Deployment ---- +apiVersion: apps/v1 +kind: Deployment +metadata: + name: code-operator + namespace: code-system + labels: + app.kubernetes.io/name: code-operator + app.kubernetes.io/managed-by: code-operator + app.kubernetes.io/part-of: code-system +spec: + replicas: 1 + selector: + matchLabels: + app.kubernetes.io/name: code-operator + template: + metadata: + labels: + app.kubernetes.io/name: code-operator + app.kubernetes.io/managed-by: code-operator + app.kubernetes.io/part-of: code-system + spec: + serviceAccountName: code-operator + terminationGracePeriodSeconds: 10 + volumes: + - name: tmp + emptyDir: {} + containers: + - name: operator + image: myapp/operator:latest + imagePullPolicy: IfNotPresent + env: + - name: OPERATOR_IMAGE_PREFIX + value: "myapp/" + - name: OPERATOR_LOG_LEVEL + value: "info" + - name: POD_NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace + resources: + requests: + cpu: 10m + memory: 64Mi + limits: + memory: 256Mi + volumeMounts: + - name: tmp + mountPath: /tmp + securityContext: + allowPrivilegeEscalation: false + readOnlyRootFilesystem: true + capabilities: + drop: + - ALL diff --git a/docker/operator/example/code-system.yaml b/docker/operator/example/code-system.yaml new file mode 100644 index 0000000..006d5ca --- /dev/null +++ b/docker/operator/example/code-system.yaml @@ -0,0 +1,280 @@ +# Example: deploying the full code system into `code-system` namespace. +# +# Prerequisites: +# 1. Install CRDs: kubectl apply -f ../crd/ +# 2. 
Install Operator: kubectl apply -f ../operator/deployment.yaml +# +# Then apply this file: +# kubectl apply -f example/code-system.yaml + +apiVersion: v1 +kind: Secret +metadata: + name: app-secrets + namespace: code-system +type: Opaque +stringData: + APP_DATABASE_URL: "postgres://user:password@postgres:5432/codedb?sslmode=disable" + APP_REDIS_URLS: "redis://redis:6379" + APP_SMTP_HOST: "smtp.example.com" + APP_SMTP_PORT: "587" + APP_SMTP_USERNAME: "noreply@example.com" + APP_SMTP_PASSWORD: "change-me" + APP_SMTP_FROM: "noreply@example.com" + APP_AI_BASIC_URL: "https://api.openai.com/v1" + APP_AI_API_KEY: "sk-change-me" + APP_SSH_SERVER_PRIVATE_KEY: | + -----BEGIN OPENSSH PRIVATE KEY----- + ... paste your SSH private key here ... + -----END OPENSSH PRIVATE KEY----- + APP_SSH_SERVER_PUBLIC_KEY: "ssh-ed25519 AAAAC3... your-pub-key" +--- +# ---- App (main web service, 3 replicas) ---- +apiVersion: code.dev/v1 +kind: App +metadata: + name: app + namespace: code-system +spec: + image: myapp/app:latest + replicas: 3 + imagePullPolicy: IfNotPresent + env: + - name: APP_DATABASE_URL + valueFrom: + secretRef: + name: app-secrets + secretName: app-secrets + secretKey: APP_DATABASE_URL + - name: APP_REDIS_URLS + valueFrom: + secretRef: + name: app-secrets + secretName: app-secrets + secretKey: APP_REDIS_URLS + - name: APP_SMTP_HOST + valueFrom: + secretRef: + name: app-secrets + secretName: app-secrets + secretKey: APP_SMTP_HOST + - name: APP_SMTP_USERNAME + valueFrom: + secretRef: + name: app-secrets + secretName: app-secrets + secretKey: APP_SMTP_USERNAME + - name: APP_SMTP_PASSWORD + valueFrom: + secretRef: + name: app-secrets + secretName: app-secrets + secretKey: APP_SMTP_PASSWORD + - name: APP_SMTP_FROM + valueFrom: + secretRef: + name: app-secrets + secretName: app-secrets + secretKey: APP_SMTP_FROM + - name: APP_AI_BASIC_URL + valueFrom: + secretRef: + name: app-secrets + secretName: app-secrets + secretKey: APP_AI_BASIC_URL + - name: APP_AI_API_KEY + valueFrom: + 
secretRef: + name: app-secrets + secretName: app-secrets + secretKey: APP_AI_API_KEY + - name: APP_DOMAIN_URL + value: "https://example.com" + - name: APP_LOG_LEVEL + value: "info" + resources: + requests: + cpu: 100m + memory: 128Mi + limits: + cpu: 500m + memory: 512Mi + livenessProbe: + port: 8080 + path: /health + initialDelaySeconds: 10 + readinessProbe: + port: 8080 + path: /health + initialDelaySeconds: 5 +--- +# ---- GitServer (git HTTP + SSH, single instance) ---- +apiVersion: code.dev/v1 +kind: GitServer +metadata: + name: gitserver + namespace: code-system +spec: + image: myapp/gitserver:latest + imagePullPolicy: IfNotPresent + env: + - name: APP_DATABASE_URL + valueFrom: + secretRef: + name: app-secrets + secretName: app-secrets + secretKey: APP_DATABASE_URL + - name: APP_REDIS_URLS + valueFrom: + secretRef: + name: app-secrets + secretName: app-secrets + secretKey: APP_REDIS_URLS + - name: APP_SSH_SERVER_PRIVATE_KEY + valueFrom: + secretRef: + name: app-secrets + secretName: app-secrets + secretKey: APP_SSH_SERVER_PRIVATE_KEY + - name: APP_SSH_SERVER_PUBLIC_KEY + valueFrom: + secretRef: + name: app-secrets + secretName: app-secrets + secretKey: APP_SSH_SERVER_PUBLIC_KEY + - name: APP_SSH_DOMAIN + value: "git.example.com" + - name: APP_REPOS_ROOT + value: "/data/repos" + resources: + requests: + cpu: 100m + memory: 128Mi + limits: + cpu: 1000m + memory: 1Gi + sshServiceType: NodePort # Use LoadBalancer in production + sshPort: 22 + httpPort: 8022 + storageSize: 50Gi +--- +# ---- EmailWorker (single instance) ---- +apiVersion: code.dev/v1 +kind: EmailWorker +metadata: + name: email-worker + namespace: code-system +spec: + image: myapp/email-worker:latest + imagePullPolicy: IfNotPresent + env: + - name: APP_DATABASE_URL + valueFrom: + secretRef: + name: app-secrets + secretName: app-secrets + secretKey: APP_DATABASE_URL + - name: APP_REDIS_URLS + valueFrom: + secretRef: + name: app-secrets + secretName: app-secrets + secretKey: APP_REDIS_URLS + - name: 
APP_SMTP_HOST + valueFrom: + secretRef: + name: app-secrets + secretName: app-secrets + secretKey: APP_SMTP_HOST + - name: APP_SMTP_USERNAME + valueFrom: + secretRef: + name: app-secrets + secretName: app-secrets + secretKey: APP_SMTP_USERNAME + - name: APP_SMTP_PASSWORD + valueFrom: + secretRef: + name: app-secrets + secretName: app-secrets + secretKey: APP_SMTP_PASSWORD + - name: APP_SMTP_FROM + valueFrom: + secretRef: + name: app-secrets + secretName: app-secrets + secretKey: APP_SMTP_FROM + resources: + requests: + cpu: 50m + memory: 64Mi + limits: + memory: 256Mi +--- +# ---- GitHook (single instance) ---- +apiVersion: code.dev/v1 +kind: GitHook +metadata: + name: git-hook + namespace: code-system +spec: + image: myapp/git-hook:latest + imagePullPolicy: IfNotPresent + env: + - name: APP_DATABASE_URL + valueFrom: + secretRef: + name: app-secrets + secretName: app-secrets + secretKey: APP_DATABASE_URL + - name: APP_REDIS_URLS + valueFrom: + secretRef: + name: app-secrets + secretName: app-secrets + secretKey: APP_REDIS_URLS + resources: + requests: + cpu: 50m + memory: 64Mi + limits: + memory: 256Mi +--- +# ---- Migrate (auto-triggered on apply) ---- +apiVersion: code.dev/v1 +kind: Migrate +metadata: + name: migrate + namespace: code-system +spec: + image: myapp/migrate:latest + command: up + backoffLimit: 3 + env: + - name: APP_DATABASE_URL + valueFrom: + secretRef: + name: app-secrets + secretName: app-secrets + secretKey: APP_DATABASE_URL +--- +# ---- Ingress (example for App) ---- +apiVersion: networking.k8s.io/v1 +kind: Ingress +metadata: + name: app-ingress + namespace: code-system + annotations: + nginx.ingress.kubernetes.io/proxy-body-size: "100m" +spec: + rules: + - host: example.com + http: + paths: + - path: / + pathType: Prefix + backend: + service: + name: app + port: + number: 80 diff --git a/docs/ARCHITECTURE-LAYERS.md b/docs/ARCHITECTURE-LAYERS.md new file mode 100644 index 0000000..2332600 --- /dev/null +++ b/docs/ARCHITECTURE-LAYERS.md @@ -0,0 
+1,903 @@ +# Code 项目架构分层图 + +> 一个现代化的代码协作与团队沟通平台 +> +> 技术栈:Rust (后端) + TypeScript/React (前端) + Kubernetes (部署) + +--- + +## 系统全景架构 + +``` +┌─────────────────────────────────────────────────────────────────────────────────────────────────────┐ +│ 用 户 层 │ +│ │ +│ ┌──────────────────┐ ┌──────────────────┐ ┌──────────────────┐ │ +│ │ Web 浏览器 │ │ Git 客户端 │ │ 外部 CI/CD │ │ +│ │ (React SPA) │ │ (git/SSH) │ │ (GitHub/GitLab) │ │ +│ └────────┬─────────┘ └────────┬─────────┘ └────────┬─────────┘ │ +└──────────────────┼────────────────────────────────┼────────────────────────────────┼────────────────┘ + │ │ │ + │ HTTP/WS │ Git Protocol │ Webhook + │ │ │ +┌──────────────────▼────────────────────────────────▼────────────────────────────────▼────────────────┐ +│ 接入层 (Ingress/LB) │ +│ │ +│ ┌──────────────────────────────────────────────────────────────────────────────────┐ │ +│ │ Load Balancer / K8s Ingress (:80/:443) │ │ +│ └──────────────────────┬──────────────────────┬──────────────────────┬─────────────┘ │ +└────────────────────────────────┼──────────────────────┼──────────────────────┼──────────────────────┘ + │ │ │ + │ REST API │ Git Ops │ Webhook + │ │ │ +┌────────────────────────────────▼──────────────────────▼──────────────────────▼──────────────────────┐ +│ 应 用 服 务 层 (apps/) │ +│ │ +│ ┌────────────────────┐ ┌────────────────────┐ ┌────────────────────┐ ┌────────────────────┐ │ +│ │ apps/app │ │ apps/gitserver │ │ apps/git-hook │ │ apps/email │ │ +│ │ 主 Web API 服务 │ │ Git HTTP/SSH 服务 │ │ Git Hook 处理器 │ │ 邮件发送 Worker │ │ +│ │ :8080 │ │ :8021/:2222 │ │ Worker │ │ Worker │ │ +│ │ HTTP + WebSocket │ │ HTTP + SSH │ │ 异步任务 │ │ 队列消费 │ │ +│ │ 多实例部署 │ │ 单实例 │ │ 单实例 │ │ 单实例 │ │ +│ └─────────┬──────────┘ └─────────┬──────────┘ └─────────┬──────────┘ └─────────┬──────────┘ │ +└─────────────┼───────────────────────┼───────────────────────┼───────────────────────┼───────────────┘ + │ │ │ │ + │ │ │ │ 
+┌─────────────▼───────────────────────▼───────────────────────▼───────────────────────▼───────────────┐ +│ 应 用 编 排 层 (apps/operator) │ +│ │ +│ ┌───────────────────────────────────────────────────────────────────────────────────────┐ │ +│ │ apps/operator (Kubernetes Operator) │ │ +│ │ │ │ +│ │ ┌──────────┐ ┌──────────┐ ┌──────────┐ ┌──────────┐ ┌──────────┐ │ │ +│ │ │ App CRD │ │GitSrv CRD│ │Email CRD │ │Hook CRD │ │Mig CRD │ │ │ +│ │ └────┬─────┘ └────┬─────┘ └────┬─────┘ └────┬─────┘ └────┬─────┘ │ │ +│ │ │ │ │ │ │ │ │ +│ │ ▼ ▼ ▼ ▼ ▼ │ │ +│ │ ┌──────────────────────────────────────────────────────────────────────────┐ │ │ +│ │ │ K8s 资源 (Deployments, Services, PVCs, Jobs) │ │ │ +│ │ └──────────────────────────────────────────────────────────────────────────┘ │ │ +│ └───────────────────────────────────────────────────────────────────────────────────────┘ │ +└────────────────────────────────────────────────────────────────────────────────────────────────────┘ + │ │ │ │ + │ │ │ │ +┌─────────────▼───────────────────────▼───────────────────────▼───────────────────────▼───────────────┐ +│ 业 务 逻 辑 层 (libs/service) │ +│ │ +│ ┌─────────────────────────────────────────────────────────────────────────────────────────┐ │ +│ │ AppService { 全局服务聚合 } │ │ +│ │ │ │ +│ │ ┌─────────┐ ┌────────┐ ┌─────┐ ┌──────────┐ ┌───────────┐ ┌────────┐ ┌───────────┐ │ │ +│ │ │ agent/ │ │ auth/ │ │git/ │ │ issue/ │ │ project/ │ │ user/ │ │ pull_req/ │ │ │ +│ │ │ (8文件) │ │ (10) │ │(16) │ │ (8) │ │ (20) │ │ (12) │ │ (5) │ │ │ +│ │ │ AI模型 │ │ 认证 │ │Git │ │ Issue │ │ 项目管理 │ │ 用户 │ │ PR审查 │ │ │ +│ │ │ 管理 │ │ 会话 │ │操作 │ │ 追踪 │ │ 权限控制 │ │ 偏好 │ │ 合并 │ │ │ +│ │ └─────────┘ └────────┘ └─────┘ └──────────┘ └───────────┘ └────────┘ └───────────┘ │ │ +│ │ │ │ +│ │ + utils/(project,repo,user) + ws_token + error + Pager │ │ +│ └──────────────────────────────────────┬──────────────────────────────────────────────────┘ │ 
+└──────────────────────────────────────────┼──────────────────────────────────────────────────────────┘ + │ + ┌──────────────────────┼──────────────────────┐ + │ │ │ +┌───────────────────▼──────────┐ ┌────────▼─────────────┐ ┌──────▼────────────────────────────┐ +│ HTTP 路由层 (libs/api) │ │ WebSocket 层 │ │ 后台 Worker 层 │ +│ 100 个路由文件 │ │ (libs/room) │ │ │ +│ │ │ │ │ libs/queue: │ +│ /api/auth/* (9端点) │ │ /ws │ │ MessageProducer │ +│ /api/git/* (100+端点) │ │ /ws/rooms/{id} │ │ RedisPubSub │ +│ /api/projects/* (50+端点) │ │ /ws/projects/{id} │ │ room_worker_task │ +│ /api/issue/* (30+端点) │ │ │ │ start_email_worker │ +│ /api/room/* (40+端点) │ │ 实时消息广播 │ │ │ +│ /api/pull_request/* (20端点)│ │ 多实例同步 │ │ libs/git/hook: │ +│ /api/agent/* (15端点) │ │ AI 流式输出 │ │ GitServiceHooks │ +│ /api/user/* (20端点) │ │ │ │ GitHookPool │ +│ /api/openapi/* (文档) │ │ │ │ │ +└───────────┬────────────────┘ └──────────┬───────────┘ └─────────────┬───────────────────────┘ + │ │ │ + └─────────────────────────────┼───────────────────────────┘ + │ +┌─────────────────────────────────────────▼────────────────────────────────────────────────────────┐ +│ 基 础 设 施 层 (Infrastructure Libs) │ +│ │ +│ ┌───────────────────┐ ┌───────────────────┐ ┌───────────────────┐ ┌───────────────────┐ │ +│ │ libs/models │ │ libs/db │ │ libs/config │ │ libs/session │ │ +│ │ 92 个实体文件 │ │ 数据库连接池 │ │ 全局配置管理 │ │ 会话管理中间件 │ │ +│ │ Sea-ORM 实体定义 │ │ 缓存抽象 │ │ .env 加载 │ │ Redis Store │ │ +│ │ 类型别名 │ │ 重试机制 │ │ 12 子模块 │ │ JWT + Cookie │ │ +│ └─────────┬─────────┘ └─────────┬─────────┘ └─────────┬─────────┘ └─────────┬─────────┘ │ +│ │ │ │ │ │ +│ ┌─────────▼─────────┐ ┌────────▼─────────┐ ┌─────────▼─────────┐ ┌─────────▼─────────┐ │ +│ │ libs/git │ │ libs/agent │ │ libs/email │ │ libs/avatar │ │ +│ │ 19 子模块 │ │ 6 子模块 │ │ SMTP 邮件发送 │ │ 图片处理 │ │ +│ │ libgit2 封装 │ │ OpenAI 集成 │ │ lettre 客户端 │ │ image crate │ │ +│ │ HTTP + SSH 协议 │ │ Qdrant 向量库 │ │ 模板引擎 │ │ 缩放/裁剪 │ │ +│ └─────────┬─────────┘ └─────────┬─────────┘ └─────────┬─────────┘ 
└─────────┬─────────┘ │ +│ │ │ │ │ │ +│ ┌─────────▼─────────┐ ┌────────▼─────────┐ ┌─────────▼───────────────────────▼─────────┐ │ +│ │ libs/queue │ │ libs/room │ │ libs/migrate │ │ +│ │ 消息队列 │ │ 实时聊天室 │ │ 82+ 数据库迁移脚本 │ │ +│ │ Redis Streams │ │ 19 子模块 │ │ sea-orm-migration │ │ +│ │ Pub/Sub │ │ WebSocket 管理 │ │ up/down/fresh/refresh/reset │ │ +│ └─────────┬─────────┘ └─────────┬─────────┘ └─────────────────────────────────────────┘ │ +│ │ │ │ +│ ┌─────────▼─────────┐ ┌────────▼─────────┐ │ +│ │ libs/webhook │ │ libs/rpc │ libs/transport │ +│ │ (占位) │ │ (占位) │ (占位) │ +│ └───────────────────┘ └─────────────────┘ │ +└────────────────────────────────────────────────────────────────────────────────────────────────┘ + │ │ + │ │ +┌─────────────▼──────────────────────▼────────────────────────────────────────────────────────────┐ +│ 存 储 层 │ +│ │ +│ ┌──────────────────┐ ┌──────────────────┐ ┌──────────────────┐ ┌──────────────────┐ │ +│ │ PostgreSQL │ │ Redis │ │ Qdrant │ │ 文件系统 │ │ +│ │ :5432 │ │ :6379 │ │ :6333 │ │ │ │ +│ │ │ │ │ │ │ │ /data/avatars │ │ +│ │ • 用户数据 │ │ • 会话存储 │ │ • 向量嵌入 │ │ /data/repos │ │ +│ │ • 项目/仓库 │ │ • 缓存数据 │ │ • AI 索引 │ │ • 头像图片 │ │ +│ │ • Issue/PR │ │ • Pub/Sub │ │ • 相似度检索 │ │ • Git 仓库 │ │ +│ │ • Room 消息 │ │ • Stream 队列 │ │ │ │ • 上传文件 │ │ +│ │ • 评论/标签 │ │ • Hook 队列 │ │ │ │ │ │ +│ └──────────────────┘ └──────────────────┘ └──────────────────┘ └──────────────────┘ │ +└────────────────────────────────────────────────────────────────────────────────────────────────┘ + │ + │ 外部 API + │ +┌─────────────▼────────────────────────────────────────────────────────────────────────────────────┐ +│ 外 部 服 务 │ +│ │ +│ ┌──────────────────┐ ┌──────────────────┐ ┌──────────────────┐ │ +│ │ SMTP 服务器 │ │ OpenAI API │ │ Embedding API │ │ +│ │ :587 │ │ HTTPS │ │ HTTPS │ │ +│ │ │ │ │ │ │ │ +│ │ • 邮件发送 │ │ • 聊天补全 │ │ • 文本向量化 │ │ +│ │ • 通知邮件 │ │ • AI 助手 │ │ • 相似度计算 │ │ +│ └──────────────────┘ └──────────────────┘ └──────────────────┘ │ 
+└────────────────────────────────────────────────────────────────────────────────────────────────┘ +``` + +--- + +## 前端架构分层 + +``` +┌────────────────────────────────────────────────────────────────────────────────────────────────┐ +│ 前 端 应 用 层 (src/) │ +│ │ +│ ┌──────────────────────────────────────────────────────────────────────────────────────────┐ │ +│ │ Vite + React + TypeScript │ │ +│ │ │ │ +│ │ src/main.tsx ──▶ App.tsx ──▶ BrowserRouter ──▶ Routes │ │ +│ │ │ │ +│ │ ┌───────────────────────┐ ┌───────────────────────┐ ┌───────────────────────┐ │ │ +│ │ │ 页面层 (app/) │ │ 组件层 (components/) │ │ 状态管理层 │ │ │ +│ │ │ 59 页面组件 │ │ 108 UI 组件 │ │ │ │ │ +│ │ │ │ │ │ │ TanStack Query │ │ │ +│ │ │ auth/ (4) │ │ ui/ (66) │ │ (服务端状态) │ │ │ +│ │ │ init/ (2) │ │ room/ (20) │ │ │ │ │ +│ │ │ user/ (1) │ │ repository/ (8) │ │ React Context │ │ │ +│ │ │ project/ (22) │ │ project/ (4) │ │ (全局状态) │ │ │ +│ │ │ repository/ (12) │ │ auth/ (2) │ │ │ │ │ +│ │ │ settings/ (8) │ │ layout/ (2) │ │ Local State │ │ │ +│ │ │ │ │ │ │ (组件状态) │ │ │ +│ │ └───────────┬───────────┘ └───────────┬───────────┘ └───────────┬────────────┘ │ │ +│ │ │ │ │ │ │ +│ │ └────────────────────────────┼────────────────────────────┘ │ │ +│ │ │ │ │ +│ │ ┌────────────────────────────────────────┼────────────────────────────────────────┐ │ │ +│ │ │ API 客户端层 │ │ │ +│ │ │ │ │ │ +│ │ │ src/client/ ──▶ openapi-ts 自动生成 (从 openapi.json) │ │ │ +│ │ │ 400+ API 函数 + 完整 TypeScript 类型 │ │ │ +│ │ │ Axios HTTP 客户端 │ │ │ +│ │ └──────────────────────────────────────────────────────────────────────────────────┘ │ │ +│ │ │ │ +│ │ ┌──────────────────────────────────────────────────────────────────────────────────┐ │ │ +│ │ │ 工具层 │ │ │ +│ │ │ │ │ │ +│ │ │ src/hooks/ ──▶ 自定义 React Hooks │ │ │ +│ │ │ src/lib/ ──▶ 工具函数 (api-error, rsa, date 等) │ │ │ +│ │ │ src/contexts/ ──▶ React Context (User, Theme 等) │ │ │ +│ │ │ src/assets/ ──▶ 静态资源 (图片、图标) │ │ │ +│ │ └──────────────────────────────────────────────────────────────────────────────────┘ │ 
│ +│ └──────────────────────────────────────────────────────────────────────────────────────────┘ │ +└────────────────────────────────────────────────────────────────────────────────────────────────┘ +``` + +--- + +## 前端路由结构 + +``` +/ 首页/仪表板 +│ +├── /auth/ 认证路由 +│ ├── /login 登录页 +│ ├── /register 注册页 +│ ├── /password/reset 密码重置 +│ └── /verify-email 邮箱验证 +│ +├── /init/ 初始化路由 +│ ├── /project 初始化项目 +│ └── /repository 初始化仓库 +│ +├── /user/:user 用户资料页 +│ +├── /settings/ 个人设置 +│ ├── /profile 个人资料 +│ ├── /account 账户设置 +│ ├── /security 安全设置 +│ ├── /tokens 访问令牌 +│ ├── /ssh-keys SSH 密钥 +│ ├── /preferences 偏好设置 +│ └── /activity 活动日志 +│ +├── /project/:project_name/ 项目路由 +│ ├── / 项目概览 +│ ├── /activity 项目活动 +│ ├── /repositories 仓库列表 +│ ├── /issues Issue 列表 +│ │ ├── /new 新建 Issue +│ │ └── /:issueNumber Issue 详情 +│ ├── /boards 看板列表 +│ │ └── /:boardId 看板详情 +│ ├── /members 成员管理 +│ ├── /room 聊天室列表 +│ │ └── /:roomId 聊天室 +│ ├── /articles 文章 +│ ├── /resources 资源 +│ └── /settings/ 项目设置 +│ ├── /general 通用设置 +│ ├── /labels 标签管理 +│ ├── /billing 账单 +│ ├── /members 成员管理 +│ ├── /oauth OAuth 配置 +│ └── /webhook Webhook 管理 +│ +├── /repository/:namespace/:repoName/ 仓库路由 +│ ├── / 仓库概览 +│ ├── /branches 分支管理 +│ ├── /commits 提交历史 +│ │ └── /:oid 提交详情 +│ ├── /contributors 贡献者 +│ ├── /files 文件浏览 +│ ├── /tags 标签 +│ ├── /pull-requests PR 列表 +│ │ ├── /new 新建 PR +│ │ └── /:prNumber PR 详情 +│ └── /settings 仓库设置 +│ +├── /search 全局搜索 +└── /notifications 通知中心 +``` + +--- + +## 后端服务依赖关系 + +``` +┌──────────────────────────────────────────────────────────────────────────────────────┐ +│ apps/ 应用依赖关系 │ +│ │ +│ apps/app ────────────────┐ │ +│ apps/email ──────────────┤ │ +│ apps/git-hook ───────────┤──▶ libs/config (全局配置) │ +│ apps/gitserver ──────────┤──▶ libs/db (数据库连接池 + 缓存) │ +│ apps/migrate ────────────┤──▶ libs/session (会话管理) │ +│ apps/operator ───────────┘──▶ libs/migrate (数据库迁移) │ +│ ├──▶ libs/service (业务逻辑层) │ +│ │ │ │ +│ │ ├──▶ libs/api (HTTP 路由) │ +│ │ │ │ +│ │ ├──▶ libs/agent (AI 服务) │ +│ │ ├──▶ libs/avatar 
(头像处理) │ +│ │ ├──▶ libs/email (邮件发送) │ +│ │ ├──▶ libs/room (聊天室) │ +│ │ │ │ │ +│ │ │ └──▶ libs/queue (消息队列) │ +│ │ │ │ +│ │ └──▶ libs/git (Git 操作) │ +│ │ │ │ +│ │ ├──▶ git2 (libgit2 绑定) │ +│ │ ├──▶ git2-hooks (Git 钩子) │ +│ │ └──▶ russh (SSH 协议) │ +│ │ │ +│ └──▶ libs/models (数据模型 - 所有层共享) │ +│ │ │ +│ ├──▶ users/ (12 实体) │ +│ ├──▶ projects/ (19 实体) │ +│ ├──▶ repos/ (16 实体) │ +│ ├──▶ issues/ (10 实体) │ +│ ├──▶ pull_request/ (5 实体) │ +│ ├──▶ rooms/ (11 实体) │ +│ ├──▶ agents/ (6 实体) │ +│ ├──▶ ai/ (3 实体) │ +│ └──▶ system/ (3 实体) │ +└──────────────────────────────────────────────────────────────────────────────────────┘ +``` + +--- + +## libs/models 实体分组 + +``` +┌─────────────────────────────────────────────────────────────────────────┐ +│ libs/models 实体分组 (92 个) │ +├─────────────────────────────────────────────────────────────────────────┤ +│ │ +│ Users (12 实体) │ +│ ┌─────────────────────────────────────────────────────────────────┐ │ +│ │ user 用户基本信息 │ │ +│ │ user_2fa 双因素认证 │ │ +│ │ user_activity_log 用户活动日志 │ │ +│ │ user_email 用户邮箱 │ │ +│ │ user_email_change 邮箱变更历史 │ │ +│ │ user_notification 用户通知 │ │ +│ │ user_password 用户密码 │ │ +│ │ user_password_reset 密码重置令牌 │ │ +│ │ user_preferences 用户偏好设置 │ │ +│ │ user_relation 用户关系 │ │ +│ │ user_ssh_key SSH 密钥 │ │ +│ │ user_token 访问令牌 │ │ +│ └─────────────────────────────────────────────────────────────────┘ │ +│ │ +│ Projects (19 实体) │ +│ ┌─────────────────────────────────────────────────────────────────┐ │ +│ │ project 项目基本信息 │ │ +│ │ project_access_log 访问日志 │ │ +│ │ project_activity 活动记录 │ │ +│ │ project_audit_log 审计日志 │ │ +│ │ project_billing 账单信息 │ │ +│ │ project_billing_history 账单历史 │ │ +│ │ project_board 看板 │ │ +│ │ project_board_card 看板卡片 │ │ +│ │ project_board_column 看板列 │ │ +│ │ project_follow 项目关注 │ │ +│ │ project_history_name 历史名称 │ │ +│ │ project_label 项目标签 │ │ +│ │ project_like 项目点赞 │ │ +│ │ project_member_ 成员邀请 │ │ +│ │ invitations │ │ +│ │ project_member_join_ 加入问答 │ │ +│ │ answers │ │ +│ │ project_member_join_ 加入请求 │ 
│ +│ │ request │ │ +│ │ project_member_join_ 加入设置 │ │ +│ │ settings │ │ +│ │ project_members 项目成员 │ │ +│ │ project_watch 项目观看 │ │ +│ └─────────────────────────────────────────────────────────────────┘ │ +│ │ +│ Repos (16 实体) │ +│ ┌─────────────────────────────────────────────────────────────────┐ │ +│ │ repo 仓库基本信息 │ │ +│ │ repo_branch 分支信息 │ │ +│ │ repo_branch_protect 分支保护 │ │ +│ │ repo_collaborator 协作者 │ │ +│ │ repo_commit 提交记录 │ │ +│ │ repo_fork 仓库 Fork │ │ +│ │ repo_history_name 历史名称 │ │ +│ │ repo_hook Git 钩子 │ │ +│ │ repo_lfs_lock LFS 锁定 │ │ +│ │ repo_lfs_object LFS 对象 │ │ +│ │ repo_lock 仓库锁定 │ │ +│ │ repo_star 仓库星标 │ │ +│ │ repo_tag 仓库标签 │ │ +│ │ repo_upstream 上游仓库 │ │ +│ │ repo_watch 仓库观看 │ │ +│ │ repo_webhook 仓库 Webhook │ │ +│ └─────────────────────────────────────────────────────────────────┘ │ +│ │ +│ Issues (10 实体) │ +│ ┌─────────────────────────────────────────────────────────────────┐ │ +│ │ issue Issue 基本信息 │ │ +│ │ issue_assignee Issue 负责人 │ │ +│ │ issue_comment Issue 评论 │ │ +│ │ issue_comment_reaction 评论表情 │ │ +│ │ issue_label Issue 标签 │ │ +│ │ issue_pull_request Issue 关联 PR │ │ +│ │ issue_reaction Issue 表情 │ │ +│ │ issue_repo Issue 仓库 │ │ +│ │ issue_subscriber Issue 订阅者 │ │ +│ └─────────────────────────────────────────────────────────────────┘ │ +│ │ +│ Pull Requests (5 实体) │ +│ ┌─────────────────────────────────────────────────────────────────┐ │ +│ │ pull_request PR 基本信息 │ │ +│ │ pull_request_commit PR 提交记录 │ │ +│ │ pull_request_review PR 审查 │ │ +│ │ pull_request_review_ PR 审查评论 │ │ +│ │ comment │ │ +│ │ pull_request_review_ PR 审查请求 │ │ +│ │ request │ │ +│ └─────────────────────────────────────────────────────────────────┘ │ +│ │ +│ Rooms (11 实体) │ +│ ┌─────────────────────────────────────────────────────────────────┐ │ +│ │ room 聊天室基本信息 │ │ +│ │ room_ai 聊天室 AI 配置 │ │ +│ │ room_category 聊天室分类 │ │ +│ │ room_member 聊天室成员 │ │ +│ │ room_message 聊天消息 │ │ +│ │ room_message_edit_ 消息编辑历史 │ │ +│ │ history │ │ +│ │ room_message_reaction 消息表情 │ │ +│ │ 
room_notifications 聊天室通知 │ │ +│ │ room_pin 聊天室置顶 │ │ +│ │ room_thread 聊天室 Thread │ │ +│ └─────────────────────────────────────────────────────────────────┘ │ +│ │ +│ Agents (6 实体) │ +│ ┌─────────────────────────────────────────────────────────────────┐ │ +│ │ model AI 模型 │ │ +│ │ model_capability 模型能力 │ │ +│ │ model_parameter_profile 模型参数配置 │ │ +│ │ model_pricing 模型定价 │ │ +│ │ model_provider 模型提供商 │ │ +│ │ model_version 模型版本 │ │ +│ └─────────────────────────────────────────────────────────────────┘ │ +│ │ +│ AI (3 实体) │ +│ ┌─────────────────────────────────────────────────────────────────┐ │ +│ │ ai_session AI 会话 │ │ +│ │ ai_tool_auth AI 工具认证 │ │ +│ │ ai_tool_call AI 工具调用 │ │ +│ └─────────────────────────────────────────────────────────────────┘ │ +│ │ +│ System (3 实体) │ +│ ┌─────────────────────────────────────────────────────────────────┐ │ +│ │ label 系统标签 │ │ +│ │ notify 系统通知 │ │ +│ └─────────────────────────────────────────────────────────────────┘ │ +└─────────────────────────────────────────────────────────────────────────┘ +``` + +--- + +## libs/service 业务模块 + +``` +┌─────────────────────────────────────────────────────────────────────────┐ +│ libs/service 业务模块 (93 个文件) │ +├─────────────────────────────────────────────────────────────────────────┤ +│ │ +│ agent/ AI 模型管理 (8 文件) │ +│ ├── code_review AI 代码审查 │ +│ ├── model AI 模型管理 │ +│ ├── model_capability 模型能力管理 │ +│ ├── model_parameter_ 模型参数配置 │ +│ │ profile │ +│ ├── model_pricing 模型定价管理 │ +│ ├── model_version 模型版本管理 │ +│ ├── pr_summary PR 摘要生成 │ +│ └── provider 模型提供商管理 │ +│ │ +│ auth/ 认证管理 (10 文件) │ +│ ├── captcha 验证码管理 │ +│ ├── email 邮箱认证 │ +│ ├── login 登录逻辑 │ +│ ├── logout 登出逻辑 │ +│ ├── me 当前用户信息 │ +│ ├── password 密码管理 │ +│ ├── register 注册逻辑 │ +│ ├── rsa RSA 加密 │ +│ └── totp TOTP 双因素认证 │ +│ │ +│ git/ Git 操作 (16 文件) │ +│ ├── archive 仓库归档 │ +│ ├── blocking 阻塞操作 │ +│ ├── blame Git Blame │ +│ ├── blob Blob 操作 │ +│ ├── branch 分支操作 │ +│ ├── branch_ 分支保护 │ +│ │ protection │ +│ ├── commit 提交操作 │ +│ ├── 
contributors 贡献者统计 │ +│ ├── diff Diff 操作 │ +│ ├── init 仓库初始化 │ +│ ├── refs 引用操作 │ +│ ├── repo 仓库操作 │ +│ ├── star 星标操作 │ +│ ├── tag 标签操作 │ +│ ├── tree 树操作 │ +│ └── watch 观看操作 │ +│ │ +│ issue/ Issue 管理 (8 文件) │ +│ ├── assignee 负责人管理 │ +│ ├── comment 评论管理 │ +│ ├── issue Issue CRUD │ +│ ├── label 标签管理 │ +│ ├── pull_request Issue 关联 PR │ +│ ├── reaction 表情回应 │ +│ ├── repo 仓库 Issue │ +│ └── subscriber 订阅者管理 │ +│ │ +│ project/ 项目管理 (20 文件) │ +│ ├── activity 项目活动 │ +│ ├── audit 审计日志 │ +│ ├── avatar 项目头像 │ +│ ├── billing 账单管理 │ +│ ├── board 看板管理 │ +│ ├── can_use 权限检查 │ +│ ├── info 项目信息 │ +│ ├── init 项目初始化 │ +│ ├── invitation 邀请管理 │ +│ ├── join_answers 加入问答 │ +│ ├── join_request 加入请求 │ +│ ├── join_settings 加入设置 │ +│ ├── labels 标签管理 │ +│ ├── like 点赞管理 │ +│ ├── members 成员管理 │ +│ ├── repo 仓库管理 │ +│ ├── repo_ 仓库权限 │ +│ │ permission │ +│ ├── settings 项目设置 │ +│ ├── standard 项目标准 │ +│ ├── transfer_repo 仓库转移 │ +│ └── watch 观看管理 │ +│ │ +│ pull_request/ PR 管理 (5 文件) │ +│ ├── merge PR 合并 │ +│ ├── pull_request PR CRUD │ +│ ├── review PR 审查 │ +│ ├── review_comment 审查评论 │ +│ └── review_request 审查请求 │ +│ │ +│ user/ 用户管理 (12 文件) │ +│ ├── access_key 访问密钥 │ +│ ├── avatar 用户头像 │ +│ ├── chpc 用户 CHPC │ +│ ├── notification 通知管理 │ +│ ├── notify 通知发送 │ +│ ├── preferences 偏好设置 │ +│ ├── profile 用户资料 │ +│ ├── projects 用户项目 │ +│ ├── repository 用户仓库 │ +│ ├── ssh_key SSH 密钥 │ +│ ├── subscribe 订阅管理 │ +│ └── user_info 用户信息 │ +│ │ +│ utils/ 工具函数 (3 文件) │ +│ ├── project 项目工具 │ +│ ├── repo 仓库工具 │ +│ └── user 用户工具 │ +│ │ +│ ws_token WebSocket Token 服务 │ +│ error 服务层错误 │ +│ Pager 分页结构体 │ +└─────────────────────────────────────────────────────────────────────────┘ +``` + +--- + +## libs/api 路由模块 + +``` +┌─────────────────────────────────────────────────────────────────────────┐ +│ libs/api 路由模块 (100 个文件) │ +├─────────────────────────────────────────────────────────────────────────┤ +│ │ +│ auth/ 认证路由 (9 文件) │ +│ ├── captcha 验证码接口 │ +│ ├── email 邮箱认证接口 │ +│ ├── login 登录接口 │ +│ ├── logout 登出接口 │ +│ ├── me 当前用户接口 │ 
+│ ├── password 密码接口 │ +│ ├── register 注册接口 │ +│ ├── totp TOTP 接口 │ +│ └── ws_token WebSocket Token 接口 │ +│ │ +│ git/ Git 路由 (18 文件) │ +│ ├── archive 归档接口 │ +│ ├── blame Blame 接口 │ +│ ├── blob Blob 接口 │ +│ ├── branch 分支接口 │ +│ ├── branch_ 分支保护接口 │ +│ │ protection │ +│ ├── commit 提交接口 │ +│ ├── contributors 贡献者接口 │ +│ ├── diff Diff 接口 │ +│ ├── init 初始化接口 │ +│ ├── refs 引用接口 │ +│ ├── repo 仓库接口 │ +│ ├── star 星标接口 │ +│ ├── tag 标签接口 │ +│ ├── tree 树接口 │ +│ └── watch 观看接口 │ +│ │ +│ project/ 项目路由 (17 文件) │ +│ ├── activity 活动接口 │ +│ ├── audit 审计接口 │ +│ ├── billing 账单接口 │ +│ ├── board 看板接口 │ +│ ├── info 信息接口 │ +│ ├── init 初始化接口 │ +│ ├── invitation 邀请接口 │ +│ ├── join_answers 加入问答接口 │ +│ ├── join_request 加入请求接口 │ +│ ├── join_settings 加入设置接口 │ +│ ├── labels 标签接口 │ +│ ├── like 点赞接口 │ +│ ├── members 成员接口 │ +│ ├── repo 仓库接口 │ +│ ├── settings 设置接口 │ +│ ├── transfer_repo 仓库转移接口 │ +│ └── watch 观看接口 │ +│ │ +│ issue/ Issue 路由 (10 文件) │ +│ ├── assignee 负责人接口 │ +│ ├── comment 评论接口 │ +│ ├── comment_ 评论表情接口 │ +│ │ reaction │ +│ ├── issue_label Issue 标签接口 │ +│ ├── label 标签接口 │ +│ ├── pull_request Issue 关联 PR 接口 │ +│ ├── reaction 表情接口 │ +│ ├── repo 仓库 Issue 接口 │ +│ └── subscriber 订阅者接口 │ +│ │ +│ room/ 聊天室路由 (14 文件) │ +│ ├── ai AI 接口 │ +│ ├── category 分类接口 │ +│ ├── draft_and_ 草稿和历史接口 │ +│ │ history │ +│ ├── member 成员接口 │ +│ ├── message 消息接口 │ +│ ├── notification 通知接口 │ +│ ├── pin 置顶接口 │ +│ ├── reaction 表情接口 │ +│ ├── room 聊天室接口 │ +│ ├── thread Thread 接口 │ +│ ├── ws WebSocket 接口 │ +│ ├── ws_handler WebSocket 处理器 │ +│ ├── ws_types WebSocket 类型 │ +│ └── ws_universal 通用 WebSocket 接口 │ +│ │ +│ pull_request/ PR 路由 (5 文件) │ +│ ├── merge 合并接口 │ +│ ├── pull_request PR CRUD 接口 │ +│ ├── review 审查接口 │ +│ ├── review_comment 审查评论接口 │ +│ └── review_request 审查请求接口 │ +│ │ +│ agent/ AI Agent 路由 (8 文件) │ +│ ├── code_review 代码审查接口 │ +│ ├── model 模型接口 │ +│ ├── model_ 模型能力接口 │ +│ │ capability │ +│ ├── model_ 模型参数配置接口 │ +│ │ parameter_profile │ +│ ├── model_pricing 模型定价接口 │ +│ ├── model_version 模型版本接口 │ +│ ├── 
pr_summary PR 摘要接口 │ +│ └── provider 模型提供商接口 │ +│ │ +│ user/ 用户路由 (10 文件) │ +│ ├── access_key 访问密钥接口 │ +│ ├── chpc CHPC 接口 │ +│ ├── notification 通知接口 │ +│ ├── preferences 偏好接口 │ +│ ├── profile 资料接口 │ +│ ├── projects 项目接口 │ +│ ├── repository 仓库接口 │ +│ ├── ssh_key SSH 密钥接口 │ +│ ├── subscribe 订阅接口 │ +│ └── user_info 用户信息接口 │ +│ │ +│ openapi/ OpenAPI 文档生成 │ +│ route/ 路由聚合 │ +│ error/ API 错误处理 │ +└─────────────────────────────────────────────────────────────────────────┘ +``` + +--- + +## 服务间通信机制 + +``` +┌────────────────────────────────────────────────────────────────────────────────────────────┐ +│ 服务间通信机制 │ +│ │ +│ ┌──────────────────────────────────────────────────────────────────────────────────┐ │ +│ │ Redis (核心通信总线) │ │ +│ │ │ │ +│ │ Redis Streams ──▶ 异步消息队列 │ │ +│ │ ├── room:stream:{room_id} 房间消息持久化 │ │ +│ │ └── email:stream 邮件发送队列 │ │ +│ │ │ │ +│ │ Redis Pub/Sub ──▶ 实时事件广播 │ │ +│ │ ├── room:pub:{room_id} 房间级广播 │ │ +│ │ └── project:pub:{proj_id} 项目级广播 │ │ +│ │ │ │ +│ │ Redis Lists ──▶ 任务队列 │ │ +│ │ ├── {hook}:sync Git Hook 同步任务 │ │ +│ │ ├── {hook}:fsck Git Hook 完整性检查 │ │ +│ │ └── {hook}:gc Git Hook 垃圾回收 │ │ +│ └──────────────────────────────────────────────────────────────────────────────────┘ │ +│ │ +│ ┌──────────────────────────────────────────────────────────────────────────────────┐ │ +│ │ HTTP/REST API ──▶ 同步服务调用 │ │ +│ │ ├── app ↔ gitserver Git 元数据查询 │ │ +│ │ └── app → 外部 AI 服务 OpenAI 兼容 API 调用 │ │ +│ └──────────────────────────────────────────────────────────────────────────────────┘ │ +│ │ +│ ┌──────────────────────────────────────────────────────────────────────────────────┐ │ +│ │ WebSocket ──▶ 客户端实时通信 │ │ +│ │ ├── /ws 通用 WebSocket (多房间订阅) │ │ +│ │ ├── /ws/rooms/{room_id} 房间级 WebSocket │ │ +│ │ └── /ws/projects/{proj_id} 项目级 WebSocket │ │ +│ └──────────────────────────────────────────────────────────────────────────────────┘ │ +│ │ +│ ┌──────────────────────────────────────────────────────────────────────────────────┐ │ +│ │ Kubernetes CRD + Operator 
──▶ 基础设施编排 │ │ +│ │ ├── apps.code.dev App CRD → Deployment + Service │ │ +│ │ ├── gitservers.code.dev GitServer CRD → Deployment + Service + PVC │ │ +│ │ ├── emailworkers.code.dev EmailWorker CRD → Deployment │ │ +│ │ ├── githooks.code.dev GitHook CRD → Deployment + ConfigMap │ │ +│ │ └── migrates.code.dev Migrate CRD → Job │ │ +│ └──────────────────────────────────────────────────────────────────────────────────┘ │ +└────────────────────────────────────────────────────────────────────────────────────────────┘ +``` + +--- + +## 数据流详解 + +### 1. 聊天消息流程 + +``` +客户端 A app 实例 1 Redis app 实例 2 客户端 B + │ │ │ │ │ + │── WS 发送消息 ───────▶│ │ │ │ + │ │── XADD ──────────────▶│ │ │ + │ │ room:stream:{id} │ │ │ + │ │── PUBLISH ────────────▶│ │ │ + │ │ room:pub:{id} │ │ │ + │ │ │── 事件通知 ────────────▶│ │ + │ │ │ │── WS 推送 ────────────▶│ + │◀─ ACK ───────────────│ │ │ │ + │ │ │ │ │ + │ │◀──── XREADGROUP ─────│ │ │ + │ │ (room_worker) │ │ │ + │ │── 写入 PostgreSQL ────│ │ │ +``` + +### 2. Git Push 流程 + +``` +客户端 gitserver Redis git-hook PostgreSQL + │ │ │ │ │ + │── git push ────────▶│ │ │ │ + │ (HTTP/SSH) │ │ │ │ + │ │── git-receive-pack──▶│ │ │ + │ │── LPUSH ────────────▶│ │ │ + │ │ {hook}:sync │ │ │ + │◀─ ACK ─────────────│ │ │ │ + │ │ │── BRPOPLPUSH ─────▶│ │ + │ │ │ │── 同步元数据 ────────▶│ + │ │ │ │── 可选: fsck/gc ─────▶│ + │ │ │◀── XACK ──────────│ │ +``` + +### 3. 邮件发送流程 + +``` +业务逻辑 app Redis email-worker SMTP + │ │ │ │ │ + │── 触发邮件 ────────▶│ │ │ │ + │ │── XADD ───────────▶│ │ │ + │ │ email:stream │ │ │ + │◀─ 返回 ───────────│ │ │ │ + │ │ │── XREADGROUP ─────▶│ │ + │ │ │ │── 渲染模板 ──────────▶│ + │ │ │ │── SMTP 发送 ─────────▶│ + │ │ │◀── XACK ──────────│ │ +``` + +### 4. 
AI 聊天流程 + +``` +客户端 app OpenAI API Qdrant PostgreSQL + │ │ │ │ │ + │── AI 消息 ──────────▶│ │ │ │ + │ │── 生成 Embedding ──▶│ │ │ + │ │◀──── 向量 ──────────│ │ │ + │ │── 存储向量 ─────────────────────────────▶│ │ + │ │── 流式 Chat ─────────▶│ │ │ + │◀─ Stream Chunk ──────│◀──── Stream ─────────│ │ │ + │ │ │ │ │ + │ │── 保存消息 ────────────────────────────────────────────────▶│ + │ │── 检索相似消息 ────────────────────────▶│ │ + │ │◀── 相似结果 ───────────────────────────│ │ +``` + +--- + +## 技术栈汇总 + +### 后端技术栈 + +| 类别 | 技术 | 版本 | +|------|------|------| +| **语言** | Rust | Edition 2024 | +| **Web 框架** | Actix-web | 4.13.0 | +| **WebSocket** | Actix-ws | 0.4.0 | +| **ORM** | SeaORM | 2.0.0-rc.37 | +| **数据库** | PostgreSQL | - | +| **缓存/消息** | Redis | 1.1.0 | +| **向量库** | Qdrant | 1.17.0 | +| **Git** | git2 / russh | 0.20.0 / 0.55.0 | +| **邮件** | Lettre | 0.11.19 | +| **AI** | async-openai | 0.34.0 | +| **K8s** | kube-rs | 0.98 | +| **gRPC** | Tonic | 0.14.5 | +| **日志** | slog / tracing | 2.8 / 0.1.44 | + +### 前端技术栈 + +| 类别 | 技术 | 版本 | +|------|------|------| +| **语言** | TypeScript | 5.9 | +| **框架** | React | 19.2 | +| **路由** | React Router | 7.13 | +| **构建** | Vite + SWC | 8.0 | +| **UI** | shadcn/ui + Tailwind | 4.11 / 4.2 | +| **状态** | TanStack Query | 5.96 | +| **HTTP** | Axios + OpenAPI 生成 | 1.7 | +| **Markdown** | react-markdown + Shiki | 10 / 1 | +| **拖拽** | dnd-kit | 6.3 | + +--- + +## Docker 与 K8s 部署 + +``` +┌──────────────────────────────────────────────────────────────────────────────┐ +│ Docker 镜像 (6 个) │ +│ │ +│ docker/app.Dockerfile ──▶ apps/app 主应用镜像 │ +│ docker/email-worker.Dockerfile ──▶ apps/email 邮件 Worker 镜像 │ +│ docker/git-hook.Dockerfile ──▶ apps/git-hook Git Hook 镜像 │ +│ docker/gitserver.Dockerfile ──▶ apps/gitserver Git Server 镜像 │ +│ docker/migrate.Dockerfile ──▶ apps/migrate 数据库迁移镜像 │ +│ docker/operator.Dockerfile ──▶ apps/operator K8s Operator 镜像 │ +└──────────────────────────────────────────────────────────────────────────────┘ + 
+┌──────────────────────────────────────────────────────────────────────────────┐ +│ Kubernetes CRD (5 个) │ +│ │ +│ docker/crd/app-crd.yaml ──▶ apps.code.dev │ +│ docker/crd/gitserver-crd.yaml ──▶ gitservers.code.dev │ +│ docker/crd/email-worker-crd.yaml ──▶ emailworkers.code.dev │ +│ docker/crd/git-hook-crd.yaml ──▶ githooks.code.dev │ +│ docker/crd/migrate-crd.yaml ──▶ migrates.code.dev │ +└──────────────────────────────────────────────────────────────────────────────┘ + +┌──────────────────────────────────────────────────────────────────────────────┐ +│ K8s 部署配置 │ +│ │ +│ docker/operator/deployment.yaml ──▶ Operator Deployment │ +│ docker/operator/example/ ──▶ CRD 使用示例 │ +│ code-system.yaml │ +└──────────────────────────────────────────────────────────────────────────────┘ +``` + +--- + +## 关键设计特点 + +| 特点 | 描述 | +|------|------| +| **Monorepo 架构** | Rust workspace + 前端 monorepo,统一管理 | +| **清晰分层** | 路由层 → 业务层 → 基础设施层 → 存储层,职责明确 | +| **异步优先** | 基于 Redis Streams 的异步消息处理 | +| **实时通信** | WebSocket + Redis Pub/Sub 实现多实例同步 | +| **K8s 原生** | Operator + 5 个 CRD 管理全生命周期 | +| **类型安全** | OpenAPI 自动生成 TypeScript 客户端 | +| **可扩展** | 服务独立部署,水平扩展 | +| **Git 兼容** | 完整支持 HTTP/SSH Git 协议 + LFS | +| **AI 集成** | 原生集成 OpenAI 兼容 API + 向量检索 | +| **92 个数据库实体** | 覆盖用户、项目、仓库、Issue、PR、聊天室、AI 等完整业务域 | diff --git a/eslint.config.js b/eslint.config.js new file mode 100644 index 0000000..ea00f0e --- /dev/null +++ b/eslint.config.js @@ -0,0 +1,27 @@ +import js from '@eslint/js' +import globals from 'globals' +import reactHooks from 'eslint-plugin-react-hooks' +import reactRefresh from 'eslint-plugin-react-refresh' +import tseslint from 'typescript-eslint' +import {defineConfig, globalIgnores} from 'eslint/config' + +export default defineConfig([ + globalIgnores(['dist', 'src/client/**']), + { + files: ['**/*.{ts,tsx}'], + extends: [ + js.configs.recommended, + tseslint.configs.recommended, + reactHooks.configs.flat.recommended, + reactRefresh.configs.vite, + ], + rules: { + // Disable 
set-state-in-effect as it's a valid pattern for initializing form state from server data + 'react-hooks/set-state-in-effect': 'off', + }, + languageOptions: { + ecmaVersion: 2020, + globals: globals.browser, + }, + }, +]) diff --git a/index.html b/index.html new file mode 100644 index 0000000..6384476 --- /dev/null +++ b/index.html @@ -0,0 +1,13 @@ +<!doctype html> +<html lang="en"> +<head> + <meta charset="UTF-8"/> + <link href="/logo.png" rel="icon" type="image/svg+xml"/> + <meta content="width=device-width, initial-scale=1.0" name="viewport"/> + <title>GitDataAi + + +
+ + + diff --git a/libs/agent-tool-derive/Cargo.toml b/libs/agent-tool-derive/Cargo.toml new file mode 100644 index 0000000..1fa6e38 --- /dev/null +++ b/libs/agent-tool-derive/Cargo.toml @@ -0,0 +1,26 @@ +[package] +name = "agent-tool-derive" +version.workspace = true +edition.workspace = true +authors.workspace = true +description.workspace = true +repository.workspace = true +readme.workspace = true +homepage.workspace = true +license.workspace = true +keywords.workspace = true +categories.workspace = true +documentation.workspace = true + +[lib] +proc-macro = true +path = "src/lib.rs" + +[dependencies] +syn = { version = "2", features = ["full", "extra-traits"] } +quote = "1" +proc-macro2 = "1" +serde = { version = "1", features = ["derive"] } +serde_json = "1" +convert_case = "0.11" +futures = "0.3" \ No newline at end of file diff --git a/libs/agent-tool-derive/src/lib.rs b/libs/agent-tool-derive/src/lib.rs new file mode 100644 index 0000000..7ce2512 --- /dev/null +++ b/libs/agent-tool-derive/src/lib.rs @@ -0,0 +1,373 @@ +//! Procedural macro for generating tool definitions from functions. +//! +//! # Example +//! +//! ``` +//! use agent_tool_derive::tool; +//! +//! #[tool(description = "Search issues by title")] +//! fn search_issues( +//! title: String, +//! status: Option, +//! ) -> Result, String> { +//! Ok(vec![]) +//! } +//! ``` +//! +//! Generates: +//! - A `SearchIssuesParameters` struct (serde Deserialize) +//! - A `SEARCH_ISSUES_DEFINITION: ToolDefinition` constant +//! 
- A `register_search_issues(registry: &mut ToolRegistry)` helper + +extern crate proc_macro; + +use convert_case::{Case, Casing}; +use proc_macro::TokenStream; +use quote::{format_ident, quote}; +use std::collections::HashMap; +use syn::punctuated::Punctuated; +use syn::{ + Expr, ExprLit, Ident, Lit, Meta, ReturnType, Token, Type, + parse::{Parse, ParseStream}, +}; + +/// Parse the attribute arguments: `description = "...", params(...), required(...)` +struct ToolArgs { + description: Option, + param_descriptions: HashMap, + required: Vec, +} + +impl Parse for ToolArgs { + fn parse(input: ParseStream) -> syn::Result { + Self::parse_from(input) + } +} + +impl ToolArgs { + fn new() -> Self { + Self { + description: None, + param_descriptions: HashMap::new(), + required: Vec::new(), + } + } + + fn parse_from(input: ParseStream) -> syn::Result { + let mut this = Self::new(); + if input.is_empty() { + return Ok(this); + } + + let meta_list: Punctuated = Punctuated::parse_terminated(input)?; + + for meta in meta_list { + match meta { + Meta::NameValue(nv) => { + let ident = nv + .path + .get_ident() + .ok_or_else(|| syn::Error::new_spanned(&nv.path, "expected identifier"))?; + if ident == "description" { + if let Expr::Lit(ExprLit { + lit: Lit::Str(s), .. + }) = nv.value + { + this.description = Some(s.value()); + } else { + return Err(syn::Error::new_spanned( + &nv.value, + "description must be a string literal", + )); + } + } + } + Meta::List(list) if list.path.is_ident("params") => { + let inner: Punctuated = + list.parse_args_with(Punctuated::parse_terminated)?; + for item in inner { + if let Meta::NameValue(nv) = item { + let param_name = nv + .path + .get_ident() + .ok_or_else(|| { + syn::Error::new_spanned(&nv.path, "expected identifier") + })? + .to_string(); + if let Expr::Lit(ExprLit { + lit: Lit::Str(s), .. 
+ }) = nv.value + { + this.param_descriptions.insert(param_name, s.value()); + } + } + } + } + Meta::List(list) if list.path.is_ident("required") => { + let required_vars: Punctuated = + list.parse_args_with(Punctuated::parse_terminated)?; + for var in required_vars { + this.required.push(var.to_string()); + } + } + _ => {} + } + } + Ok(this) + } +} + +/// Map a Rust type to its JSON Schema type name. +fn json_type(ty: &Type) -> proc_macro2::TokenStream { + use syn::Type as T; + let segs = match ty { + T::Path(p) => &p.path.segments, + _ => return quote! { "type": "object" }, + }; + let last = segs.last().map(|s| &s.ident); + let args = segs.last().and_then(|s| { + if let syn::PathArguments::AngleBracketed(a) = &s.arguments { + Some(&a.args) + } else { + None + } + }); + + match (last.map(|i| i.to_string()).as_deref(), args) { + (Some("Vec" | "vec::Vec"), Some(args)) if !args.is_empty() => { + if let syn::GenericArgument::Type(inner) = &args[0] { + let inner_type = json_type(inner); + return quote! { + { + "type": "array", + "items": { #inner_type } + } + }; + } + quote! { "type": "array" } + } + (Some("String" | "str" | "char"), _) => quote! { "type": "string" }, + (Some("bool"), _) => quote! { "type": "boolean" }, + (Some("i8" | "i16" | "i32" | "i64" | "isize"), _) => quote! { "type": "integer" }, + (Some("u8" | "u16" | "u32" | "u64" | "usize"), _) => quote! { "type": "integer" }, + (Some("f32" | "f64"), _) => quote! { "type": "number" }, + _ => quote! { "type": "object" }, + } +} + +/// Extract return type info from `-> Result`. 
+fn parse_return_type( + ret: &ReturnType, +) -> syn::Result<(proc_macro2::TokenStream, proc_macro2::TokenStream)> { + match ret { + ReturnType::Type(_, ty) => { + let ty = &**ty; + if let Type::Path(p) = ty { + let last = p + .path + .segments + .last() + .ok_or_else(|| syn::Error::new_spanned(&p.path, "invalid return type"))?; + if last.ident == "Result" { + if let syn::PathArguments::AngleBracketed(a) = &last.arguments { + let args = &a.args; + if args.len() == 2 { + let ok = &args[0]; + let err = &args[1]; + return Ok((quote!(#ok), quote!(#err))); + } + } + return Err(syn::Error::new_spanned( + &last, + "Result must have 2 type parameters", + )); + } + } + Err(syn::Error::new_spanned( + ty, + "function must return Result", + )) + } + _ => Err(syn::Error::new_spanned( + ret, + "function must have a return type", + )), + } +} + +/// The `#[tool]` attribute macro. +/// +/// Usage: +/// ``` +/// #[tool(description = "Tool description", params( +/// arg1 = "Description of arg1", +/// arg2 = "Description of arg2", +/// ))] +/// async fn my_tool(arg1: String, arg2: Option) -> Result { +/// Ok(serde_json::json!({})) +/// } +/// ``` +/// +/// Generates: +/// - `MyToolParameters` struct with serde Deserialize +/// - `MY_TOOL_DEFINITION: ToolDefinition` constant +/// - `register_my_tool(registry: &mut ToolRegistry)` helper function +#[proc_macro_attribute] +pub fn tool(args: TokenStream, input: TokenStream) -> TokenStream { + let args = syn::parse_macro_input!(args as ToolArgs); + let input_fn = syn::parse_macro_input!(input as syn::ItemFn); + + let fn_name = &input_fn.sig.ident; + let fn_name_str = fn_name.to_string(); + let vis = &input_fn.vis; + let is_async = input_fn.sig.asyncness.is_some(); + + // Parse return type: Result + let (_output_type, _error_type) = match parse_return_type(&input_fn.sig.output) { + Ok(t) => t, + Err(e) => return e.into_compile_error().into(), + }; + + // PascalCase struct name + let struct_name = format_ident!("{}", 
fn_name_str.to_case(Case::Pascal)); + let params_struct_name = format_ident!("{}Parameters", struct_name); + let definition_const_name = format_ident!("{}_DEFINITION", fn_name_str.to_uppercase()); + let register_fn_name = format_ident!("register_{}", fn_name_str); + + // Extract parameters from function signature + let mut param_names: Vec = Vec::new(); + let mut param_types: Vec = Vec::new(); + let mut param_json_types: Vec = Vec::new(); + let mut param_descs: Vec = Vec::new(); + + let required_args = args.required.clone(); + + for arg in &input_fn.sig.inputs { + let syn::FnArg::Typed(pat_type) = arg else { + continue; + }; + let syn::Pat::Ident(pat_ident) = &*pat_type.pat else { + continue; + }; + let name = &pat_ident.ident; + let ty = &*pat_type.ty; + + let name_str = name.to_string(); + let desc = args + .param_descriptions + .get(&name_str) + .map(|s| quote! { #s.to_string() }) + .unwrap_or_else(|| quote! { format!("Parameter {}", #name_str) }); + + param_names.push(format_ident!("{}", name.to_string())); + param_types.push(ty.clone()); + param_json_types.push(json_type(ty)); + param_descs.push(desc); + } + + // Which params are required (not Option) + let required: Vec = if required_args.is_empty() { + param_names + .iter() + .filter(|name| { + let name_str = name.to_string(); + !args + .param_descriptions + .contains_key(&format!("{}_opt", name_str)) + }) + .map(|name| quote! { stringify!(#name) }) + .collect() + } else { + required_args.iter().map(|s| quote! { #s }).collect() + }; + + // Tool description + let tool_description = args + .description + .map(|s| quote! { #s.to_string() }) + .unwrap_or_else(|| quote! { format!("Function {}", #fn_name_str) }); + + // Call invocation (async vs sync) + let call_args = param_names.iter().map(|n| quote! { args.#n }); + let fn_call = if is_async { + quote! { #fn_name(#(#call_args),*).await } + } else { + quote! { #fn_name(#(#call_args),*) } + }; + + let expanded = quote! 
{ + // Parameters struct: deserialized from JSON args by serde + #[derive(serde::Deserialize)] + #vis struct #params_struct_name { + #(#vis #param_names: #param_types,)* + } + + // Keep the original function unchanged + #input_fn + + // Static ToolDefinition constant — register this with ToolRegistry + #vis const #definition_const_name: agent::ToolDefinition = agent::ToolDefinition { + name: #fn_name_str.to_string(), + description: Some(#tool_description), + parameters: Some(agent::ToolSchema { + schema_type: "object".to_string(), + properties: Some({ + let mut map = std::collections::HashMap::new(); + #({ + map.insert(stringify!(#param_names).to_string(), agent::ToolParam { + name: stringify!(#param_names).to_string(), + param_type: { + let jt = #param_json_types; + jt.get("type") + .and_then(|v| v.as_str()) + .unwrap_or("object") + .to_string() + }, + description: Some(#param_descs), + required: true, + properties: None, + items: None, + }); + })* + map + }), + required: Some(vec![#(#required.to_string()),*]), + }), + strict: false, + }; + + /// Registers this tool in the given registry. + /// + /// Generated by `#[tool]` macro for function `#fn_name_str`. + #vis fn #register_fn_name(registry: &mut agent::ToolRegistry) { + let def = #definition_const_name.clone(); + let fn_name = #fn_name_str.to_string(); + registry.register_fn(fn_name, move |_ctx, args| { + let args: #params_struct_name = match serde_json::from_value(args) { + Ok(a) => a, + Err(e) => { + return std::pin::Pin::new(Box::new(async move { + Err(agent::ToolError::ParseError(e.to_string())) + })) + } + }; + std::pin::Pin::new(Box::new(async move { + let result = #fn_call; + match result { + Ok(v) => Ok(serde_json::to_value(v).unwrap_or(serde_json::Value::Null)), + Err(e) => Err(agent::ToolError::ExecutionError(e.to_string())), + } + })) + }); + } + }; + + // We need to use boxed futures for the return type. 
+ // Since we can't add runtime dependencies to the proc-macro crate, + // we emit the .boxed() call and the caller must ensure + // `use futures::FutureExt;` or equivalent is in scope. + // The generated code requires: `futures::FutureExt` (for .boxed()). + + // Re-emit with futures dependency note + TokenStream::from(expanded) +} diff --git a/libs/agent/Cargo.toml b/libs/agent/Cargo.toml new file mode 100644 index 0000000..ecb2bee --- /dev/null +++ b/libs/agent/Cargo.toml @@ -0,0 +1,37 @@ +[package] +name = "agent" +version.workspace = true +edition.workspace = true +authors.workspace = true +description.workspace = true +repository.workspace = true +readme.workspace = true +homepage.workspace = true +license.workspace = true +keywords.workspace = true +categories.workspace = true +documentation.workspace = true +[lib] +path = "lib.rs" +name = "agent" +[dependencies] +async-openai = { version = "0.34.0", features = ["embedding", "chat-completion", "model"] } +tokio = { workspace = true } +async-trait = { workspace = true } +qdrant-client = { workspace = true } +sea-orm = { workspace = true } +serde = { workspace = true, features = ["derive"] } +serde_json = { workspace = true } +thiserror = { workspace = true } +db = { workspace = true } +config = { path = "../config" } +models = { workspace = true } +chrono = { workspace = true } +uuid = { workspace = true } +futures = { workspace = true } +tiktoken-rs = { workspace = true } +agent-tool-derive = { path = "../agent-tool-derive" } +once_cell = { workspace = true } +regex = { workspace = true } +[lints] +workspace = true diff --git a/libs/agent/chat/context.rs b/libs/agent/chat/context.rs new file mode 100644 index 0000000..438d632 --- /dev/null +++ b/libs/agent/chat/context.rs @@ -0,0 +1,200 @@ +use async_openai::types::chat::{ + ChatCompletionRequestAssistantMessage, ChatCompletionRequestAssistantMessageContent, + ChatCompletionRequestDeveloperMessage, ChatCompletionRequestDeveloperMessageContent, + 
ChatCompletionRequestFunctionMessage, ChatCompletionRequestMessage, + ChatCompletionRequestSystemMessage, ChatCompletionRequestSystemMessageContent, + ChatCompletionRequestToolMessage, ChatCompletionRequestToolMessageContent, + ChatCompletionRequestUserMessage, ChatCompletionRequestUserMessageContent, +}; +use chrono::{DateTime, Utc}; +use serde::{Deserialize, Serialize}; +use serde_json::Value; +use std::collections::HashMap; +use uuid::Uuid; + +use crate::compact::MessageSummary; +use models::rooms::room_message::Model as RoomMessageModel; + +/// Sender type for AI context, supporting all roles in the chat. +#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] +pub enum AiContextSenderType { + /// Regular user message + User, + /// AI assistant message + Ai, + /// System message (e.g., summary, notification) + System, + /// Developer message (for system-level instructions) + Developer, + /// Tool call message + Function, + /// Tool result message + FunctionResult, +} + +impl AiContextSenderType { + pub fn from_sender_type(sender_type: &models::rooms::MessageSenderType) -> Self { + match sender_type { + models::rooms::MessageSenderType::Member => Self::User, + models::rooms::MessageSenderType::Admin => Self::User, + models::rooms::MessageSenderType::Owner => Self::User, + models::rooms::MessageSenderType::Ai => Self::Ai, + models::rooms::MessageSenderType::System => Self::System, + models::rooms::MessageSenderType::Tool => Self::Function, + models::rooms::MessageSenderType::Guest => Self::User, + } + } +} + +/// Room message context for AI processing. +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct RoomMessageContext { + pub uid: Uuid, + pub sender_type: AiContextSenderType, + pub sender_uid: Option, + pub sender_name: Option, + pub content: String, + pub content_type: models::rooms::MessageContentType, + pub send_at: DateTime, + /// Tool call ID for FunctionResult messages, used to associate tool results with their calls. 
+ pub tool_call_id: Option, +} + +impl RoomMessageContext { + pub fn from_model(model: &RoomMessageModel, sender_name: Option) -> Self { + Self { + uid: model.id, + sender_type: AiContextSenderType::from_sender_type(&model.sender_type), + sender_uid: model.sender_id, + sender_name, + content: model.content.clone(), + content_type: model.content_type.clone(), + send_at: model.send_at, + tool_call_id: Self::extract_tool_call_id(&model.content), + } + } + + fn extract_tool_call_id(content: &str) -> Option { + let content = content.trim(); + if let Ok(v) = serde_json::from_str::(content) { + v.get("tool_call_id") + .and_then(|v| v.as_str()) + .map(|s| s.to_string()) + } else { + None + } + } + + pub fn from_model_with_names( + model: &RoomMessageModel, + user_names: &HashMap, + ) -> Self { + let sender_name = model + .sender_id + .and_then(|uid| user_names.get(&uid).cloned()); + Self::from_model(model, sender_name) + } + + pub fn to_message(&self) -> ChatCompletionRequestMessage { + match self.sender_type { + AiContextSenderType::User => { + ChatCompletionRequestMessage::User(ChatCompletionRequestUserMessage { + content: ChatCompletionRequestUserMessageContent::Text(self.display_content()), + name: self.sender_name.clone(), + }) + } + AiContextSenderType::Ai => { + ChatCompletionRequestMessage::Assistant(ChatCompletionRequestAssistantMessage { + content: Some(ChatCompletionRequestAssistantMessageContent::Text( + self.display_content(), + )), + name: self.sender_name.clone(), + refusal: None, + audio: None, + tool_calls: None, + #[allow(deprecated)] + function_call: None, + }) + } + AiContextSenderType::System => { + ChatCompletionRequestMessage::System(ChatCompletionRequestSystemMessage { + content: ChatCompletionRequestSystemMessageContent::Text( + self.display_content(), + ), + name: self.sender_name.clone(), + }) + } + AiContextSenderType::Developer => { + ChatCompletionRequestMessage::Developer(ChatCompletionRequestDeveloperMessage { + content: 
ChatCompletionRequestDeveloperMessageContent::Text( + self.display_content(), + ), + name: self.sender_name.clone(), + }) + } + AiContextSenderType::Function => { + ChatCompletionRequestMessage::Function(ChatCompletionRequestFunctionMessage { + content: Some(self.content.clone()), + name: self.display_content(), // Function name is stored in content + }) + } + AiContextSenderType::FunctionResult => { + ChatCompletionRequestMessage::Tool(ChatCompletionRequestToolMessage { + content: ChatCompletionRequestToolMessageContent::Text(self.display_content()), + tool_call_id: self + .tool_call_id + .clone() + .unwrap_or_else(|| "unknown".to_string()), + }) + } + } + } + + fn display_content(&self) -> String { + let mut content = self.content.trim().to_string(); + if content.is_empty() { + content = match self.content_type { + models::rooms::MessageContentType::Text => "[empty]".to_string(), + models::rooms::MessageContentType::Image => "[image]".to_string(), + models::rooms::MessageContentType::Audio => "[audio]".to_string(), + models::rooms::MessageContentType::Video => "[video]".to_string(), + models::rooms::MessageContentType::File => "[file]".to_string(), + }; + } + + if let Some(sender_name) = &self.sender_name { + content = format!("[{}] {}", sender_name, content); + } + + content + } +} + +impl From<&RoomMessageModel> for RoomMessageContext { + fn from(model: &RoomMessageModel) -> Self { + RoomMessageContext::from_model(model, None) + } +} + +impl From for RoomMessageContext { + fn from(summary: MessageSummary) -> Self { + // Map MessageSenderType to AiContextSenderType + let sender_type = AiContextSenderType::from_sender_type(&summary.sender_type); + // For FunctionResult (tool results), ensure tool_call_id is set + let tool_call_id = if sender_type == AiContextSenderType::FunctionResult { + summary.tool_call_id + } else { + None + }; + Self { + uid: summary.id, + sender_type, + sender_uid: summary.sender_id, + sender_name: Some(summary.sender_name), + content: 
summary.content, + content_type: summary.content_type, + send_at: summary.send_at, + tool_call_id, + } + } +} diff --git a/libs/agent/chat/mod.rs b/libs/agent/chat/mod.rs new file mode 100644 index 0000000..57723e1 --- /dev/null +++ b/libs/agent/chat/mod.rs @@ -0,0 +1,61 @@ +use std::pin::Pin; + +use async_openai::types::chat::ChatCompletionTool; +use db::cache::AppCache; +use db::database::AppDatabase; +use models::agents::model; +use models::projects::project; +use models::repos::repo; +use models::rooms::{room, room_message}; +use models::users::user; +use std::collections::HashMap; +use uuid::Uuid; + +/// Maximum recursion rounds for tool-call loops (AI → tool → result → AI). +pub const DEFAULT_MAX_TOOL_DEPTH: usize = 3; + +/// A single chunk from an AI streaming response. +#[derive(Debug, Clone)] +pub struct AiStreamChunk { + pub content: String, + pub done: bool, +} + +/// Optional streaming callback: called for each token chunk. +pub type StreamCallback = Box< + dyn Fn(AiStreamChunk) -> Pin + Send>> + Send + Sync, +>; + +pub struct AiRequest { + pub db: AppDatabase, + pub cache: AppCache, + pub model: model::Model, + pub project: project::Model, + pub sender: user::Model, + pub room: room::Model, + pub input: String, + pub mention: Vec, + pub history: Vec, + /// Optional user name mapping: user_id -> username + pub user_names: HashMap, + pub temperature: f64, + pub max_tokens: i32, + pub top_p: f64, + pub frequency_penalty: f64, + pub presence_penalty: f64, + pub think: bool, + /// OpenAI tool definitions. If None or empty, tool calling is disabled. + pub tools: Option>, + /// Maximum tool-call recursion depth (AI → tool → result → AI loops). Default: 3. 
+ pub max_tool_depth: usize, +} + +pub enum Mention { + User(user::Model), + Repo(repo::Model), +} + +pub mod context; +pub mod service; +pub use context::{AiContextSenderType, RoomMessageContext}; +pub use service::ChatService; diff --git a/libs/agent/chat/service.rs b/libs/agent/chat/service.rs new file mode 100644 index 0000000..7dd2a15 --- /dev/null +++ b/libs/agent/chat/service.rs @@ -0,0 +1,655 @@ +use async_openai::Client; +use async_openai::config::OpenAIConfig; +use async_openai::types::chat::{ + ChatCompletionMessageToolCalls, ChatCompletionRequestAssistantMessage, + ChatCompletionRequestAssistantMessageContent, ChatCompletionRequestMessage, + ChatCompletionRequestSystemMessage, ChatCompletionRequestUserMessage, ChatCompletionTool, + ChatCompletionTools, CreateChatCompletionRequest, CreateChatCompletionResponse, + CreateChatCompletionStreamResponse, FinishReason, ReasoningEffort, ToolChoiceOptions, +}; +use futures::StreamExt; +use models::projects::project_skill; +use models::rooms::room_ai; +use sea_orm::{ColumnTrait, EntityTrait, QueryFilter}; +use uuid::Uuid; + +use super::context::RoomMessageContext; +use super::{AiRequest, AiStreamChunk, Mention, StreamCallback}; +use crate::compact::{CompactConfig, CompactService}; +use crate::embed::EmbedService; +use crate::error::{AgentError, Result}; +use crate::perception::{PerceptionService, SkillEntry, ToolCallEvent}; +use crate::tool::{ToolCall, ToolContext, ToolExecutor}; + +/// Service for handling AI chat requests in rooms. 
+pub struct ChatService { + openai_client: Client, + compact_service: Option, + embed_service: Option, + perception_service: PerceptionService, +} + +impl ChatService { + pub fn new(openai_client: Client) -> Self { + Self { + openai_client, + compact_service: None, + embed_service: None, + perception_service: PerceptionService::default(), + } + } + + pub fn with_compact_service(mut self, compact_service: CompactService) -> Self { + self.compact_service = Some(compact_service); + self + } + + pub fn with_embed_service(mut self, embed_service: EmbedService) -> Self { + self.embed_service = Some(embed_service); + self + } + + pub fn with_perception_service(mut self, perception_service: PerceptionService) -> Self { + self.perception_service = perception_service; + self + } + + #[allow(deprecated)] + pub async fn process(&self, request: AiRequest) -> Result { + let tools: Vec = request.tools.clone().unwrap_or_default(); + let tools_enabled = !tools.is_empty(); + let tool_choice = tools_enabled.then(|| { + async_openai::types::chat::ChatCompletionToolChoiceOption::Mode(ToolChoiceOptions::Auto) + }); + let think = request.think; + let max_tool_depth = request.max_tool_depth; + let top_p = request.top_p; + let frequency_penalty = request.frequency_penalty; + let presence_penalty = request.presence_penalty; + let temperature_f = request.temperature; + let max_tokens_i = request.max_tokens; + + let mut messages = self.build_messages(&request).await?; + + let room_ai = room_ai::Entity::find() + .filter(room_ai::Column::Room.eq(request.room.id)) + .filter(room_ai::Column::Model.eq(request.model.id)) + .one(&request.db) + .await?; + + let model_name = request.model.name.clone(); + let temperature = room_ai + .as_ref() + .and_then(|r| r.temperature.map(|v| v as f32)) + .unwrap_or(temperature_f as f32); + let max_tokens = room_ai + .as_ref() + .and_then(|r| r.max_tokens.map(|v| v as u32)) + .unwrap_or(max_tokens_i as u32); + let mut tool_depth = 0; + + loop { + let req = 
            CreateChatCompletionRequest {
                model: model_name.clone(),
                messages: messages.clone(),
                temperature: Some(temperature),
                max_completion_tokens: Some(max_tokens),
                top_p: Some(top_p as f32),
                frequency_penalty: Some(frequency_penalty as f32),
                presence_penalty: Some(presence_penalty as f32),
                // Non-streaming variant: a single response object.
                stream: Some(false),
                reasoning_effort: Some(if think {
                    ReasoningEffort::High
                } else {
                    ReasoningEffort::None
                }),
                tools: if tools_enabled {
                    Some(
                        tools
                            .iter()
                            .map(|t| ChatCompletionTools::Function(t.clone()))
                            .collect(),
                    )
                } else {
                    None
                },
                tool_choice: tool_choice.clone(),
                ..Default::default()
            };

            let response: CreateChatCompletionResponse = self
                .openai_client
                .chat()
                .create(req)
                .await
                .map_err(|e| AgentError::OpenAi(e.to_string()))?;

            let choice = response
                .choices
                .into_iter()
                .next()
                .ok_or_else(|| AgentError::Internal("no choice in response".into()))?;

            if tools_enabled {
                if let Some(ref tool_calls) = choice.message.tool_calls {
                    if !tool_calls.is_empty() {
                        // Echo the assistant turn (with its tool calls) into the
                        // history so the follow-up request carries full context.
                        messages.push(ChatCompletionRequestMessage::Assistant(
                            ChatCompletionRequestAssistantMessage {
                                content: choice
                                    .message
                                    .content
                                    .clone()
                                    .map(ChatCompletionRequestAssistantMessageContent::Text),
                                name: None,
                                refusal: None,
                                audio: None,
                                tool_calls: Some(tool_calls.clone()),
                                function_call: None,
                            },
                        ));

                        // NOTE(review): element type reconstructed from mangled
                        // source as `ToolCall` — confirm.
                        let calls: Vec<ToolCall> = tool_calls
                            .iter()
                            .filter_map(|tc| {
                                if let ChatCompletionMessageToolCalls::Function(
                                    async_openai::types::chat::ChatCompletionMessageToolCall {
                                        id,
                                        function,
                                    },
                                ) = tc
                                {
                                    Some(ToolCall {
                                        id: id.clone(),
                                        name: function.name.clone(),
                                        arguments: function.arguments.clone(),
                                    })
                                } else {
                                    None
                                }
                            })
                            .collect();

                        if !calls.is_empty() {
                            let tool_messages = self.execute_tool_calls(calls, &request).await?;
                            messages.extend(tool_messages);

                            tool_depth += 1;
                            // NOTE(review): hitting the depth limit returns an EMPTY
                            // string, silently discarding any assistant content from
                            // this round — confirm this is intended.
                            if tool_depth >= max_tool_depth {
                                return Ok(String::new());
                            }
                            continue;
                        }
                    }
                }
            }

            let text = choice.message.content.unwrap_or_default();
            return Ok(text);
        }
    }

    /// Streaming variant of `process`: emits incremental chunks through
    /// `on_chunk`, accumulating fragmented tool-call deltas and re-entering the
    /// loop after executing tools, up to `request.max_tool_depth` rounds.
    #[allow(deprecated)]
    pub async fn process_stream(&self, request: AiRequest, on_chunk: StreamCallback) -> Result<()> {
        // NOTE(review): element type reconstructed — confirm against `request.tools`.
        let tools: Vec<ChatCompletionTool> = request.tools.clone().unwrap_or_default();
        let tools_enabled = !tools.is_empty();
        let tool_choice = tools_enabled.then(|| {
            async_openai::types::chat::ChatCompletionToolChoiceOption::Mode(ToolChoiceOptions::Auto)
        });
        let think = request.think;
        let max_tool_depth = request.max_tool_depth;
        let top_p = request.top_p;
        let frequency_penalty = request.frequency_penalty;
        let presence_penalty = request.presence_penalty;
        let temperature_f = request.temperature;
        let max_tokens_i = request.max_tokens;

        let mut messages = self.build_messages(&request).await?;

        // Per-room overrides for this model (temperature / max_tokens), if any.
        let room_ai = room_ai::Entity::find()
            .filter(room_ai::Column::Room.eq(request.room.id))
            .filter(room_ai::Column::Model.eq(request.model.id))
            .one(&request.db)
            .await?;

        let model_name = request.model.name.clone();
        let temperature = room_ai
            .as_ref()
            .and_then(|r| r.temperature.map(|v| v as f32))
            .unwrap_or(temperature_f as f32);
        let max_tokens = room_ai
            .as_ref()
            .and_then(|r| r.max_tokens.map(|v| v as u32))
            .unwrap_or(max_tokens_i as u32);
        let mut tool_depth = 0;

        loop {
            let req = CreateChatCompletionRequest {
                model: model_name.clone(),
                messages: messages.clone(),
                temperature: Some(temperature),
                max_completion_tokens: Some(max_tokens),
                top_p: Some(top_p as f32),
                frequency_penalty: Some(frequency_penalty as f32),
                presence_penalty: Some(presence_penalty as f32),
                stream: Some(true),
                reasoning_effort: Some(if think {
                    ReasoningEffort::High
                } else {
                    ReasoningEffort::None
                }),
                tools: if tools_enabled {
                    Some(
                        tools
                            .iter()
                            .map(|t| ChatCompletionTools::Function(t.clone()))
                            .collect(),
                    )
                } else {
                    None
                },
                tool_choice: tool_choice.clone(),
                ..Default::default()
            };

            let mut stream = self
                .openai_client
                .chat()
                .create_stream(req)
                .await
                .map_err(|e| AgentError::OpenAi(e.to_string()))?;

            let mut text_accumulated = String::new();
            let mut tool_call_chunks: Vec<ToolCallChunkAccum> = Vec::new();
            let mut finish_reason: Option<FinishReason> = None;

            while let Some(chunk_result) = stream.next().await {
                let chunk: CreateChatCompletionStreamResponse =
                    chunk_result.map_err(|e| AgentError::OpenAi(e.to_string()))?;

                let choice = match chunk.choices.first() {
                    Some(c) => c,
                    None => continue,
                };

                // Track finish reason (last one seen wins).
                if let Some(ref fr) = choice.finish_reason {
                    finish_reason = Some(fr.clone());
                }

                // Text delta. NOTE(review): each chunk carries the FULL
                // accumulated text so far, not just the delta — confirm
                // consumers expect cumulative content.
                if let Some(content) = &choice.delta.content {
                    text_accumulated.push_str(content);
                    on_chunk(AiStreamChunk {
                        content: text_accumulated.clone(),
                        done: false,
                    })
                    .await;
                }

                // Tool-call deltas arrive fragmented; accumulate by index.
                if let Some(ref tool_chunks) = choice.delta.tool_calls {
                    for tc in tool_chunks {
                        let idx = tc.index as usize;
                        if tool_call_chunks.len() <= idx {
                            tool_call_chunks.resize(idx + 1, ToolCallChunkAccum::default());
                        }
                        if let Some(ref id) = tc.id {
                            tool_call_chunks[idx].id = Some(id.clone());
                        }
                        if let Some(ref fc) = tc.function {
                            if let Some(ref name) = fc.name {
                                tool_call_chunks[idx].name.push_str(name);
                            }
                            if let Some(ref args) = fc.arguments {
                                tool_call_chunks[idx].arguments.push_str(args);
                            }
                        }
                    }
                }
            }

            let has_tool_calls = matches!(
                finish_reason,
                Some(FinishReason::ToolCalls) | Some(FinishReason::FunctionCall)
            );

            if has_tool_calls && tools_enabled {
                // Send final text chunk for this round.
                // NOTE(review): a `done: true` chunk is emitted here AND again
                // after the follow-up round — confirm consumers tolerate
                // multiple terminal chunks per stream.
                on_chunk(AiStreamChunk {
                    content: text_accumulated.clone(),
                    done: true,
                })
                .await;

                // Build ToolCall list from accumulated chunks; synthesize an id
                // if the provider never sent one.
                let tool_calls: Vec<_> = tool_call_chunks
                    .into_iter()
                    .filter(|c| !c.name.is_empty())
                    .map(|c| ToolCall {
                        id: c.id.unwrap_or_else(|| Uuid::new_v4().to_string()),
                        name: c.name,
                        arguments: c.arguments,
                    })
                    .collect();

                if !tool_calls.is_empty() {
                    // Append assistant message with tool calls to history.
                    messages.push(ChatCompletionRequestMessage::Assistant(
                        ChatCompletionRequestAssistantMessage {
                            content: Some(
                                ChatCompletionRequestAssistantMessageContent::Text(
                                    text_accumulated,
                                ),
                            ),
                            name: None,
                            refusal: None,
                            audio: None,
                            tool_calls: Some(
                                tool_calls
                                    .iter()
                                    .map(|tc| {
                                        ChatCompletionMessageToolCalls::Function(
                                            async_openai::types::chat::ChatCompletionMessageToolCall {
                                                id: tc.id.clone(),
                                                function: async_openai::types::chat::FunctionCall {
                                                    name: tc.name.clone(),
                                                    arguments: tc.arguments.clone(),
                                                },
                                            },
                                        )
                                    })
                                    .collect(),
                            ),
                            function_call: None,
                        },
                    ));

                    let tool_messages = self.execute_tool_calls(tool_calls, &request).await?;
                    messages.extend(tool_messages);

                    tool_depth += 1;
                    if tool_depth >= max_tool_depth {
                        return Ok(());
                    }
                    continue;
                }
            }

            on_chunk(AiStreamChunk {
                content: text_accumulated,
                done: true,
            })
            .await;
            return Ok(());
        }
    }

    /// Executes a batch of tool calls and returns the tool result messages.
    async fn execute_tool_calls(
        &self,
        calls: Vec<ToolCall>,
        request: &AiRequest,
    ) -> Result<Vec<ChatCompletionRequestMessage>> {
        // Scope tool execution to the current room/sender/project.
        let mut ctx = ToolContext::new(
            request.db.clone(),
            request.cache.clone(),
            request.room.id,
            Some(request.sender.uid),
        )
        .with_project(request.project.id);

        let executor = ToolExecutor::new();

        let results = executor
            .execute_batch(calls, &mut ctx)
            .await
            .map_err(|e| AgentError::Internal(e.to_string()))?;

        Ok(ToolExecutor::to_tool_messages(&results))
    }

    /// Assemble the full message list for one AI round: compacted/raw history,
    /// mention context (repos/users), skills, memories, then the user input.
    async fn build_messages(
        &self,
        request: &AiRequest,
    ) -> Result<Vec<ChatCompletionRequestMessage>> {
        let mut messages = Vec::new();

        let mut processed_history = Vec::new();
        if let Some(compact_service) = &self.compact_service {
            // Auto-compact: only compresses when token count exceeds threshold
            let config = CompactConfig::default();
            match compact_service
                .compact_room_auto(request.room.id, Some(request.user_names.clone()), config)
                .await
            {
                Ok(compact_summary) => {
                    if !compact_summary.summary.is_empty() {
                        messages.push(ChatCompletionRequestMessage::System(
                            ChatCompletionRequestSystemMessage {
                                content: async_openai::types::chat::ChatCompletionRequestSystemMessageContent::Text(
                                    format!("Conversation summary:\n{}", compact_summary.summary),
                                ),
                                ..Default::default()
                            },
                        ));
                    }
                    processed_history = compact_summary.retained;
                }
                Err(e) => {
                    // Best-effort: compaction failure falls back to raw history.
                    let _ = e;
                }
            }
        }

        if !processed_history.is_empty() {
            for msg_summary in processed_history {
                let ctx = RoomMessageContext::from(msg_summary);
                messages.push(ctx.to_message());
            }
        } else {
            for msg in &request.history {
                let ctx = RoomMessageContext::from_model_with_names(msg, &request.user_names);
                messages.push(ctx.to_message());
            }
        }

        if let Some(embed_service) = &self.embed_service {
            for mention in &request.mention {
                match mention {
                    Mention::Repo(repo) => {
                        let query = format!(
                            "{} {}",
                            repo.repo_name,
                            repo.description.as_deref().unwrap_or_default()
                        );
                        match embed_service.search_issues(&query, 5).await {
                            Ok(issues) if !issues.is_empty() => {
                                let context = format!(
                                    "Related issues:\n{}",
                                    issues
                                        .iter()
                                        .map(|i| format!("- {}", i.payload.text))
                                        .collect::<Vec<_>>()
                                        .join("\n")
                                );
                                messages.push(ChatCompletionRequestMessage::System(
                                    ChatCompletionRequestSystemMessage {
                                        content: async_openai::types::chat::ChatCompletionRequestSystemMessageContent::Text(
                                            context,
                                        ),
                                        ..Default::default()
                                    },
                                ));
                            }
                            Err(e) => {
                                // Search is best-effort; ignore failures.
                                let _ = e;
                            }
                            _ => {}
                        }
                        match embed_service.search_repos(&query, 3).await {
                            Ok(repos) if !repos.is_empty() => {
                                let context = format!(
                                    "Related repositories:\n{}",
                                    repos
                                        .iter()
                                        .map(|r| format!("- {}", r.payload.text))
                                        .collect::<Vec<_>>()
                                        .join("\n")
                                );
                                messages.push(ChatCompletionRequestMessage::System(
                                    ChatCompletionRequestSystemMessage {
                                        content: async_openai::types::chat::ChatCompletionRequestSystemMessageContent::Text(
                                            context,
                                        ),
                                        ..Default::default()
                                    },
                                ));
                            }
                            Err(e) => {
                                let _ = e;
                            }
                            _ => {}
                        }
                    }
                    Mention::User(user) => {
                        let mut profile_parts = vec![format!("Username: {}", user.username)];
                        if let Some(ref display_name) = user.display_name {
                            profile_parts.push(format!("Display name: {}", display_name));
                        }
                        if let Some(ref org) = user.organization {
                            profile_parts.push(format!("Organization: {}", org));
                        }
                        if let Some(ref website) = user.website_url {
                            profile_parts.push(format!("Website: {}", website));
                        }
                        messages.push(ChatCompletionRequestMessage::System(
                            ChatCompletionRequestSystemMessage {
                                content: async_openai::types::chat::ChatCompletionRequestSystemMessageContent::Text(
                                    format!("Mentioned user profile:\n{}", profile_parts.join("\n")),
                                ),
                                ..Default::default()
                            },
                        ));
                    }
                }
            }
        }

        // Inject relevant skills via the perception system (auto + active + passive).
        let skill_contexts = self.build_skill_context(request).await;
        for ctx in skill_contexts {
            messages.push(ctx.to_system_message());
        }

        // Inject relevant past conversation memories via vector similarity.
        let memories = self.build_memory_context(request).await;
        for mem in memories {
            messages.push(mem.to_system_message());
        }

        messages.push(ChatCompletionRequestMessage::User(
            ChatCompletionRequestUserMessage {
                content: async_openai::types::chat::ChatCompletionRequestUserMessageContent::Text(
                    request.input.clone(),
                ),
                ..Default::default()
            },
        ));

        Ok(messages)
    }

    /// Fetch enabled skills for the current project and run them through the
    /// perception system (auto + active + passive) to inject relevant context.
    // NOTE(review): return element type reconstructed as `SkillContext`
    // (items expose `.label` and `.to_system_message()`) — confirm.
    async fn build_skill_context(&self, request: &AiRequest) -> Vec<SkillContext> {
        // Fetch enabled skills for this project.
        let skills: Vec<SkillEntry> = match project_skill::Entity::find()
            .filter(project_skill::Column::ProjectUuid.eq(request.project.id))
            .filter(project_skill::Column::Enabled.eq(true))
            .all(&request.db)
            .await
        {
            Ok(models) => models
                .into_iter()
                .map(|s| SkillEntry {
                    slug: s.slug,
                    name: s.name,
                    description: s.description,
                    content: s.content,
                })
                .collect(),
            Err(_) => return Vec::new(),
        };

        if skills.is_empty() {
            return Vec::new();
        }

        // Build history text for auto-awareness scoring (most recent 10).
        let history_texts: Vec<String> = request
            .history
            .iter()
            .rev()
            .take(10)
            .map(|msg| msg.content.clone())
            .collect();

        // Active + passive + auto perception (keyword-based).
        // NOTE(review): element type of `tool_events` not visible — TODO confirm.
        let tool_events: Vec<String> = Vec::new(); // Tool calls tracked in loop via process()
        let keyword_skills = self
            .perception_service
            .inject_skills(&request.input, &history_texts, &tool_events, &skills)
            .await;

        // Vector-aware active perception: semantic search for skills via Qdrant.
        let mut vector_skills = Vec::new();
        if let Some(embed_service) = &self.embed_service {
            let awareness = crate::perception::VectorActiveAwareness::default();
            vector_skills = awareness
                .detect(embed_service, &request.input, &request.project.id.to_string())
                .await;
        }

        // Merge: deduplicate by label, preferring vector results (higher signal).
        let mut seen = std::collections::HashSet::new();
        let mut result = Vec::new();
        for ctx in vector_skills {
            if seen.insert(ctx.label.clone()) {
                result.push(ctx);
            }
        }
        for ctx in keyword_skills {
            if seen.insert(ctx.label.clone()) {
                result.push(ctx);
            }
        }

        result
    }

    /// Inject relevant past conversation memories via vector similarity search.
    // NOTE(review): return element type reconstructed as `SkillContext` — confirm.
    async fn build_memory_context(&self, request: &AiRequest) -> Vec<SkillContext> {
        let embed_service = match &self.embed_service {
            Some(s) => s,
            None => return Vec::new(),
        };

        // Search memories by current input semantic similarity.
        let awareness = crate::perception::VectorPassiveAwareness::default();
        awareness
            .detect(embed_service, &request.input, &request.room.id.to_string())
            .await
    }
}

/// Accumulator for fragmented streaming tool-call deltas (id may arrive in a
/// later chunk; name/arguments arrive as concatenable fragments).
#[derive(Clone, Debug, Default)]
struct ToolCallChunkAccum {
    id: Option<String>,
    name: String,
    arguments: String,
}
diff --git a/libs/agent/client.rs b/libs/agent/client.rs
new file mode 100644
index 0000000..ef1cf89
--- /dev/null
+++ b/libs/agent/client.rs
@@ -0,0 +1,279 @@
//! Unified AI client with built-in retry, token tracking, and session recording.
//!
//! Provides a single entry point for all AI calls with:
//! - Exponential backoff with jitter (max 3 retries)
//! - Retryable error classification (429/500/502/503/504)
//! - Token usage tracking (input/output)

use async_openai::Client;
use async_openai::config::OpenAIConfig;
use async_openai::types::chat::{
    ChatCompletionRequestMessage, ChatCompletionTool, ChatCompletionToolChoiceOption,
    ChatCompletionTools, CreateChatCompletionRequest, CreateChatCompletionResponse,
};
use std::time::Instant;

use crate::error::{AgentError, Result};

/// Configuration for the AI client.
#[derive(Clone)]
pub struct AiClientConfig {
    pub api_key: String,
    pub base_url: Option<String>,
}

impl AiClientConfig {
    pub fn new(api_key: String) -> Self {
        Self {
            api_key,
            base_url: None,
        }
    }

    pub fn with_base_url(mut self, base_url: impl Into<String>) -> Self {
        self.base_url = Some(base_url.into());
        self
    }

    pub fn build_client(&self) -> Client<OpenAIConfig> {
        let mut config = OpenAIConfig::new().with_api_key(&self.api_key);
        if let Some(ref url) = self.base_url {
            config = config.with_api_base(url);
        }
        Client::with_config(config)
    }
}

/// Response from an AI call, including usage statistics.
#[derive(Debug, Clone)]
pub struct AiCallResponse {
    pub content: String,
    pub input_tokens: i64,
    pub output_tokens: i64,
    pub latency_ms: i64,
}

impl AiCallResponse {
    pub fn total_tokens(&self) -> i64 {
        self.input_tokens + self.output_tokens
    }
}

/// Internal state for retry tracking.
#[derive(Debug)]
struct RetryState {
    attempt: u32,
    max_retries: u32,
    max_backoff_ms: u64,
}

impl RetryState {
    fn new(max_retries: u32) -> Self {
        Self {
            attempt: 0,
            max_retries,
            max_backoff_ms: 5000,
        }
    }

    fn should_retry(&self) -> bool {
        self.attempt < self.max_retries
    }

    /// Calculate backoff duration with "equal jitter": half the exponential
    /// base is fixed, the other half is uniformly random.
+ fn backoff_duration(&self) -> std::time::Duration { + let exp = self.attempt.min(5); + // base = 500 * 2^exp, capped at max_backoff_ms + let base_ms = 500u64 + .saturating_mul(2u64.pow(exp)) + .min(self.max_backoff_ms); + // jitter: random [0, base_ms/2] + let jitter = (fastrand_u64(base_ms / 2 + 1)) as u64; + std::time::Duration::from_millis(base_ms / 2 + jitter) + } + + fn next(&mut self) { + self.attempt += 1; + } +} + +/// Fast pseudo-random u64 using a simple LCG. +/// Good enough for jitter — not for cryptography. +fn fastrand_u64(n: u64) -> u64 { + use std::sync::atomic::{AtomicU64, Ordering}; + static STATE: AtomicU64 = AtomicU64::new(0x193_667_6a_5e_7c_57); + if n <= 1 { + return 0; + } + let mut current = STATE.load(Ordering::Relaxed); + loop { + let new_val = current.wrapping_mul(6364136223846793005).wrapping_add(1); + match STATE.compare_exchange_weak(current, new_val, Ordering::Relaxed, Ordering::Relaxed) { + Ok(_) => return new_val % n, + Err(actual) => current = actual, + } + } +} + +/// Determine if an error is retryable. +fn is_retryable_error(err: &async_openai::error::OpenAIError) -> bool { + use async_openai::error::OpenAIError; + match err { + // Network errors (DNS failure, connection refused, timeout) are always retryable + OpenAIError::Reqwest(_) => true, + // For API errors, check the error code string (e.g., "rate_limit_exceeded") + OpenAIError::ApiError(api_err) => api_err.code.as_ref().map_or(false, |code| { + matches!( + code.as_str(), + "rate_limit_exceeded" + | "internal_server_error" + | "service_unavailable" + | "gateway_timeout" + | "bad_gateway" + ) + }), + _ => false, + } +} + +/// Call the AI model with automatic retry. 
+pub async fn call_with_retry( + messages: &[ChatCompletionRequestMessage], + model: &str, + config: &AiClientConfig, + max_retries: Option, +) -> Result { + let client = config.build_client(); + let mut state = RetryState::new(max_retries.unwrap_or(3)); + + loop { + let start = Instant::now(); + + let req = CreateChatCompletionRequest { + model: model.to_string(), + messages: messages.to_vec(), + ..Default::default() + }; + + let result = client.chat().create(req).await; + + match result { + Ok(response) => { + let latency_ms = start.elapsed().as_millis() as i64; + let (input_tokens, output_tokens) = extract_usage(&response); + + return Ok(AiCallResponse { + content: extract_content(&response), + input_tokens, + output_tokens, + latency_ms, + }); + } + Err(err) => { + if state.should_retry() && is_retryable_error(&err) { + let duration = state.backoff_duration(); + eprintln!( + "AI call failed (attempt {}/{}), retrying in {:?}", + state.attempt + 1, + state.max_retries, + duration + ); + tokio::time::sleep(duration).await; + state.next(); + continue; + } + return Err(AgentError::OpenAi(err.to_string())); + } + } + } +} + +/// Call with custom parameters (temperature, max_tokens, optional tools). 
+pub async fn call_with_params( + messages: &[ChatCompletionRequestMessage], + model: &str, + config: &AiClientConfig, + temperature: f32, + max_tokens: u32, + max_retries: Option, + tools: Option<&[ChatCompletionTool]>, +) -> Result { + let client = config.build_client(); + let mut state = RetryState::new(max_retries.unwrap_or(3)); + + loop { + let start = Instant::now(); + + let req = CreateChatCompletionRequest { + model: model.to_string(), + messages: messages.to_vec(), + temperature: Some(temperature), + max_completion_tokens: Some(max_tokens), + tools: tools.map(|ts| { + ts.iter() + .map(|t| ChatCompletionTools::Function(t.clone())) + .collect() + }), + tool_choice: tools.filter(|ts| !ts.is_empty()).map(|_| { + ChatCompletionToolChoiceOption::Mode( + async_openai::types::chat::ToolChoiceOptions::Auto, + ) + }), + ..Default::default() + }; + + let result = client.chat().create(req).await; + + match result { + Ok(response) => { + let latency_ms = start.elapsed().as_millis() as i64; + let (input_tokens, output_tokens) = extract_usage(&response); + + return Ok(AiCallResponse { + content: extract_content(&response), + input_tokens, + output_tokens, + latency_ms, + }); + } + Err(err) => { + if state.should_retry() && is_retryable_error(&err) { + let duration = state.backoff_duration(); + eprintln!( + "AI call failed (attempt {}/{}), retrying in {:?}", + state.attempt + 1, + state.max_retries, + duration + ); + tokio::time::sleep(duration).await; + state.next(); + continue; + } + return Err(AgentError::OpenAi(err.to_string())); + } + } + } +} + +/// Extract text content from a chat completion response. +fn extract_content(response: &CreateChatCompletionResponse) -> String { + response + .choices + .first() + .and_then(|c| c.message.content.clone()) + .unwrap_or_default() +} + +/// Extract usage (input/output tokens) from a response. 
/// Extract usage (input/output tokens) from a response; (0, 0) when absent.
fn extract_usage(response: &CreateChatCompletionResponse) -> (i64, i64) {
    response
        .usage
        .as_ref()
        .map(|u| {
            (
                i64::try_from(u.prompt_tokens).unwrap_or(0),
                i64::try_from(u.completion_tokens).unwrap_or(0),
            )
        })
        .unwrap_or((0, 0))
}
diff --git a/libs/agent/compact/helpers.rs b/libs/agent/compact/helpers.rs
new file mode 100644
index 0000000..80b8091
--- /dev/null
+++ b/libs/agent/compact/helpers.rs
@@ -0,0 +1,45 @@
use super::types::{CompactSummary, MessageSummary};

/// Render raw room messages as "[timestamp] sender: content" lines, one per
/// message, resolving the sender label through `sender_mapper`.
pub fn messages_to_text<F>(
    messages: &[models::rooms::room_message::Model],
    sender_mapper: F,
) -> String
where
    F: Fn(&models::rooms::room_message::Model) -> String,
{
    messages
        .iter()
        .map(|m| {
            let sender = sender_mapper(m);
            format!("[{}] {}: {}", m.send_at, sender, m.content)
        })
        .collect::<Vec<_>>()
        .join("\n")
}

/// Render retained message summaries in the same "[timestamp] sender: content"
/// format as `messages_to_text`.
pub fn retained_as_text(retained: &[MessageSummary]) -> String {
    retained
        .iter()
        .map(|m| format!("[{}] {}: {}", m.send_at, m.sender_name, m.content))
        .collect::<Vec<_>>()
        .join("\n")
}

/// Build the markdown body for a compact summary: just the retained messages
/// when nothing was summarised, otherwise the summary followed by the
/// retained tail.
pub fn summary_content(summary: &CompactSummary) -> String {
    if summary.summary.is_empty() {
        format!(
            "## Recent conversation ({} messages)\n\n{}",
            summary.retained.len(),
            retained_as_text(&summary.retained)
        )
    } else {
        format!(
            "## Earlier conversation ({} messages summarised)\n{}\n\n\
             ## Most recent {} messages\n\n{}",
            summary.messages_compressed,
            summary.summary,
            summary.retained.len(),
            retained_as_text(&summary.retained)
        )
    }
}
diff --git a/libs/agent/compact/mod.rs b/libs/agent/compact/mod.rs
new file mode 100644
index 0000000..7e2b56b
--- /dev/null
+++ b/libs/agent/compact/mod.rs
@@ -0,0 +1,8 @@
//! Context compaction for AI sessions and room message history.

pub mod helpers;
pub mod service;
pub mod types;

pub use service::CompactService;
pub use types::{CompactConfig, CompactLevel, CompactSummary, MessageSummary, ThresholdResult};
diff --git a/libs/agent/compact/service.rs b/libs/agent/compact/service.rs
new file mode 100644
index 0000000..483fa6f
--- /dev/null
+++ b/libs/agent/compact/service.rs
@@ -0,0 +1,467 @@
use async_openai::Client;
use async_openai::config::OpenAIConfig;
use async_openai::types::chat::{
    ChatCompletionRequestMessage, ChatCompletionRequestUserMessage, CreateChatCompletionRequest,
    CreateChatCompletionResponse,
};
use chrono::Utc;
use models::ColumnTrait;
use models::rooms::room_message::{
    Column as RmCol, Entity as RoomMessage, Model as RoomMessageModel,
};
use models::users::user::{Column as UserCol, Entity as User};
use sea_orm::{DatabaseConnection, EntityTrait, QueryFilter, QueryOrder};
use serde_json::Value;
use uuid::Uuid;

use crate::AgentError;
use crate::compact::helpers::summary_content;
use crate::compact::types::{
    CompactConfig, CompactLevel, CompactSummary, MessageSummary, ThresholdResult,
};
use crate::tokent::{TokenUsage, resolve_usage};

/// Compresses long room/session histories into an AI-generated summary plus a
/// short retained tail of recent messages.
#[derive(Clone)]
pub struct CompactService {
    db: DatabaseConnection,
    // NOTE(review): generic argument reconstructed as `OpenAIConfig` — confirm.
    openai: Client<OpenAIConfig>,
    model: String,
}

impl CompactService {
    pub fn new(db: DatabaseConnection, openai: Client<OpenAIConfig>, model: String) -> Self {
        Self { db, openai, model }
    }

    /// Compact a room's history at the given `level`, summarising everything
    /// except the last `level.retain_count()` messages. When the history fits
    /// within the retain count, returns a no-op summary (empty `summary`,
    /// `messages_compressed == 0`).
    pub async fn compact_room(
        &self,
        room_id: Uuid,
        level: CompactLevel,
        // NOTE(review): map type reconstructed as HashMap<Uuid, String> — confirm.
        user_names: Option<std::collections::HashMap<Uuid, String>>,
    ) -> Result<CompactSummary, AgentError> {
        let messages = self.fetch_room_messages(room_id).await?;

        // Distinct sender ids (HashSet round-trip dedupes).
        let user_ids: Vec<Uuid> = messages
            .iter()
            .filter_map(|m| m.sender_id)
            .collect::<std::collections::HashSet<_>>()
            .into_iter()
            .collect();
        let user_name_map = match user_names {
            Some(map) => map,
            None => self.get_user_name_map(&user_ids).await?,
        };

        if messages.len() <= level.retain_count() {
            let retained: Vec<MessageSummary> = messages
                .iter()
                .map(|m| Self::message_to_summary(m, &user_name_map))
                .collect();
            return Ok(CompactSummary {
                session_id: Uuid::new_v4(),
                room_id,
                retained,
                summary: String::new(),
                compacted_at: Utc::now(),
                messages_compressed: 0,
                usage: None,
            });
        }

        let retain_count = level.retain_count();
        let split_index = messages.len().saturating_sub(retain_count);
        let (to_summarize, retained_messages) = messages.split_at(split_index);

        let retained: Vec<MessageSummary> = retained_messages
            .iter()
            .map(|m| Self::message_to_summary(m, &user_name_map))
            .collect();

        let (summary, remote_usage) = self.summarize_messages(to_summarize).await?;

        // Build text of what was summarized (for tiktoken fallback)
        let summarized_text = to_summarize
            .iter()
            .map(|m| m.content.as_str())
            .collect::<Vec<_>>()
            .join("\n");
        let usage = resolve_usage(remote_usage, &self.model, &summarized_text, &summary);

        Ok(CompactSummary {
            session_id: Uuid::new_v4(),
            room_id,
            retained,
            summary,
            compacted_at: Utc::now(),
            messages_compressed: to_summarize.len(),
            usage: Some(usage),
        })
    }

    /// Compact a session's history. Same algorithm as `compact_room`, but the
    /// session id is preserved and `room_id` is set to `Uuid::nil()`.
    /// Errors when the session has no messages at all.
    pub async fn compact_session(
        &self,
        session_id: Uuid,
        level: CompactLevel,
        user_names: Option<std::collections::HashMap<Uuid, String>>,
    ) -> Result<CompactSummary, AgentError> {
        let messages: Vec<RoomMessageModel> = RoomMessage::find()
            .filter(RmCol::Room.eq(session_id))
            .order_by_asc(RmCol::Seq)
            .all(&self.db)
            .await
            .map_err(|e| AgentError::Internal(e.to_string()))?;

        if messages.is_empty() {
            return Err(AgentError::Internal("session has no messages".into()));
        }

        let user_ids: Vec<Uuid> = messages
            .iter()
            .filter_map(|m| m.sender_id)
            .collect::<std::collections::HashSet<_>>()
            .into_iter()
            .collect();
        let user_name_map = match user_names {
            Some(map) => map,
            None => self.get_user_name_map(&user_ids).await?,
        };

        if messages.len() <= level.retain_count() {
            let retained: Vec<MessageSummary> = messages
                .iter()
                .map(|m| Self::message_to_summary(m, &user_name_map))
                .collect();
            return Ok(CompactSummary {
                session_id,
                room_id: Uuid::nil(),
                retained,
                summary: String::new(),
                compacted_at: Utc::now(),
                messages_compressed: 0,
                usage: None,
            });
        }

        let retain_count = level.retain_count();
        let split_index = messages.len().saturating_sub(retain_count);
        let (to_summarize, retained_messages) = messages.split_at(split_index);

        let retained: Vec<MessageSummary> = retained_messages
            .iter()
            .map(|m| Self::message_to_summary(m, &user_name_map))
            .collect();

        // Summarize the earlier messages
        let (summary, remote_usage) = self.summarize_messages(to_summarize).await?;

        // Build text of what was summarized (for tiktoken fallback)
        let summarized_text = to_summarize
            .iter()
            .map(|m| m.content.as_str())
            .collect::<Vec<_>>()
            .join("\n");
        let usage = resolve_usage(remote_usage, &self.model, &summarized_text, &summary);

        Ok(CompactSummary {
            session_id,
            room_id: Uuid::nil(),
            retained,
            summary,
            compacted_at: Utc::now(),
            messages_compressed: to_summarize.len(),
            usage: Some(usage),
        })
    }

    /// Render a compact summary as a single system message for the chat API.
    pub fn summary_as_system_message(summary: &CompactSummary) -> ChatCompletionRequestMessage {
        let content = summary_content(summary);
        ChatCompletionRequestMessage::System(
            async_openai::types::chat::ChatCompletionRequestSystemMessage {
                content: async_openai::types::chat::ChatCompletionRequestSystemMessageContent::Text(
                    content,
                ),
                ..Default::default()
            },
        )
    }

    /// Check if the message history for a room exceeds the token threshold.
    /// Returns `ThresholdResult::Skip` if below threshold, `Compact` if above.
    ///
    /// This method fetches messages and estimates their token count with the
    /// cheap chars/4 heuristic in `estimate_message_tokens` (not tiktoken).
    /// Call this before deciding whether to run full compaction.
+ pub async fn check_threshold( + &self, + room_id: Uuid, + config: CompactConfig, + ) -> Result { + let messages = self.fetch_room_messages(room_id).await?; + let tokens = self.estimate_message_tokens(&messages); + + if tokens < config.token_threshold { + return Ok(ThresholdResult::Skip { + estimated_tokens: tokens, + }); + } + + let level = if config.auto_level { + CompactLevel::auto_select(tokens, config.token_threshold) + } else { + config.default_level + }; + + Ok(ThresholdResult::Compact { + estimated_tokens: tokens, + level, + }) + } + + /// Auto-compact a room: estimates token count, only compresses if over threshold. + /// + /// This is the recommended entry point for automatic compaction. + /// - If tokens < threshold → returns a no-op summary (empty summary, no compression) + /// - If tokens >= threshold → compresses with auto-selected level + pub async fn compact_room_auto( + &self, + room_id: Uuid, + user_names: Option>, + config: CompactConfig, + ) -> Result { + let threshold_result = self.check_threshold(room_id, config).await?; + + match threshold_result { + ThresholdResult::Skip { .. } => { + // Below threshold — no compaction needed, return empty summary + let messages = self.fetch_room_messages(room_id).await?; + let user_ids: Vec = messages.iter().filter_map(|m| m.sender_id).collect(); + let user_name_map = match user_names { + Some(map) => map, + None => self.get_user_name_map(&user_ids).await?, + }; + let retained: Vec = messages + .iter() + .map(|m| Self::message_to_summary(m, &user_name_map)) + .collect(); + + return Ok(CompactSummary { + session_id: Uuid::new_v4(), + room_id, + retained, + summary: String::new(), + compacted_at: Utc::now(), + messages_compressed: 0, + usage: None, + }); + } + ThresholdResult::Compact { level, .. 
} => { + // Above threshold — compress with selected level + return self + .compact_room_with_level(room_id, level, user_names) + .await; + } + } + } + + /// Compact a room with a specific level (bypassing threshold check). + /// Use this when the caller has already decided compaction is needed. + async fn compact_room_with_level( + &self, + room_id: Uuid, + level: CompactLevel, + user_names: Option>, + ) -> Result { + let messages = self.fetch_room_messages(room_id).await?; + + let user_ids: Vec = messages.iter().filter_map(|m| m.sender_id).collect(); + let user_name_map = match user_names { + Some(map) => map, + None => self.get_user_name_map(&user_ids).await?, + }; + + if messages.len() <= level.retain_count() { + let retained: Vec = messages + .iter() + .map(|m| Self::message_to_summary(m, &user_name_map)) + .collect(); + return Ok(CompactSummary { + session_id: Uuid::new_v4(), + room_id, + retained, + summary: String::new(), + compacted_at: Utc::now(), + messages_compressed: 0, + usage: None, + }); + } + + let retain_count = level.retain_count(); + let split_index = messages.len().saturating_sub(retain_count); + let (to_summarize, retained_messages) = messages.split_at(split_index); + + let retained: Vec = retained_messages + .iter() + .map(|m| Self::message_to_summary(m, &user_name_map)) + .collect(); + + let (summary, remote_usage) = self.summarize_messages(to_summarize).await?; + + let summarized_text = to_summarize + .iter() + .map(|m| m.content.as_str()) + .collect::>() + .join("\n"); + let usage = resolve_usage(remote_usage, &self.model, &summarized_text, &summary); + + Ok(CompactSummary { + session_id: Uuid::new_v4(), + room_id, + retained, + summary, + compacted_at: Utc::now(), + messages_compressed: to_summarize.len(), + usage: Some(usage), + }) + } + + /// Estimate total token count of a message list using tiktoken. 
    fn estimate_message_tokens(&self, messages: &[RoomMessageModel]) -> usize {
        let total_chars: usize = messages.iter().map(|m| m.content.len()).sum();
        // Rough estimate: ~4 chars per token (safe upper bound)
        total_chars / 4
    }

    /// Convert a DB message row into a `MessageSummary`, resolving the sender
    /// name from the map (falling back to the sender type's string form).
    fn message_to_summary(
        m: &RoomMessageModel,
        user_name_map: &std::collections::HashMap<Uuid, String>,
    ) -> MessageSummary {
        let sender_name = m
            .sender_id
            .and_then(|id| user_name_map.get(&id).cloned())
            .unwrap_or_else(|| m.sender_type.to_string());
        MessageSummary {
            id: m.id,
            sender_type: m.sender_type.clone(),
            sender_id: m.sender_id,
            sender_name,
            content: m.content.clone(),
            content_type: m.content_type.clone(),
            tool_call_id: Self::extract_tool_call_id(&m.content),
            send_at: m.send_at,
        }
    }

    /// Pull `tool_call_id` out of a JSON-encoded message body, if present.
    /// Non-JSON content yields `None`.
    fn extract_tool_call_id(content: &str) -> Option<String> {
        let content = content.trim();
        if let Ok(v) = serde_json::from_str::<Value>(content) {
            v.get("tool_call_id")
                .and_then(|v| v.as_str())
                .map(|s| s.to_string())
        } else {
            None
        }
    }

    /// Load a room's messages ordered by sequence number.
    async fn fetch_room_messages(
        &self,
        room_id: Uuid,
    ) -> Result<Vec<RoomMessageModel>, AgentError> {
        let messages: Vec<RoomMessageModel> = RoomMessage::find()
            .filter(RmCol::Room.eq(room_id))
            .order_by_asc(RmCol::Seq)
            .all(&self.db)
            .await
            .map_err(|e| AgentError::Internal(e.to_string()))?;
        Ok(messages)
    }

    /// Resolve user ids to usernames; unknown ids are simply absent from the map.
    async fn get_user_name_map(
        &self,
        user_ids: &[Uuid],
    ) -> Result<std::collections::HashMap<Uuid, String>, AgentError> {
        use std::collections::HashMap;
        let mut map = HashMap::new();
        if !user_ids.is_empty() {
            let users = User::find()
                .filter(UserCol::Uid.is_in(user_ids.to_vec()))
                .all(&self.db)
                .await
                .map_err(|e| AgentError::Internal(e.to_string()))?;
            for user in users {
                map.insert(user.uid, user.username);
            }
        }
        Ok(map)
    }

    /// Summarise a slice of messages via one non-streaming chat completion.
    /// Returns the summary text plus remote token usage when the API reported it.
    async fn summarize_messages(
        &self,
        messages: &[RoomMessageModel],
    ) -> Result<(String, Option<TokenUsage>), AgentError> {
        // Collect distinct user IDs
        let user_ids: Vec<Uuid> = messages
            .iter()
            .filter_map(|m| m.sender_id)
            .collect::<std::collections::HashSet<_>>()
            .into_iter()
            .collect();

        // Query usernames
        let user_name_map = self.get_user_name_map(&user_ids).await?;

        // Define sender mapper
        let sender_mapper = |m: &RoomMessageModel| {
            if let Some(user_id) = m.sender_id {
                if let Some(username) = user_name_map.get(&user_id) {
                    return username.clone();
                }
            }
            m.sender_type.to_string()
        };

        let body = crate::compact::helpers::messages_to_text(messages, sender_mapper);

        let user_msg = ChatCompletionRequestMessage::User(ChatCompletionRequestUserMessage {
            content: async_openai::types::chat::ChatCompletionRequestUserMessageContent::Text(
                format!(
                    "Summarise the following conversation concisely, preserving all key facts, \
                     decisions, and any pending or in-progress work. \
                     Use this format:\n\n\
                     **Summary:** \n\
                     **Key decisions:** \n\
                     **Open items:** \n\n\
                     Conversation:\n\n{}",
                    body
                ),
            ),
            ..Default::default()
        });

        let request = CreateChatCompletionRequest {
            model: self.model.clone(),
            messages: vec![user_msg],
            stream: Some(false),
            ..Default::default()
        };

        let response: CreateChatCompletionResponse = self
            .openai
            .chat()
            .create(request)
            .await
            .map_err(|e| AgentError::OpenAi(e.to_string()))?;

        let text = response
            .choices
            .first()
            .and_then(|c| c.message.content.clone())
            .unwrap_or_default();

        // Prefer remote usage; fall back to None (caller will use tiktoken via resolve_usage)
        let remote_usage = response
            .usage
            .as_ref()
            .and_then(|u| TokenUsage::from_remote(u.prompt_tokens, u.completion_tokens));

        Ok((text, remote_usage))
    }
}
diff --git a/libs/agent/compact/types.rs b/libs/agent/compact/types.rs
new file mode 100644
index 0000000..05b0493
--- /dev/null
+++ b/libs/agent/compact/types.rs
@@ -0,0 +1,130 @@
use chrono::{DateTime, Utc};
use models::rooms::{
    MessageContentType, MessageSenderType, room_message::Model as RoomMessageModel,
};
use serde::{Deserialize, Serialize};
use serde_json::Value;
use uuid::Uuid;

use crate::tokent::TokenUsage;

/// A single retained message in lightweight, serialisable form.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct MessageSummary {
    pub id: Uuid,
    pub sender_type: MessageSenderType,
    pub sender_id: Option<Uuid>,
    pub sender_name: String,
    pub content: String,
    pub content_type: MessageContentType,
    /// Tool call ID extracted from message content JSON, if present.
    pub tool_call_id: Option<String>,
    pub send_at: DateTime<Utc>,
}

/// Result of one compaction run: an AI summary of the older messages plus the
/// retained recent tail.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CompactSummary {
    pub session_id: Uuid,
    pub room_id: Uuid,
    pub retained: Vec<MessageSummary>,
    pub summary: String,
    pub compacted_at: DateTime<Utc>,
    pub messages_compressed: usize,
    /// Token usage for the compaction AI call. `None` if usage data was unavailable.
    pub usage: Option<TokenUsage>,
}

/// How aggressively to compact (how many recent messages survive verbatim).
#[derive(Debug, Clone, Copy)]
pub enum CompactLevel {
    Light,
    Aggressive,
}

impl CompactLevel {
    /// Number of most-recent messages kept uncompressed.
    pub fn retain_count(&self) -> usize {
        match self {
            CompactLevel::Light => 5,
            CompactLevel::Aggressive => 2,
        }
    }

    /// Auto-select level based on estimated token count and config.
    ///
    /// - `Light` (retain 5): when tokens are moderately over threshold
    /// - `Aggressive` (retain 2): when tokens are severely over threshold (2x+)
    pub fn auto_select(estimated_tokens: usize, threshold: usize) -> Self {
        if threshold == 0 {
            return CompactLevel::Light;
        }
        if estimated_tokens >= threshold * 2 {
            CompactLevel::Aggressive
        } else {
            CompactLevel::Light
        }
    }
}

/// Configuration for automatic compaction.
#[derive(Debug, Clone, Copy)]
pub struct CompactConfig {
    /// Only trigger compaction when estimated token count exceeds this.
    /// Set to 0 to disable threshold (always compact when messages > retain_count).
    pub token_threshold: usize,
    /// If true, auto-select level based on how far over the threshold we are.
    /// If false, always use `default_level`.
    pub auto_level: bool,
    /// Fallback level when `auto_level` is false.
    pub default_level: CompactLevel,
}

impl Default for CompactConfig {
    fn default() -> Self {
        // Trigger when estimated tokens exceed ~8k (reasonable for a context window)
        Self {
            token_threshold: 8000,
            auto_level: true,
            default_level: CompactLevel::Light,
        }
    }
}

/// Result of a threshold check before deciding whether to compact.
#[derive(Debug)]
pub enum ThresholdResult {
    /// Token count is below threshold — skip compaction.
    Skip { estimated_tokens: usize },
    /// Token count exceeds threshold — compact with this level.
    Compact {
        estimated_tokens: usize,
        level: CompactLevel,
    },
}

impl From<RoomMessageModel> for MessageSummary {
    /// Build a summary straight from a DB row; with no name map available the
    /// sender name falls back to the sender type's string form.
    fn from(m: RoomMessageModel) -> Self {
        let sender_type = m.sender_type.clone();
        let content = m.content.clone();
        Self {
            id: m.id,
            sender_type: sender_type.clone(),
            sender_id: m.sender_id,
            sender_name: sender_type.to_string(),
            content,
            content_type: m.content_type.clone(),
            tool_call_id: Self::extract_tool_call_id(&m.content),
            send_at: m.send_at,
        }
    }
}

impl MessageSummary {
    /// Pull `tool_call_id` out of JSON message content, if present.
    fn extract_tool_call_id(content: &str) -> Option<String> {
        let content = content.trim();
        if let Ok(v) = serde_json::from_str::<Value>(content) {
            v.get("tool_call_id")
                .and_then(|v| v.as_str())
                .map(|s| s.to_string())
        } else {
            None
        }
    }
}
diff --git a/libs/agent/embed/client.rs b/libs/agent/embed/client.rs
new file mode 100644
index 0000000..d0b3358
--- /dev/null
+++ b/libs/agent/embed/client.rs
@@ -0,0 +1,209 @@
use async_openai::Client;
use async_openai::types::embeddings::CreateEmbeddingRequestArgs;
use serde::{Deserialize, Serialize};

use crate::embed::qdrant::QdrantClient;

/// Pairs an OpenAI embedding client with a Qdrant vector store.
pub struct EmbedClient {
    // NOTE(review): generic argument reconstructed — confirm Client<OpenAIConfig>.
    openai: Client<async_openai::config::OpenAIConfig>,
    qdrant: QdrantClient,
}

/// A vector point ready for upsert: id, embedding, and payload.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct EmbedVector {
    pub id: String,
    pub vector: Vec<f32>,
    pub payload: EmbedPayload,
}

/// Payload stored alongside each vector.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct EmbedPayload {
    pub entity_type: String,
    pub
entity_id: String, + pub text: String, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub extra: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct SearchResult { + pub id: String, + pub score: f32, + pub payload: EmbedPayload, +} + +impl EmbedClient { + pub fn new(openai: Client, qdrant: QdrantClient) -> Self { + Self { openai, qdrant } + } + + pub async fn embed_text(&self, text: &str, model: &str) -> crate::Result> { + let request = CreateEmbeddingRequestArgs::default() + .model(model) + .input(text) + .build() + .map_err(|e| crate::AgentError::OpenAi(e.to_string()))?; + + let response = self + .openai + .embeddings() + .create(request) + .await + .map_err(|e| crate::AgentError::OpenAi(e.to_string()))?; + + response + .data + .first() + .map(|d| d.embedding.clone()) + .ok_or_else(|| crate::AgentError::OpenAi("no embedding returned".into())) + } + + pub async fn embed_batch(&self, texts: &[String], model: &str) -> crate::Result>> { + let request = CreateEmbeddingRequestArgs::default() + .model(model) + .input(texts.to_vec()) + .build() + .map_err(|e| crate::AgentError::OpenAi(e.to_string()))?; + + let response = self + .openai + .embeddings() + .create(request) + .await + .map_err(|e| crate::AgentError::OpenAi(e.to_string()))?; + + let mut embeddings = vec![Vec::new(); texts.len()]; + for data in response.data { + if (data.index as usize) < embeddings.len() { + embeddings[data.index as usize] = data.embedding; + } + } + Ok(embeddings) + } + + pub async fn upsert(&self, points: Vec) -> crate::Result<()> { + self.qdrant.upsert_points(points).await + } + + pub async fn search( + &self, + query: &str, + entity_type: &str, + model: &str, + limit: usize, + ) -> crate::Result> { + let vector = self.embed_text(query, model).await?; + self.qdrant.search(&vector, entity_type, limit).await + } + + pub async fn search_with_filter( + &self, + query: &str, + entity_type: &str, + model: &str, + limit: usize, + filter: 
qdrant_client::qdrant::Filter, + ) -> crate::Result> { + let vector = self.embed_text(query, model).await?; + self.qdrant + .search_with_filter(&vector, entity_type, limit, filter) + .await + } + + pub async fn delete_by_entity_id( + &self, + entity_type: &str, + entity_id: &str, + ) -> crate::Result<()> { + self.qdrant.delete_by_filter(entity_type, entity_id).await + } + + pub async fn ensure_collection(&self, entity_type: &str, dimensions: u64) -> crate::Result<()> { + self.qdrant.ensure_collection(entity_type, dimensions).await + } + + pub async fn ensure_memory_collection(&self, dimensions: u64) -> crate::Result<()> { + self.qdrant.ensure_memory_collection(dimensions).await + } + + pub async fn ensure_skill_collection(&self, dimensions: u64) -> crate::Result<()> { + self.qdrant.ensure_skill_collection(dimensions).await + } + + /// Embed and store a conversation memory (message) in Qdrant. + pub async fn embed_memory( + &self, + id: &str, + text: &str, + room_id: &str, + user_id: Option<&str>, + ) -> crate::Result<()> { + let vector = self.embed_text(text, "").await?; + let point = EmbedVector { + id: id.to_string(), + vector, + payload: EmbedPayload { + entity_type: "memory".to_string(), + entity_id: room_id.to_string(), + text: text.to_string(), + extra: serde_json::json!({ "user_id": user_id }).into(), + }, + }; + self.qdrant.upsert_points(vec![point]).await + } + + /// Search memory embeddings by semantic similarity within a room. + pub async fn search_memories( + &self, + query: &str, + model: &str, + room_id: &str, + limit: usize, + ) -> crate::Result> { + let vector = self.embed_text(query, model).await?; + let mut results = self.qdrant.search_memory(&vector, limit + 1).await?; + // Filter to the specific room + results.retain(|r| r.payload.entity_id == room_id); + results.truncate(limit); + Ok(results) + } + + /// Embed and store a skill in Qdrant. 
+ pub async fn embed_skill( + &self, + id: &str, + name: &str, + description: &str, + content: &str, + project_uuid: &str, + ) -> crate::Result<()> { + let text = format!("{}: {} {}", name, description, content); + let vector = self.embed_text(&text, "").await?; + let point = EmbedVector { + id: id.to_string(), + vector, + payload: EmbedPayload { + entity_type: "skill".to_string(), + entity_id: project_uuid.to_string(), + text, + extra: serde_json::json!({ "name": name, "description": description }).into(), + }, + }; + self.qdrant.upsert_points(vec![point]).await + } + + /// Search skill embeddings by semantic similarity within a project. + pub async fn search_skills( + &self, + query: &str, + model: &str, + project_uuid: &str, + limit: usize, + ) -> crate::Result> { + let vector = self.embed_text(query, model).await?; + let mut results = self.qdrant.search_skill(&vector, limit + 1).await?; + results.retain(|r| r.payload.entity_id == project_uuid); + results.truncate(limit); + Ok(results) + } +} diff --git a/libs/agent/embed/mod.rs b/libs/agent/embed/mod.rs new file mode 100644 index 0000000..e6daffb --- /dev/null +++ b/libs/agent/embed/mod.rs @@ -0,0 +1,30 @@ +pub mod client; +pub mod qdrant; +pub mod service; + +use async_openai::config::OpenAIConfig; + +pub use client::{EmbedClient, EmbedPayload, EmbedVector, SearchResult}; +pub use qdrant::QdrantClient; +pub use service::{EmbedService, Embeddable}; + +pub async fn new_embed_client(config: &config::AppConfig) -> crate::Result { + let base_url = config + .get_embed_model_base_url() + .map_err(|e| crate::AgentError::Internal(e.to_string()))?; + let api_key = config + .get_embed_model_api_key() + .map_err(|e| crate::AgentError::Internal(e.to_string()))?; + let qdrant_url = config + .get_qdrant_url() + .map_err(|e| crate::AgentError::Internal(e.to_string()))?; + let qdrant_api_key = config.get_qdrant_api_key(); + + let openai = async_openai::Client::with_config( + OpenAIConfig::new() + .with_api_base(base_url) + 
.with_api_key(api_key), + ); + let qdrant = QdrantClient::new(&qdrant_url, qdrant_api_key.as_deref()).await?; + Ok(EmbedClient::new(openai, qdrant)) +} diff --git a/libs/agent/embed/qdrant.rs b/libs/agent/embed/qdrant.rs new file mode 100644 index 0000000..3f996cb --- /dev/null +++ b/libs/agent/embed/qdrant.rs @@ -0,0 +1,312 @@ +use qdrant_client::Qdrant; +use qdrant_client::qdrant::{ + Condition, CreateCollectionBuilder, DeletePointsBuilder, Distance, FieldCondition, Filter, + Match, PointStruct, SearchPointsBuilder, UpsertPointsBuilder, VectorParamsBuilder, Vectors, + condition::ConditionOneOf, r#match::MatchValue, point_id::PointIdOptions, value, +}; +use std::collections::HashMap; +use std::sync::Arc; + +use super::client::{EmbedPayload, SearchResult}; +use crate::embed::client::EmbedVector; + +pub struct QdrantClient { + inner: Arc, +} + +impl Clone for QdrantClient { + fn clone(&self) -> Self { + Self { + inner: self.inner.clone(), + } + } +} + +impl QdrantClient { + pub async fn new(url: &str, api_key: Option<&str>) -> crate::Result { + let mut builder = Qdrant::from_url(url); + if let Some(key) = api_key { + builder = builder.api_key(key); + } + let inner = builder + .build() + .map_err(|e| crate::AgentError::Qdrant(e.to_string()))?; + Ok(Self { + inner: Arc::new(inner), + }) + } + + fn collection_name(entity_type: &str) -> String { + format!("embed_{}", entity_type) + } + + pub async fn ensure_collection(&self, entity_type: &str, dimensions: u64) -> crate::Result<()> { + let name = Self::collection_name(entity_type); + let exists = self + .inner + .collection_exists(&name) + .await + .map_err(|e| crate::AgentError::Qdrant(e.to_string()))?; + + if exists { + return Ok(()); + } + + let create_collection = CreateCollectionBuilder::new(name) + .vectors_config(VectorParamsBuilder::new(dimensions, Distance::Cosine)) + .build(); + + self.inner + .create_collection(create_collection) + .await + .map_err(|e| crate::AgentError::Qdrant(e.to_string()))?; + + Ok(()) + 
} + + pub async fn upsert_points(&self, points: Vec) -> crate::Result<()> { + if points.is_empty() { + return Ok(()); + } + + let collection_name = Self::collection_name(&points[0].payload.entity_type); + + let qdrant_points: Vec = points + .into_iter() + .map(|p| { + let mut payload: HashMap = HashMap::new(); + payload.insert("entity_type".to_string(), p.payload.entity_type.into()); + payload.insert("entity_id".to_string(), p.payload.entity_id.into()); + payload.insert("text".to_string(), p.payload.text.into()); + if let Some(extra) = p.payload.extra { + let extra_str = serde_json::to_string(&extra).unwrap_or_default(); + payload.insert( + "extra".to_string(), + qdrant_client::qdrant::Value { + kind: Some( + qdrant_client::qdrant::value::Kind::StringValue(extra_str), + ), + }, + ); + } + + PointStruct::new(p.id, Vectors::from(p.vector), payload) + }) + .collect(); + + let upsert = UpsertPointsBuilder::new(collection_name, qdrant_points).build(); + + self.inner + .upsert_points(upsert) + .await + .map_err(|e| crate::AgentError::Qdrant(e.to_string()))?; + + Ok(()) + } + + fn extract_string(value: &qdrant_client::qdrant::Value) -> String { + match &value.kind { + Some(value::Kind::StringValue(s)) => s.clone(), + _ => String::new(), + } + } + + pub async fn search( + &self, + vector: &[f32], + entity_type: &str, + limit: usize, + ) -> crate::Result> { + let collection_name = Self::collection_name(entity_type); + + let search = SearchPointsBuilder::new(collection_name, vector.to_vec(), limit as u64) + .with_payload(true) + .build(); + + let results = self + .inner + .search_points(search) + .await + .map_err(|e| crate::AgentError::Qdrant(e.to_string()))?; + + Ok(results + .result + .into_iter() + .filter_map(|p| { + let entity_type = p + .payload + .get(&"entity_type".to_string()) + .map(Self::extract_string) + .unwrap_or_default(); + + let entity_id = p + .payload + .get(&"entity_id".to_string()) + .map(Self::extract_string) + .unwrap_or_default(); + + let text = p + 
.payload + .get(&"text".to_string()) + .map(Self::extract_string) + .unwrap_or_default(); + + let extra = p + .payload + .get(&"extra".to_string()) + .and_then(|v| Some(Self::extract_string(v))) + .and_then(|s| serde_json::from_str::(&s).ok()); + + let id = + p.id.and_then(|id| id.point_id_options) + .map(|opts| match opts { + PointIdOptions::Uuid(s) => s, + PointIdOptions::Num(n) => n.to_string(), + }) + .unwrap_or_default(); + + Some(SearchResult { + id, + score: p.score, + payload: EmbedPayload { + entity_type, + entity_id, + text, + extra, + }, + }) + }) + .collect()) + } + + pub async fn search_with_filter( + &self, + vector: &[f32], + entity_type: &str, + limit: usize, + filter: Filter, + ) -> crate::Result> { + let collection_name = Self::collection_name(entity_type); + + let search = SearchPointsBuilder::new(collection_name, vector.to_vec(), limit as u64) + .with_payload(true) + .filter(filter) + .build(); + + let results = self + .inner + .search_points(search) + .await + .map_err(|e| crate::AgentError::Qdrant(e.to_string()))?; + + Ok(results + .result + .into_iter() + .filter_map(|p| { + let entity_type = p + .payload + .get(&"entity_type".to_string()) + .map(Self::extract_string) + .unwrap_or_default(); + + let entity_id = p + .payload + .get(&"entity_id".to_string()) + .map(Self::extract_string) + .unwrap_or_default(); + + let text = p + .payload + .get(&"text".to_string()) + .map(Self::extract_string) + .unwrap_or_default(); + + let extra = p + .payload + .get(&"extra".to_string()) + .and_then(|v| Some(Self::extract_string(v))) + .and_then(|s| serde_json::from_str::(&s).ok()); + + let id = + p.id.and_then(|id| id.point_id_options) + .map(|opts| match opts { + PointIdOptions::Uuid(s) => s, + PointIdOptions::Num(n) => n.to_string(), + }) + .unwrap_or_default(); + + Some(SearchResult { + id, + score: p.score, + payload: EmbedPayload { + entity_type, + entity_id, + text, + extra, + }, + }) + }) + .collect()) + } + + pub async fn delete_by_filter(&self, 
entity_type: &str, entity_id: &str) -> crate::Result<()> { + let collection_name = Self::collection_name(entity_type); + + let filter = Filter { + must: vec![Condition { + condition_one_of: Some(ConditionOneOf::Field(FieldCondition { + key: "entity_id".to_string(), + r#match: Some(Match { + match_value: Some(MatchValue::Keyword(entity_id.to_string())), + }), + ..Default::default() + })), + }], + ..Default::default() + }; + + let delete = DeletePointsBuilder::new(collection_name) + .points(filter) + .build(); + + self.inner + .delete_points(delete) + .await + .map_err(|e| crate::AgentError::Qdrant(e.to_string()))?; + + Ok(()) + } + + pub async fn delete_collection(&self, entity_type: &str) -> crate::Result<()> { + let name = Self::collection_name(entity_type); + self.inner + .delete_collection(name) + .await + .map_err(|e| crate::AgentError::Qdrant(e.to_string()))?; + Ok(()) + } + + pub async fn ensure_memory_collection(&self, dimensions: u64) -> crate::Result<()> { + self.ensure_collection("memory", dimensions).await + } + + pub async fn ensure_skill_collection(&self, dimensions: u64) -> crate::Result<()> { + self.ensure_collection("skill", dimensions).await + } + + pub async fn search_memory( + &self, + vector: &[f32], + limit: usize, + ) -> crate::Result> { + self.search(vector, "memory", limit).await + } + + pub async fn search_skill( + &self, + vector: &[f32], + limit: usize, + ) -> crate::Result> { + self.search(vector, "skill", limit).await + } +} diff --git a/libs/agent/embed/service.rs b/libs/agent/embed/service.rs new file mode 100644 index 0000000..971f540 --- /dev/null +++ b/libs/agent/embed/service.rs @@ -0,0 +1,232 @@ +use async_trait::async_trait; +use qdrant_client::qdrant::Filter; +use sea_orm::DatabaseConnection; +use std::sync::Arc; + +use super::client::{EmbedClient, EmbedPayload, EmbedVector, SearchResult}; + +#[async_trait] +pub trait Embeddable { + fn entity_type(&self) -> &'static str; + fn to_text(&self) -> String; + fn entity_id(&self) -> 
String; +} + +pub struct EmbedService { + client: Arc, + db: DatabaseConnection, + model_name: String, + dimensions: u64, +} + +impl EmbedService { + pub fn new( + client: EmbedClient, + db: DatabaseConnection, + model_name: String, + dimensions: u64, + ) -> Self { + Self { + client: Arc::new(client), + db, + model_name, + dimensions, + } + } + + pub async fn embed_issue( + &self, + id: &str, + title: &str, + body: Option<&str>, + ) -> crate::Result<()> { + let text = match body { + Some(b) if !b.is_empty() => format!("{}\n\n{}", title, b), + _ => title.to_string(), + }; + + let vector = self.client.embed_text(&text, &self.model_name).await?; + + let point = EmbedVector { + id: id.to_string(), + vector, + payload: EmbedPayload { + entity_type: "issue".to_string(), + entity_id: id.to_string(), + text, + extra: None, + }, + }; + + self.client.upsert(vec![point]).await + } + + pub async fn embed_repo( + &self, + id: &str, + name: &str, + description: Option<&str>, + ) -> crate::Result<()> { + let text = match description { + Some(d) if !d.is_empty() => format!("{}: {}", name, d), + _ => name.to_string(), + }; + + let vector = self.client.embed_text(&text, &self.model_name).await?; + + let point = EmbedVector { + id: id.to_string(), + vector, + payload: EmbedPayload { + entity_type: "repo".to_string(), + entity_id: id.to_string(), + text, + extra: None, + }, + }; + + self.client.upsert(vec![point]).await + } + + pub async fn embed_issues( + &self, + items: Vec, + ) -> crate::Result<()> { + if items.is_empty() { + return Ok(()); + } + + let texts: Vec = items.iter().map(|i| i.to_text()).collect(); + let embeddings = self.client.embed_batch(&texts, &self.model_name).await?; + + let points: Vec = items + .into_iter() + .zip(embeddings.into_iter()) + .map(|(item, vector)| EmbedVector { + id: item.entity_id(), + vector, + payload: EmbedPayload { + entity_type: item.entity_type().to_string(), + entity_id: item.entity_id(), + text: item.to_text(), + extra: None, + }, + }) + 
.collect(); + + self.client.upsert(points).await + } + + pub async fn search_issues( + &self, + query: &str, + limit: usize, + ) -> crate::Result> { + self.client + .search(query, "issue", &self.model_name, limit) + .await + } + + pub async fn search_repos( + &self, + query: &str, + limit: usize, + ) -> crate::Result> { + self.client + .search(query, "repo", &self.model_name, limit) + .await + } + + pub async fn search_issues_filtered( + &self, + query: &str, + limit: usize, + filter: Filter, + ) -> crate::Result> { + self.client + .search_with_filter(query, "issue", &self.model_name, limit, filter) + .await + } + + pub async fn delete_issue_embedding(&self, issue_id: &str) -> crate::Result<()> { + self.client.delete_by_entity_id("issue", issue_id).await + } + + pub async fn delete_repo_embedding(&self, repo_id: &str) -> crate::Result<()> { + self.client.delete_by_entity_id("repo", repo_id).await + } + + pub async fn ensure_collections(&self) -> crate::Result<()> { + self.client + .ensure_collection("issue", self.dimensions) + .await?; + self.client + .ensure_collection("repo", self.dimensions) + .await?; + self.client.ensure_skill_collection(self.dimensions).await?; + self.client.ensure_memory_collection(self.dimensions).await?; + Ok(()) + } + + pub fn db(&self) -> &DatabaseConnection { + &self.db + } + + pub fn client(&self) -> &Arc { + &self.client + } + + /// Embed a project skill into Qdrant for vector-based semantic search. + pub async fn embed_skill( + &self, + skill_id: i64, + name: &str, + description: Option<&str>, + content: &str, + project_uuid: &str, + ) -> crate::Result<()> { + let desc = description.unwrap_or_default(); + let id = skill_id.to_string(); + self.client + .embed_skill(&id, name, desc, content, project_uuid) + .await + } + + /// Search skills by semantic similarity within a project. 
+ pub async fn search_skills( + &self, + query: &str, + project_uuid: &str, + limit: usize, + ) -> crate::Result> { + self.client + .search_skills(query, &self.model_name, project_uuid, limit) + .await + } + + /// Embed a conversation message into Qdrant as a memory vector. + pub async fn embed_memory( + &self, + message_id: i64, + text: &str, + room_id: &str, + user_id: Option<&str>, + ) -> crate::Result<()> { + let id = message_id.to_string(); + self.client + .embed_memory(&id, text, room_id, user_id) + .await + } + + /// Search past conversation messages by semantic similarity within a room. + pub async fn search_memories( + &self, + query: &str, + room_id: &str, + limit: usize, + ) -> crate::Result> { + self.client + .search_memories(query, &self.model_name, room_id, limit) + .await + } +} diff --git a/libs/agent/error.rs b/libs/agent/error.rs new file mode 100644 index 0000000..51320b7 --- /dev/null +++ b/libs/agent/error.rs @@ -0,0 +1,31 @@ +use thiserror::Error; + +#[derive(Error, Debug)] +pub enum AgentError { + #[error("openai error: {0}")] + OpenAi(String), + #[error("qdrant error: {0}")] + Qdrant(String), + #[error("internal error: {0}")] + Internal(String), +} + +pub type Result = std::result::Result; + +impl From for AgentError { + fn from(e: async_openai::error::OpenAIError) -> Self { + AgentError::OpenAi(e.to_string()) + } +} + +impl From for AgentError { + fn from(e: qdrant_client::QdrantError) -> Self { + AgentError::Qdrant(e.to_string()) + } +} + +impl From for AgentError { + fn from(e: sea_orm::DbErr) -> Self { + AgentError::Internal(e.to_string()) + } +} diff --git a/libs/agent/lib.rs b/libs/agent/lib.rs new file mode 100644 index 0000000..2efc8bf --- /dev/null +++ b/libs/agent/lib.rs @@ -0,0 +1,36 @@ +pub mod chat; +pub mod client; +pub mod compact; +pub mod embed; +pub mod error; +pub mod perception; +pub mod react; +pub mod task; +pub mod tokent; +pub mod tool; +pub use task::TaskService; +pub use tokent::{TokenUsage, resolve_usage}; +pub use 
perception::{PerceptionService, SkillContext, SkillEntry, ToolCallEvent}; + +use async_openai::Client; +use async_openai::config::OpenAIConfig; +pub use chat::{ + AiContextSenderType, AiRequest, AiStreamChunk, ChatService, Mention, RoomMessageContext, + StreamCallback, +}; +pub use client::{AiCallResponse, AiClientConfig, call_with_params, call_with_retry}; +pub use compact::{CompactConfig, CompactLevel, CompactService, CompactSummary, MessageSummary}; +pub use embed::{EmbedClient, EmbedService, QdrantClient, SearchResult}; +pub use error::{AgentError, Result}; +pub use react::{ + Hook, HookAction, NoopHook, ReactAgent, ReactConfig, ReactStep, ToolCallAction, TracingHook, +}; +pub use tool::{ + ToolCall, ToolCallResult, ToolContext, ToolDefinition, ToolError, ToolExecutor, ToolParam, + ToolRegistry, ToolResult, ToolSchema, +}; + +#[derive(Clone)] +pub struct AgentService { + pub client: Client, +} diff --git a/libs/agent/perception/active.rs b/libs/agent/perception/active.rs new file mode 100644 index 0000000..4f56c2b --- /dev/null +++ b/libs/agent/perception/active.rs @@ -0,0 +1,167 @@ +//! Active skill awareness — proactive skill retrieval triggered by explicit user intent. +//! +//! The agent proactively loads a specific skill when the user explicitly references it +//! in their message. Patterns include: +//! +//! - Direct slug mention: "用 code-review", "使用 skill:code-review", "@code-review" +//! - Task-based invocation: "帮我 code review", "做一次 security scan" +//! - Intent keywords with skill context: "review 我的 PR", "scan for bugs" +//! +//! This is the highest-priority perception mode — if the user explicitly asks for a +//! skill, it always gets injected regardless of auto/passive scores. + +use super::{SkillContext, SkillEntry}; +use once_cell::sync::Lazy; +use regex::Regex; + +/// Active skill awareness that detects explicit skill invocations in user messages. 
+#[derive(Debug, Clone, Default)] +pub struct ActiveSkillAwareness; + +impl ActiveSkillAwareness { + pub fn new() -> Self { + Self + } + + /// Detect if the user explicitly invoked a skill in their message. + /// + /// Returns the first matching skill, or `None` if no explicit invocation is found. + /// + /// Matching patterns: + /// - `用 ` / `使用 ` (Chinese: "use / apply ") + /// - `skill:` (explicit namespace) + /// - `@` (GitHub-style mention) + /// - `帮我 ` / ` 帮我` (Chinese: "help me ") + /// - `做一次 ` / `进行一次 ` (Chinese: "do a ") + pub fn detect(&self, input: &str, skills: &[SkillEntry]) -> Option { + let input_lower = input.to_lowercase(); + + // Try each matching pattern in priority order. + if let Some(skill) = self.match_by_prefix_pattern(&input_lower, skills) { + return Some(skill); + } + + // Try matching by skill name (for natural language invocations). + if let Some(skill) = self.match_by_name(&input_lower, skills) { + return Some(skill); + } + + // Try matching by slug substring in the message. 
+ self.match_by_slug_substring(&input_lower, skills) + } + + /// Pattern: "用 code-review", "使用 skill:xxx", "@xxx", "skill:xxx" + fn match_by_prefix_pattern(&self, input: &str, skills: &[SkillEntry]) -> Option { + // Pattern 1: 英文 slug 前缀 "use ", "using ", "apply ", "with " + static USE_PAT: Lazy = + Lazy::new(|| Regex::new(r"(?i)^\s*(?:use|using|apply|with)\s+([a-z0-9/_-]+)").unwrap()); + + if let Some(caps) = USE_PAT.captures(input) { + let slug = caps.get(1)?.as_str().trim(); + return self.find_skill_by_slug(slug, skills); + } + + // Pattern 2: skill:xxx + static SKILL_COLON_PAT: Lazy = + Lazy::new(|| Regex::new(r"(?i)skill\s*:\s*([a-z0-9/_-]+)").unwrap()); + + if let Some(caps) = SKILL_COLON_PAT.captures(input) { + let slug = caps.get(1)?.as_str().trim(); + return self.find_skill_by_slug(slug, skills); + } + + // Pattern 3: @xxx (mention style) + static AT_PAT: Lazy = + Lazy::new(|| Regex::new(r"@([a-z0-9][a-z0-9_/-]*[a-z0-9])").unwrap()); + + if let Some(caps) = AT_PAT.captures(input) { + let slug = caps.get(1)?.as_str().trim(); + return self.find_skill_by_slug(slug, skills); + } + + // Pattern 4: 帮我 xxx, 做一个 xxx, 进行 xxx, 做 xxx + static ZH_PAT: Lazy = Lazy::new( + || Regex::new(r"(?ix)[\u4e00-\u9fff]+\s+(?:帮我|做一个|进行一次|做|使用|用)\s+([a-z0-9][a-z0-9_/-]{0,30})") + .unwrap(), + ); + + if let Some(caps) = ZH_PAT.captures(input) { + let slug_or_name = caps.get(1)?.as_str().trim(); + return self + .find_skill_by_slug(slug_or_name, skills) + .or_else(|| self.find_skill_by_name(slug_or_name, skills)); + } + + None + } + + /// Match by skill name in natural language (e.g., "code review" → "code-review") + fn match_by_name(&self, input: &str, skills: &[SkillEntry]) -> Option { + for skill in skills { + // Normalize skill name to a search pattern: "Code Review" -> "code review" + let name_lower = skill.name.to_lowercase(); + + // Direct substring match (the skill name appears in the input). 
+ if input.contains(&name_lower) { + return Some(SkillContext { + label: format!("Active skill: {}", skill.name), + content: format!("# {} (actively invoked)\n\n{}", skill.name, skill.content), + }); + } + + // Try removing hyphens/underscores: "code-review" contains "code review" + let normalized_name = name_lower.replace(['-', '_'], " "); + if input.contains(&normalized_name) { + return Some(SkillContext { + label: format!("Active skill: {}", skill.name), + content: format!("# {} (actively invoked)\n\n{}", skill.name, skill.content), + }); + } + } + None + } + + /// Match by slug substring anywhere in the message. + fn match_by_slug_substring(&self, input: &str, skills: &[SkillEntry]) -> Option { + // Remove common command words to isolate the slug. + let cleaned = input + .replace("please ", "") + .replace("帮我", "") + .replace("帮我review", "") + .replace("帮我 code review", "") + .replace("帮我review", ""); + + for skill in skills { + let slug = skill.slug.to_lowercase(); + // Check if the slug (or any segment of it) appears as a word. 
+ if cleaned.contains(&slug) || slug.split('/').any(|seg| cleaned.contains(seg) && seg.len() > 3) + { + return Some(SkillContext { + label: format!("Active skill: {}", skill.name), + content: format!("# {} (actively invoked)\n\n{}", skill.name, skill.content), + }); + } + } + None + } + + fn find_skill_by_slug(&self, slug: &str, skills: &[SkillEntry]) -> Option { + let slug_lower = slug.to_lowercase(); + skills.iter().find(|s| s.slug.to_lowercase() == slug_lower).map(|skill| { + SkillContext { + label: format!("Active skill: {}", skill.name), + content: format!("# {} (actively invoked)\n\n{}", skill.name, skill.content), + } + }) + } + + fn find_skill_by_name(&self, name: &str, skills: &[SkillEntry]) -> Option { + let name_lower = name.to_lowercase(); + skills.iter().find(|s| s.name.to_lowercase() == name_lower).map(|skill| { + SkillContext { + label: format!("Active skill: {}", skill.name), + content: format!("# {} (actively invoked)\n\n{}", skill.name, skill.content), + } + }) + } +} diff --git a/libs/agent/perception/auto.rs b/libs/agent/perception/auto.rs new file mode 100644 index 0000000..68e1aa0 --- /dev/null +++ b/libs/agent/perception/auto.rs @@ -0,0 +1,178 @@ +//! Auto skill awareness — background scanning for skill relevance. +//! +//! Periodically (or on-demand) scans the conversation context to identify +//! which enabled skills might be relevant, based on keyword overlap between +//! the skill's metadata (name, description, content snippets) and the +//! conversation text. +//! +//! This is the "ambient awareness" mode — the agent is always aware of +//! which skills might apply without the user explicitly invoking them. + +use super::{SkillContext, SkillEntry}; + +/// Auto skill awareness config. +#[derive(Debug, Clone)] +pub struct AutoSkillAwareness { + /// Minimum keyword overlap score (0.0–1.0) to consider a skill relevant. + min_score: f32, + /// Maximum number of skills to inject via auto-awareness. 
+ max_skills: usize, +} + +impl Default for AutoSkillAwareness { + fn default() -> Self { + Self { + min_score: 0.15, + max_skills: 3, + } + } +} + +impl AutoSkillAwareness { + pub fn new(min_score: f32, max_skills: usize) -> Self { + Self { min_score, max_skills } + } + + /// Detect relevant skills by scoring keyword overlap between skill metadata + /// and the conversation text (current input + recent history). + /// + /// Returns up to `max_skills` skills sorted by relevance score. + pub async fn detect( + &self, + current_input: &str, + history: &[String], + skills: &[SkillEntry], + ) -> Vec { + if skills.is_empty() { + return Vec::new(); + } + + // Build a combined corpus from current input and recent history (last 5 messages). + let history_text: String = history + .iter() + .rev() + .take(5) + .map(|s| s.as_str()) + .collect::>() + .join(" "); + + let corpus = format!("{} {}", current_input, history_text).to_lowercase(); + + // Extract keywords from the corpus (split on whitespace + strip punctuation). + let corpus_keywords = Self::extract_keywords(&corpus); + + if corpus_keywords.is_empty() { + return Vec::new(); + } + + // Score each skill. + let mut scored: Vec<_> = skills + .iter() + .map(|skill| { + let score = Self::score_skill(&corpus_keywords, skill); + (score, skill) + }) + .filter(|(score, _)| *score >= self.min_score) + .collect(); + + // Sort descending by score. + scored.sort_by(|a, b| b.0.partial_cmp(&a.0).unwrap_or(std::cmp::Ordering::Equal)); + + scored + .into_iter() + .take(self.max_skills) + .map(|(_, skill)| { + // Extract a short relevant excerpt around the first keyword match. + let excerpt = Self::best_excerpt(&corpus, skill); + SkillContext { + label: format!("Auto skill: {}", skill.name), + content: excerpt, + } + }) + .collect() + } + + /// Extract meaningful keywords from text. + fn extract_keywords(text: &str) -> Vec { + // Common English + Chinese stopwords to filter out. 
+ const STOPWORDS: &[&str] = &[ + "the", "a", "an", "is", "are", "was", "were", "be", "been", "being", + "have", "has", "had", "do", "does", "did", "will", "would", "could", + "should", "may", "might", "can", "to", "of", "in", "for", "on", "with", + "at", "by", "from", "as", "or", "and", "but", "if", "not", "no", "so", + "this", "that", "these", "those", "it", "its", "i", "you", "he", "she", + "we", "they", "what", "which", "who", "when", "where", "why", "how", + "all", "each", "every", "both", "few", "more", "most", "other", "some", + "such", "only", "own", "same", "than", "too", "very", "just", "also", + "now", "here", "there", "then", "once", "again", "always", "ever", + "的", "了", "是", "在", "我", "你", "他", "她", "它", "们", "这", "那", + "个", "一", "上", "下", "来", "去", "说", "看", "想", "要", "会", "能", + "和", "与", "或", "不", "就", "也", "都", "还", "从", "到", "把", "被", + "让", "给", "用", "做", "为", "以", "及", "等", "很", "太", "比较", + ]; + + text.split_whitespace() + .filter(|w| { + let w_clean = w.trim_matches(|c: char| !c.is_alphanumeric()); + w_clean.len() >= 3 && !STOPWORDS.contains(&w_clean) + }) + .map(|w| w.to_lowercase()) + .collect() + } + + /// Score a skill by keyword overlap between the corpus keywords and the skill's + /// name + description + content (first 500 chars). 
+ fn score_skill(corpus_keywords: &[String], skill: &SkillEntry) -> f32 { + let skill_text = format!( + "{} {}", + skill.name, + skill.description.as_deref().unwrap_or("") + ); + let skill_text = skill_text.to_lowercase(); + let skill_keywords = Self::extract_keywords(&skill_text); + let content_sample = skill.content.chars().take(500).collect::().to_lowercase(); + let content_keywords = Self::extract_keywords(&content_sample); + let all_skill_keywords = [&skill_keywords[..], &content_keywords[..]].concat(); + + if all_skill_keywords.is_empty() { + return 0.0; + } + + let overlap: usize = corpus_keywords + .iter() + .filter(|kw| all_skill_keywords.iter().any(|sk| sk.contains(kw.as_str()) || kw.as_str().contains(sk.as_str()))) + .count(); + + overlap as f32 / all_skill_keywords.len().max(1) as f32 + } + + /// Extract the best excerpt from skill content — the paragraph most relevant to the corpus. + fn best_excerpt(corpus: &str, skill: &SkillEntry) -> String { + // Try to find a relevant paragraph: one that shares the most keywords with corpus. + let corpus_kws = Self::extract_keywords(corpus); + + let best_para = skill + .content + .split('\n') + .filter(|para| !para.trim().is_empty()) + .map(|para| { + let para_kws = Self::extract_keywords(¶.to_lowercase()); + let overlap: usize = corpus_kws + .iter() + .filter(|kw| para_kws.iter().any(|pk| pk.contains(kw.as_str()) || kw.as_str().contains(pk.as_str()))) + .count(); + (overlap, para) + }) + .filter(|(score, _)| *score > 0) + .max_by_key(|(score, _)| *score); + + if let Some((_, para)) = best_para { + // Return the best paragraph with a header. + format!("# {} (auto-matched)\n\n{}", skill.name, para.trim()) + } else { + // Fallback: use first 300 chars of content as excerpt. 
+ let excerpt = skill.content.chars().take(300).collect::(); + format!("# {} (auto-matched)\n\n{}...", skill.name, excerpt.trim()) + } + } +} diff --git a/libs/agent/perception/mod.rs b/libs/agent/perception/mod.rs new file mode 100644 index 0000000..306c351 --- /dev/null +++ b/libs/agent/perception/mod.rs @@ -0,0 +1,131 @@ +//! Skill perception system for the AI agent. +//! +//! Provides three perception modes for injecting relevant skills into the agent's context: +//! +//! - **Auto (自动感知)**: Background awareness that scans conversation content for skill +//! relevance based on keyword matching and semantic similarity. +//! +//! - **Active (主动感知)**: Proactive skill retrieval triggered by explicit user intent, +//! such as mentioning a skill slug directly in the message. Both keyword and vector-based. +//! +//! - **Passive (被动感知)**: Reactive skill retrieval triggered by tool-call events, +//! such as when the agent mentions a specific skill in its reasoning. Both keyword and +//! vector-based. + +pub mod active; +pub mod auto; +pub mod passive; +pub mod vector; + +pub use active::ActiveSkillAwareness; +pub use auto::AutoSkillAwareness; +pub use passive::PassiveSkillAwareness; +pub use vector::{VectorActiveAwareness, VectorPassiveAwareness}; + +use async_openai::types::chat::ChatCompletionRequestMessage; + +/// A chunk of skill context ready to be injected into the message list. +#[derive(Debug, Clone)] +pub struct SkillContext { + /// Human-readable label shown to the AI, e.g. "Active skill: code-review" + pub label: String, + /// The actual skill content to inject. + pub content: String, +} + +/// Converts skill context into a system message for injection. 
+impl SkillContext { + pub fn to_system_message(self) -> ChatCompletionRequestMessage { + use async_openai::types::chat::{ + ChatCompletionRequestSystemMessage, + ChatCompletionRequestSystemMessageContent, + }; + ChatCompletionRequestMessage::System(ChatCompletionRequestSystemMessage { + content: ChatCompletionRequestSystemMessageContent::Text(format!( + "[{}]\n{}", + self.label, self.content + )), + ..Default::default() + }) + } +} + +/// Unified perception service combining all three modes. +#[derive(Debug, Clone)] +pub struct PerceptionService { + pub auto: AutoSkillAwareness, + pub active: ActiveSkillAwareness, + pub passive: PassiveSkillAwareness, +} + +impl Default for PerceptionService { + fn default() -> Self { + Self { + auto: AutoSkillAwareness::default(), + active: ActiveSkillAwareness::default(), + passive: PassiveSkillAwareness::default(), + } + } +} + +impl PerceptionService { + /// Inject relevant skill context into the message list based on current conversation state. + /// + /// - **auto**: Scans the current input and conversation history for skill-relevant keywords + /// and injects matching skills that are enabled. + /// - **active**: Checks if the user explicitly invoked a skill by slug (e.g. "用 code-review") + /// and injects it. + /// - **passive**: Checks if any tool-call events or prior observations mention a skill + /// slug and injects the matching skill. + /// + /// Returns a list of system messages to prepend to the conversation. 
+ pub async fn inject_skills( + &self, + input: &str, + history: &[String], + tool_calls: &[ToolCallEvent], + enabled_skills: &[SkillEntry], + ) -> Vec { + let mut results = Vec::new(); + + // Active: explicit skill invocation (highest priority) + if let Some(skill) = self.active.detect(input, enabled_skills) { + results.push(skill); + } + + // Passive: triggered by tool-call events + for tc in tool_calls { + if let Some(skill) = self.passive.detect(tc, enabled_skills) { + if !results.iter().any(|r: &SkillContext| r.label == skill.label) { + results.push(skill); + } + } + } + + // Auto: keyword-based relevance matching + let auto_results = self.auto.detect(input, history, enabled_skills).await; + for skill in auto_results { + if !results.iter().any(|r: &SkillContext| r.label == skill.label) { + results.push(skill); + } + } + + results + } +} + +/// A tool-call event used for passive skill detection. +#[derive(Debug, Clone)] +pub struct ToolCallEvent { + pub tool_name: String, + pub arguments: String, +} + +/// A skill entry from the database, used for matching. +#[derive(Debug, Clone)] +pub struct SkillEntry { + pub slug: String, + pub name: String, + pub description: Option, + pub content: String, +} diff --git a/libs/agent/perception/passive.rs b/libs/agent/perception/passive.rs new file mode 100644 index 0000000..3de62b7 --- /dev/null +++ b/libs/agent/perception/passive.rs @@ -0,0 +1,144 @@ +//! Passive skill awareness — reactive skill retrieval triggered by events. +//! +//! The agent passively activates a skill when its slug or name appears in: +//! +//! - Tool call arguments (e.g., a tool is called with a repository name that matches a "git" skill) +//! - Tool call results / observations (e.g., a linter reports issues matching a "code-review" skill) +//! - System events emitted during the agent loop (e.g., "PR opened" → "pr-review" skill) +//! +//! This is lower-priority than active but higher than auto — it's triggered by +//! 
specific events rather than ambient relevance scoring. + +use super::{SkillContext, SkillEntry, ToolCallEvent}; + +/// Passive skill awareness triggered by tool-call and event context. +#[derive(Debug, Clone, Default)] +pub struct PassiveSkillAwareness; + +impl PassiveSkillAwareness { + pub fn new() -> Self { + Self + } + + /// Detect skill activation from tool-call events. + /// + /// The agent can passively "wake up" a skill when: + /// - A tool call's name or arguments contain a skill slug or keyword + /// - A tool call result mentions a skill name + /// + /// This is primarily driven by tool naming conventions and argument patterns. + /// For example, a tool named `git_diff` might passively activate a `git` skill. + pub fn detect(&self, event: &ToolCallEvent, skills: &[SkillEntry]) -> Option { + let tool_name = event.tool_name.to_lowercase(); + let args = event.arguments.to_lowercase(); + + for skill in skills { + let slug = skill.slug.to_lowercase(); + let name = skill.name.to_lowercase(); + + // Trigger 1: Tool name contains skill slug segment. + // e.g., tool "git_blame" → skill "git/*" activates + if Self::slug_in_text(&tool_name, &slug) { + return Some(Self::context_from_skill(skill, "tool invocation")); + } + + // Trigger 2: Tool arguments contain skill slug or name keywords. + // e.g., arguments mention "security" → "security/scan" skill + if Self::slug_in_text(&args, &slug) || Self::keyword_match(&args, &name) { + return Some(Self::context_from_skill(skill, "tool arguments")); + } + + // Trigger 3: Common tool prefixes that map to skill categories. + if let Some(cat_skill) = Self::match_tool_category(&tool_name, skills) { + return Some(cat_skill); + } + } + + None + } + + /// Detect skill activation from a raw text observation (e.g., tool result text). 
+ pub fn detect_from_text(&self, text: &str, skills: &[SkillEntry]) -> Option { + let text_lower = text.to_lowercase(); + + for skill in skills { + let slug = skill.slug.to_lowercase(); + let name = skill.name.to_lowercase(); + + if Self::slug_in_text(&text_lower, &slug) || Self::keyword_match(&text_lower, &name) { + return Some(Self::context_from_skill(skill, "observation match")); + } + } + + None + } + + /// Match common tool name prefixes to skill categories. + fn match_tool_category(tool_name: &str, skills: &[SkillEntry]) -> Option { + let category_map = [ + ("git_", "git"), + ("repo_", "repo"), + ("issue_", "issue"), + ("pr_", "pull_request"), + ("pull_request_", "pull_request"), + ("code_review", "code-review"), + ("security_scan", "security"), + ("linter", "linter"), + ("test_", "testing"), + ("deploy_", "deployment"), + ("docker_", "docker"), + ("k8s_", "kubernetes"), + ("db_", "database"), + ("sql_", "database"), + ]; + + for (prefix, category) in category_map { + if tool_name.starts_with(prefix) { + if let Some(skill) = skills.iter().find(|s| { + s.slug.to_lowercase().contains(category) + || s.name.to_lowercase().contains(category) + }) { + return Some(Self::context_from_skill(skill, "tool category match")); + } + } + } + + None + } + + /// True if the slug (or a significant segment of it) appears in the text. + fn slug_in_text(text: &str, slug: &str) -> bool { + text.contains(slug) + || slug + .split('/') + .filter(|seg| seg.len() >= 3) + .any(|seg| text.contains(seg)) + } + + /// Match skill name keywords against the text (handles multi-word names). + fn keyword_match(text: &str, name: &str) -> bool { + // For multi-word names, require all significant words to appear. 
+ let significant: Vec<_> = name + .split(|c: char| !c.is_alphanumeric()) + .filter(|w| w.len() >= 3) + .collect(); + + if significant.len() >= 2 { + significant.iter().all(|w| text.contains(*w)) + } else { + significant.first().map_or(false, |w| text.contains(w)) + } + } + + fn context_from_skill(skill: &SkillEntry, trigger: &str) -> SkillContext { + SkillContext { + label: format!("Passive skill: {} ({})", skill.name, trigger), + content: format!( + "# {} (passive — {})\n\n{}", + skill.name, + trigger, + skill.content + ), + } + } +} diff --git a/libs/agent/perception/vector.rs b/libs/agent/perception/vector.rs new file mode 100644 index 0000000..03521e3 --- /dev/null +++ b/libs/agent/perception/vector.rs @@ -0,0 +1,163 @@ +//! Vector-based skill and memory awareness using Qdrant embeddings. +//! +//! Leverages semantic similarity search to find relevant skills and conversation +//! memories based on vector embeddings. This is more powerful than keyword matching +//! because it captures semantic meaning, not just surface-level word overlap. +//! +//! - **VectorActiveAwareness**: Searches skills by semantic similarity when the user +//! sends a message, finding skills relevant to the conversation topic. +//! +//! - **VectorPassiveAwareness**: Searches past conversation memories to provide relevant +//! historical context when similar topics arise, based on tool-call patterns. + +use async_openai::types::chat::{ + ChatCompletionRequestMessage, ChatCompletionRequestSystemMessage, + ChatCompletionRequestSystemMessageContent, +}; +use crate::embed::EmbedService; +use crate::perception::SkillContext; + +/// Maximum relevant memories to inject. +const MAX_MEMORY_RESULTS: usize = 3; +/// Minimum similarity score (0.0–1.0) for memories. +const MIN_MEMORY_SCORE: f32 = 0.72; +/// Maximum skills to return from vector search. +const MAX_SKILL_RESULTS: usize = 3; +/// Minimum similarity score for skills. 
+const MIN_SKILL_SCORE: f32 = 0.70; + +/// Vector-based active skill awareness — semantic search for relevant skills. +/// +/// When the user sends a message, this awareness mode searches the Qdrant skill index +/// for skills whose content is semantically similar to the message, even if no keywords +/// match directly. This captures intent beyond explicit skill mentions. +#[derive(Debug, Clone)] +pub struct VectorActiveAwareness { + pub max_skills: usize, + pub min_score: f32, +} + +impl Default for VectorActiveAwareness { + fn default() -> Self { + Self { + max_skills: MAX_SKILL_RESULTS, + min_score: MIN_SKILL_SCORE, + } + } +} + +impl VectorActiveAwareness { + /// Search for skills semantically relevant to the user's input. + /// + /// Uses Qdrant vector search within the given project to find skills whose + /// embedded content is similar to `query`. Only returns results above `min_score`. + pub async fn detect( + &self, + embed_service: &EmbedService, + query: &str, + project_uuid: &str, + ) -> Vec { + let results = match embed_service + .search_skills(query, project_uuid, self.max_skills) + .await + { + Ok(results) => results, + Err(_) => return Vec::new(), + }; + + results + .into_iter() + .filter(|r| r.score >= self.min_score) + .map(|r| { + let name = r + .payload + .extra + .as_ref() + .and_then(|v| v.get("name")) + .and_then(|v| v.as_str()) + .unwrap_or("skill") + .to_string(); + SkillContext { + label: format!("[Vector] Skill: {}", name), + content: format!( + "[Relevant skill (score {:.2})]\n{}", + r.score, r.payload.text + ), + } + }) + .collect() + } +} + +/// Vector-based passive memory awareness — retrieve relevant past context. +/// +/// When the agent encounters a topic (via tool-call or observation), this awareness +/// searches past conversation messages to find semantically similar prior discussions. +/// This gives the agent memory of how similar situations were handled before. 
+#[derive(Debug, Clone)] +pub struct VectorPassiveAwareness { + pub max_memories: usize, + pub min_score: f32, +} + +impl Default for VectorPassiveAwareness { + fn default() -> Self { + Self { + max_memories: MAX_MEMORY_RESULTS, + min_score: MIN_MEMORY_SCORE, + } + } +} + +impl VectorPassiveAwareness { + /// Search for past conversation messages semantically similar to the current context. + /// + /// Uses Qdrant to find memories within the same room that share semantic similarity + /// with the given query (usually the current input or a tool-call description). + /// High-scoring results suggest prior discussions on this topic. + pub async fn detect( + &self, + embed_service: &EmbedService, + query: &str, + room_id: &str, + ) -> Vec { + let results = match embed_service + .search_memories(query, room_id, self.max_memories) + .await + { + Ok(results) => results, + Err(_) => return Vec::new(), + }; + + results + .into_iter() + .filter(|r| r.score >= self.min_score) + .map(|r| MemoryContext { + score: r.score, + content: r.payload.text, + }) + .collect() + } +} + +/// A retrieved memory entry from vector search. +#[derive(Debug, Clone)] +pub struct MemoryContext { + /// Similarity score (0.0–1.0). + pub score: f32, + /// The text of the past conversation message. + pub content: String, +} + +impl MemoryContext { + /// Format as a system message for injection into the agent context. + pub fn to_system_message(self) -> ChatCompletionRequestMessage { + ChatCompletionRequestMessage::System(ChatCompletionRequestSystemMessage { + content: ChatCompletionRequestSystemMessageContent::Text(format!( + "[Relevant memory (score {:.2})]\n{}", + self.score, self.content + )), + ..Default::default() + }) + } +} diff --git a/libs/agent/react/hooks.rs b/libs/agent/react/hooks.rs new file mode 100644 index 0000000..b61d405 --- /dev/null +++ b/libs/agent/react/hooks.rs @@ -0,0 +1,130 @@ +//! Observability hooks for the ReAct agent loop. +//! +//! 
Hooks allow injecting custom behavior (logging, tracing, filtering, termination) +//! at each step of the reasoning loop without coupling to the core agent logic. +//! +//! Inspired by rig's `PromptHook` trait. +//! +//! # Example +//! +//! ```ignore +//! #[derive(Clone)] +//! struct MyHook; +//! +//! impl Hook for MyHook { +//! async fn on_thought(&self, step: usize, thought: &str) -> HookAction { +//! tracing::info!("[step {}] thinking: {}", step, thought); +//! HookAction::Continue +//! } +//! } +//! +//! let agent = ReactAgent::new(prompt, tools, config).with_hook(MyHook); +//! ``` + +use async_trait::async_trait; + +/// Controls whether the agent loop continues after a hook callback. +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum HookAction { + /// Continue processing normally. + Continue, + /// Skip the current step and continue. + Skip, + /// Terminate the loop immediately with the given reason. + Terminate(&'static str), +} + +/// Controls behavior after a tool call hook callback. +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum ToolCallAction { + /// Execute the tool normally. + Continue, + /// Skip tool execution and inject a custom result. + Skip(String), + /// Terminate the loop with the given reason. + Terminate(&'static str), +} + +/// Default no-op hook that does nothing. +#[derive(Debug, Clone, Copy, Default)] +pub struct NoopHook; + +impl Hook for NoopHook {} + +impl Hook for () {} + +/// A hook that logs everything to stderr using `eprintln`. +/// No external dependencies required. 
+#[derive(Debug, Clone, Copy, Default)] +pub struct TracingHook; + +impl TracingHook { + pub fn new() -> Self { + Self + } +} + +#[async_trait] +impl Hook for TracingHook { + async fn on_thought(&self, step: usize, thought: &str) -> HookAction { + eprintln!("[step {}] thought: {}", step, thought); + HookAction::Continue + } + + async fn on_tool_call(&self, step: usize, name: &str, args_json: &str) -> ToolCallAction { + eprintln!("[step {}] tool_call: {}({})", step, name, args_json); + ToolCallAction::Continue + } + + async fn on_observation(&self, step: usize, observation: &str) -> HookAction { + eprintln!("[step {}] observation: {}", step, observation); + HookAction::Continue + } + + async fn on_answer(&self, step: usize, answer: &str) -> HookAction { + eprintln!("[step {}] answer: {}", step, answer); + HookAction::Continue + } +} + +/// Hook trait for observing and controlling the ReAct agent loop. +/// +/// Implement this trait to inject custom behavior at each step: +/// - Log thoughts, tool calls, observations, and final answers +/// - Filter or redact sensitive data +/// - Dynamically terminate the loop based on content +/// - Inject custom tool results (e.g., for testing or sandboxing) +/// +/// All methods have default no-op implementations, so you only need to +/// override the ones you care about. +/// +/// The hook is called synchronously during the agent loop. Keep hook +/// callbacks fast — avoid blocking I/O. For heavy work, spawn a task +/// and return immediately. +#[async_trait] +pub trait Hook: Send + Sync { + /// Called when the agent emits a thought/reasoning step. + /// + /// Return `HookAction::Terminate` to stop the loop early. + async fn on_thought(&self, _step: usize, _thought: &str) -> HookAction { + HookAction::Continue + } + + /// Called just before a tool is executed. + /// + /// Return `ToolCallAction::Skip(result)` to skip execution and inject `result` instead. 
+ /// Return `ToolCallAction::Terminate` to stop the loop without executing the tool. + async fn on_tool_call(&self, _step: usize, _name: &str, _args_json: &str) -> ToolCallAction { + ToolCallAction::Continue + } + + /// Called after a tool returns an observation. + async fn on_observation(&self, _step: usize, _observation: &str) -> HookAction { + HookAction::Continue + } + + /// Called when the agent produces a final answer. + async fn on_answer(&self, _step: usize, _answer: &str) -> HookAction { + HookAction::Continue + } +} diff --git a/libs/agent/react/loop_core.rs b/libs/agent/react/loop_core.rs new file mode 100644 index 0000000..747c19e --- /dev/null +++ b/libs/agent/react/loop_core.rs @@ -0,0 +1,439 @@ +//! ReAct (Reasoning + Acting) agent core. + +use async_openai::types::chat::FunctionCall; +use async_openai::types::chat::{ + ChatCompletionMessageToolCall, ChatCompletionMessageToolCalls, + ChatCompletionRequestAssistantMessage, ChatCompletionRequestAssistantMessageContent, + ChatCompletionRequestMessage, ChatCompletionRequestToolMessage, + ChatCompletionRequestToolMessageContent, ChatCompletionRequestUserMessage, + ChatCompletionRequestUserMessageContent, +}; +use uuid::Uuid; + +use std::sync::Arc; + +use crate::call_with_params; +use crate::error::{AgentError, Result}; +use crate::react::hooks::{Hook, HookAction, NoopHook, ToolCallAction}; +use crate::react::types::{Action, ReactConfig, ReactStep}; + +pub use crate::react::types::{ReactConfig as ReActConfig, ReactStep as ReActStep}; + +/// A ReAct agent that performs multi-step tool-augmented reasoning. +#[derive(Clone)] +pub struct ReactAgent { + messages: Vec, + #[allow(dead_code)] + tool_definitions: Vec, + config: ReactConfig, + step_count: usize, + hook: Arc, +} + +impl ReactAgent { + /// Create a new agent with a system prompt and OpenAI tool definitions. 
+ pub fn new( + system_prompt: &str, + tools: Vec, + config: ReactConfig, + ) -> Self { + let messages = vec![ChatCompletionRequestMessage::User( + ChatCompletionRequestUserMessage { + content: ChatCompletionRequestUserMessageContent::Text(system_prompt.to_string()), + ..Default::default() + }, + )]; + Self { + messages, + tool_definitions: tools, + config, + step_count: 0, + hook: Arc::new(NoopHook), + } + } + + /// Add an initial user message to the conversation. + pub fn add_user_message(&mut self, content: &str) { + self.messages.push(ChatCompletionRequestMessage::User( + ChatCompletionRequestUserMessage { + content: ChatCompletionRequestUserMessageContent::Text(content.to_string()), + ..Default::default() + }, + )); + } + + /// Attach a hook to observe and control the agent loop. + /// + /// Hooks can log steps, filter content, inject custom tool results, + /// or terminate the loop early. Multiple `.with_hook()` calls replace + /// the previous hook. + /// + /// # Example + /// + /// ```ignore + /// #[derive(Clone)] + /// struct MyLogger; + /// + /// impl Hook for MyLogger { + /// async fn on_thought(&self, step: usize, thought: &str) -> HookAction { + /// eprintln!("[step {}] thought: {}", step, thought); + /// HookAction::Continue + /// } + /// } + /// + /// let agent = ReactAgent::new(prompt, tools, config).with_hook(MyLogger); + /// ``` + pub fn with_hook(mut self, hook: H) -> Self { + self.hook = Arc::new(hook); + self + } + + /// Run the ReAct loop until a final answer is produced or `max_steps` is reached. + /// + /// Yields streaming chunks via `on_chunk`. Each step produces: + /// - A `ReactStep::Thought` chunk when the AI emits reasoning + /// - A `ReactStep::Action` chunk when the AI emits a tool call + /// - A `ReactStep::Observation` chunk after the tool executes + /// - A `ReactStep::Answer` chunk when the loop terminates with a final answer + /// + /// Hooks are called at each phase (see [Hook]). 
Return [HookAction::Terminate] + /// from any hook to stop the loop early. + pub async fn run( + &mut self, + model_name: &str, + client_config: &crate::client::AiClientConfig, + mut on_chunk: C, + ) -> Result + where + C: FnMut(ReactStep) + Send, + { + loop { + if self.step_count >= self.config.max_steps { + return Err(AgentError::Internal(format!( + "ReAct agent reached max steps ({})", + self.config.max_steps + ))); + } + + self.step_count += 1; + let step = self.step_count; + + let response = call_with_params( + &self.messages, + model_name, + client_config, + 0.2, // temperature + 4096, // max output tokens + None, + if self.tool_definitions.is_empty() { + None + } else { + Some(self.tool_definitions.as_slice()) + }, + ) + .await?; + + let parsed = parse_react_response(&response.content); + let answer = parsed.answer.clone(); + let action = parsed.action.clone(); + + // Emit thought step. + on_chunk(ReactStep::Thought { + step, + thought: parsed.thought.clone(), + }); + + // Hook: thought + match self.hook.on_thought(step, &parsed.thought).await { + HookAction::Terminate(reason) => { + return Err(AgentError::Internal(format!( + "hook terminated at thought step: {}", + reason + ))); + } + HookAction::Skip => { + // Skip this step, go directly to answer if present + } + HookAction::Continue => {} + } + + // Final answer — emit and return. + if let Some(ans) = answer { + on_chunk(ReactStep::Answer { + step, + answer: ans.clone(), + }); + + // Hook: answer + match self.hook.on_answer(step, &ans).await { + HookAction::Terminate(reason) => { + return Err(AgentError::Internal(format!( + "hook terminated at answer step: {}", + reason + ))); + } + _ => {} + } + + return Ok(ans); + } + + // No answer — either do a tool call or fall back. 
+ let Some(act) = action else { + let content = response.content.clone(); + on_chunk(ReactStep::Answer { + step, + answer: content.clone(), + }); + + // Hook: answer (fallback) + match self.hook.on_answer(step, &content).await { + HookAction::Terminate(reason) => { + return Err(AgentError::Internal(format!( + "hook terminated at fallback answer: {}", + reason + ))); + } + _ => {} + } + + return Ok(content); + }; + + on_chunk(ReactStep::Action { + step, + action: act.clone(), + }); + + let args_json = serde_json::to_string(&act.args).unwrap_or_else(|_| "{}".to_string()); + + // Hook: tool call — can skip or terminate + match self.hook.on_tool_call(step, &act.name, &args_json).await { + ToolCallAction::Terminate(reason) => { + return Err(AgentError::Internal(format!( + "hook terminated at tool call: {}", + reason + ))); + } + ToolCallAction::Skip(injected_result) => { + // Skip actual execution, inject the provided result + let observation = injected_result; + on_chunk(ReactStep::Observation { + step, + observation: observation.clone(), + }); + + // Hook: observation (injected) + match self.hook.on_observation(step, &observation).await { + HookAction::Terminate(reason) => { + return Err(AgentError::Internal(format!( + "hook terminated at observation (injected): {}", + reason + ))); + } + _ => {} + } + + // Append observation as a tool message so the model sees it in context. + self.messages.push(ChatCompletionRequestMessage::Tool( + ChatCompletionRequestToolMessage { + tool_call_id: act.id.clone(), + content: ChatCompletionRequestToolMessageContent::Text(observation), + }, + )); + + continue; + } + ToolCallAction::Continue => {} + } + + // Append the assistant message with tool_calls to history. + let assistant_msg = build_tool_call_message(&act); + self.messages.push(assistant_msg); + + // Execute the tool. 
+ let observation = match &self.config.tool_executor { + Some(exec) => { + let result = exec(act.name.clone(), act.args.clone()).await; + match result { + Ok(v) => serde_json::to_string(&v).unwrap_or_else(|_| "null".to_string()), + Err(e) => serde_json::json!({ "error": e }).to_string(), + } + } + None => serde_json::json!({ + "error": format!("no tool executor registered for '{}'", act.name) + }) + .to_string(), + }; + + on_chunk(ReactStep::Observation { + step, + observation: observation.clone(), + }); + + // Hook: observation + match self.hook.on_observation(step, &observation).await { + HookAction::Terminate(reason) => { + return Err(AgentError::Internal(format!( + "hook terminated at observation step: {}", + reason + ))); + } + _ => {} + } + + // Append observation as a tool message so the model sees it in context. + self.messages.push(ChatCompletionRequestMessage::Tool( + ChatCompletionRequestToolMessage { + tool_call_id: act.id.clone(), + content: ChatCompletionRequestToolMessageContent::Text(observation), + }, + )); + } + } + + /// Returns the number of steps executed so far. + pub fn steps(&self) -> usize { + self.step_count + } +} + +// --------------------------------------------------------------------------- +// Response parsing +// --------------------------------------------------------------------------- + +struct ParsedReActResponse { + thought: String, + action: Option, + answer: Option, +} + +/// Parse the AI's text response into a ReAct step. 
+/// +/// The AI is prompted (via system prompt in `ReactAgent::new`) to respond with +/// JSON in one of these forms: +/// +/// ```json +/// { "thought": "...", "action": { "name": "tool_name", "arguments": {...} } } +/// { "thought": "...", "answer": "final answer text" } +/// ``` +fn parse_react_response(content: &str) -> ParsedReActResponse { + let json_str = extract_json(content).unwrap_or_else(|| content.trim().to_string()); + + #[derive(serde::Deserialize)] + struct RawStep { + #[serde(default)] + thought: Option, + #[serde(default)] + action: Option, + #[serde(default)] + answer: Option, + #[serde(default)] + name: Option, + #[serde(default, rename = "arguments")] + args: Option, + } + + #[derive(serde::Deserialize)] + struct RawAction { + #[serde(default)] + name: Option, + #[serde(default, rename = "arguments")] + args: Option, + } + + match serde_json::from_str::(&json_str) { + Ok(raw) => { + let thought = raw.thought.unwrap_or_else(|| "Thinking...".to_string()); + let answer = raw.answer; + let action = raw.action.map(|a| Action { + id: Uuid::new_v4().to_string(), + name: a.name.unwrap_or_default(), + args: a.args.unwrap_or(serde_json::Value::Null), + }); + // Handle flat format: { "name": "...", "arguments": {...} } + let action = action.or_else(|| { + if raw.name.is_some() || raw.args.is_some() { + Some(Action { + id: Uuid::new_v4().to_string(), + name: raw.name.unwrap_or_default(), + args: raw.args.unwrap_or(serde_json::Value::Null), + }) + } else { + None + } + }); + + ParsedReActResponse { + thought, + action, + answer, + } + } + Err(_) => ParsedReActResponse { + thought: content.to_string(), + action: None, + answer: None, + }, + } +} + +/// Extract the first JSON object or array from a string, handling markdown fences. 
+fn extract_json(s: &str) -> Option { + let trimmed = s.trim(); + if trimmed.starts_with('{') || trimmed.starts_with('[') { + return Some(trimmed.to_string()); + } + for line in trimmed.lines() { + let line = line.trim(); + if line.starts_with("```json") || line.starts_with("```") { + let mut buf = String::new(); + let mut found_start = false; + for l in trimmed.lines() { + let l = l.trim(); + if !found_start && (l == "```json" || l == "```") { + found_start = true; + continue; + } + if found_start && l == "```" { + break; + } + if found_start { + buf.push_str(l); + buf.push('\n'); + } + } + let result = buf.trim().to_string(); + if !result.is_empty() { + return Some(result); + } + } + } + None +} + +/// Build an assistant message with tool_calls from an Action. +#[allow(deprecated)] +fn build_tool_call_message(action: &Action) -> ChatCompletionRequestMessage { + let fn_arg_str = serde_json::to_string(&action.args).unwrap_or_else(|_| "{}".to_string()); + + ChatCompletionRequestMessage::Assistant(ChatCompletionRequestAssistantMessage { + content: Some(ChatCompletionRequestAssistantMessageContent::Text(format!( + "Action: {}", + action.name + ))), + name: None, + refusal: None, + audio: None, + tool_calls: Some(vec![ChatCompletionMessageToolCalls::Function( + ChatCompletionMessageToolCall { + id: action.id.clone(), + function: FunctionCall { + name: action.name.clone(), + arguments: fn_arg_str, + }, + }, + )]), + function_call: None, + }) +} diff --git a/libs/agent/react/mod.rs b/libs/agent/react/mod.rs new file mode 100644 index 0000000..edb9b6e --- /dev/null +++ b/libs/agent/react/mod.rs @@ -0,0 +1,13 @@ +//! ReAct (Reason + Act) agent loop for structured tool use. +//! +//! The agent alternates between a **thought** phase (reasoning about what to do) +//! and an **action** phase (calling tools). Observations from tool results feed +//! back into the next thought, enabling multi-step reasoning. 
+ +pub mod hooks; +pub mod loop_core; +pub mod types; + +pub use hooks::{Hook, HookAction, NoopHook, ToolCallAction, TracingHook}; +pub use loop_core::ReactAgent; +pub use types::{ReactConfig, ReactStep}; diff --git a/libs/agent/react/types.rs b/libs/agent/react/types.rs new file mode 100644 index 0000000..ce06d26 --- /dev/null +++ b/libs/agent/react/types.rs @@ -0,0 +1,94 @@ +//! ReAct agent types. + +use std::sync::Arc; + +use serde::{Deserialize, Serialize}; +use uuid::Uuid; + +/// Callback for executing a named tool with JSON arguments. +pub type ToolExecutorFn = Arc< + dyn Fn( + String, + serde_json::Value, + ) -> std::pin::Pin< + Box> + Send>, + > + Send + + Sync, +>; + +/// Configuration for a ReAct agent. +#[derive(Clone)] +pub struct ReactConfig { + /// Maximum number of ReAct steps before giving up. + pub max_steps: usize, + /// Stop sequences that trigger early termination. + pub stop_sequences: Vec, + /// Optional tool executor callback. If `None`, tool calls return an error. + pub tool_executor: Option, +} + +impl Default for ReactConfig { + fn default() -> Self { + Self { + max_steps: 10, + stop_sequences: Vec::new(), + tool_executor: None, + } + } +} + +impl std::fmt::Debug for ReactConfig { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("ReactConfig") + .field("max_steps", &self.max_steps) + .field("stop_sequences", &self.stop_sequences) + .field( + "tool_executor", + &self + .tool_executor + .as_ref() + .map(|_| "...") + .unwrap_or(""), + ) + .finish() + } +} + +/// An action (tool call) requested by the ReAct agent. 
+#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct Action { + pub id: String, + pub name: String, + pub args: serde_json::Value, +} + +impl Action { + pub fn new(name: &str, args: serde_json::Value) -> Self { + Self { + id: Uuid::new_v4().to_string(), + name: name.to_string(), + args, + } + } +} + +// --------------------------------------------------------------------------- +// Step events emitted during the ReAct loop +// --------------------------------------------------------------------------- + +/// A single event emitted during a ReAct step. +/// +/// These are yielded via the streaming callback so the caller (service layer) +/// can persist them to the database or forward them to a WebSocket client. +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(tag = "type", rename_all = "snake_case")] +pub enum ReactStep { + /// The AI's reasoning/thinking for this step. + Thought { step: usize, thought: String }, + /// The AI requested a tool call. + Action { step: usize, action: Action }, + /// Result returned by the executed tool. + Observation { step: usize, observation: String }, + /// Final answer produced by the agent. + Answer { step: usize, answer: String }, +} diff --git a/libs/agent/task/mod.rs b/libs/agent/task/mod.rs new file mode 100644 index 0000000..37aa529 --- /dev/null +++ b/libs/agent/task/mod.rs @@ -0,0 +1,22 @@ +//! Agent task service — unified task/sub-agent execution framework. +//! +//! A task (`agent_task` record) can be: +//! - A **root task**: initiated by a user or system event. +//! The parent/Supervisor agent spawns sub-tasks and coordinates their results. +//! - A **sub-task**: a unit of work executed by a sub-agent. +//! +//! Execution flow: +//! 1. Create task record (status = pending) +//! 2. Notify listeners (WebSocket: task_started) +//! 3. Spawn execution (tokio::spawn or via room queue) +//! 4. Update progress (status = running, progress = "step 2/5: ...") +//! 5. 
On completion: update output + status = done / error + status = failed +//! 6. Notify listeners (WebSocket: task_done) +//! 7. If root task: notify parent/Supervisor to aggregate results +//! +//! This module is intentionally kept simple and synchronous with the DB. +//! Long-running execution is delegated to the caller (tokio::spawn). + +pub mod service; + +pub use service::TaskService; diff --git a/libs/agent/task/service.rs b/libs/agent/task/service.rs new file mode 100644 index 0000000..801b59c --- /dev/null +++ b/libs/agent/task/service.rs @@ -0,0 +1,209 @@ +//! Task service for creating, tracking, and executing agent tasks. +//! +//! All methods are async and interact with the database directly. +//! Execution of the task logic (running the ReAct loop, etc.) is delegated +//! to the caller — this service only manages task lifecycle and state. + +use db::database::AppDatabase; +use models::agent_task::{ + ActiveModel, AgentType, Column as C, Entity, Model, TaskStatus, +}; +use sea_orm::{ + entity::EntityTrait, query::{QueryFilter, QueryOrder, QuerySelect}, ActiveModelTrait, + ColumnTrait, DbErr, +}; + +/// Service for managing agent tasks (root tasks and sub-tasks). +#[derive(Clone)] +pub struct TaskService { + db: AppDatabase, +} + +impl TaskService { + pub fn new(db: AppDatabase) -> Self { + Self { db } + } + + /// Create a new task (root or sub-task) with status = pending. + pub async fn create( + &self, + project_uuid: impl Into, + input: impl Into, + agent_type: AgentType, + ) -> Result { + self.create_with_parent(project_uuid, None, input, agent_type, None).await + } + + /// Create a new sub-task with a parent reference. 
+ pub async fn create_subtask( + &self, + project_uuid: impl Into, + parent_id: i64, + input: impl Into, + agent_type: AgentType, + title: Option, + ) -> Result { + self.create_with_parent(project_uuid, Some(parent_id), input, agent_type, title) + .await + } + + async fn create_with_parent( + &self, + project_uuid: impl Into, + parent_id: Option, + input: impl Into, + agent_type: AgentType, + title: Option, + ) -> Result { + let model = ActiveModel { + project_uuid: sea_orm::Set(project_uuid.into()), + parent_id: sea_orm::Set(parent_id), + agent_type: sea_orm::Set(agent_type), + status: sea_orm::Set(TaskStatus::Pending), + title: sea_orm::Set(title), + input: sea_orm::Set(input.into()), + ..Default::default() + }; + model.insert(&self.db).await + } + + /// Mark a task as running and record the start time. + pub async fn start(&self, task_id: i64) -> Result { + let model = Entity::find_by_id(task_id).one(&self.db).await?; + let model = model.ok_or_else(|| { + DbErr::RecordNotFound("agent_task not found".to_string()) + })?; + + let mut active: ActiveModel = model.into(); + active.status = sea_orm::Set(TaskStatus::Running); + active.started_at = sea_orm::Set(Some(chrono::Utc::now().into())); + active.updated_at = sea_orm::Set(chrono::Utc::now().into()); + active.update(&self.db).await + } + + /// Update progress text (e.g., "step 2/5: analyzing PR"). + pub async fn update_progress(&self, task_id: i64, progress: impl Into) -> Result<(), DbErr> { + let model = Entity::find_by_id(task_id).one(&self.db).await?; + let model = model.ok_or_else(|| { + DbErr::RecordNotFound("agent_task not found".to_string()) + })?; + + let mut active: ActiveModel = model.into(); + active.progress = sea_orm::Set(Some(progress.into())); + active.updated_at = sea_orm::Set(chrono::Utc::now().into()); + active.update(&self.db).await?; + Ok(()) + } + + /// Mark a task as completed with the output text. 
+ pub async fn complete(&self, task_id: i64, output: impl Into) -> Result { + let model = Entity::find_by_id(task_id).one(&self.db).await?; + let model = model.ok_or_else(|| { + DbErr::RecordNotFound("agent_task not found".to_string()) + })?; + + let mut active: ActiveModel = model.into(); + active.status = sea_orm::Set(TaskStatus::Done); + active.output = sea_orm::Set(Some(output.into())); + active.done_at = sea_orm::Set(Some(chrono::Utc::now().into())); + active.updated_at = sea_orm::Set(chrono::Utc::now().into()); + active.update(&self.db).await + } + + /// Mark a task as failed with an error message. + pub async fn fail(&self, task_id: i64, error: impl Into) -> Result { + let model = Entity::find_by_id(task_id).one(&self.db).await?; + let model = model.ok_or_else(|| { + DbErr::RecordNotFound("agent_task not found".to_string()) + })?; + + let mut active: ActiveModel = model.into(); + active.status = sea_orm::Set(TaskStatus::Failed); + active.error = sea_orm::Set(Some(error.into())); + active.done_at = sea_orm::Set(Some(chrono::Utc::now().into())); + active.updated_at = sea_orm::Set(chrono::Utc::now().into()); + active.update(&self.db).await + } + + /// Get a task by ID. + pub async fn get(&self, task_id: i64) -> Result, DbErr> { + Entity::find_by_id(task_id).one(&self.db).await + } + + /// List all sub-tasks for a parent task. + pub async fn children(&self, parent_id: i64) -> Result, DbErr> { + Entity::find() + .filter(C::ParentId.eq(parent_id)) + .order_by_asc(C::CreatedAt) + .all(&self.db) + .await + } + + /// List all active (non-terminal) tasks for a project. + pub async fn active_tasks(&self, project_uuid: impl Into) -> Result, DbErr> { + let uuid: uuid::Uuid = project_uuid.into(); + Entity::find() + .filter(C::ProjectUuid.eq(uuid)) + .filter(C::Status.is_in([TaskStatus::Pending, TaskStatus::Running])) + .order_by_desc(C::CreatedAt) + .all(&self.db) + .await + } + + /// List all tasks (root only) for a project. 
+ pub async fn list( + &self, + project_uuid: impl Into, + limit: u64, + ) -> Result, DbErr> { + let uuid: uuid::Uuid = project_uuid.into(); + Entity::find() + .filter(C::ProjectUuid.eq(uuid)) + .filter(C::ParentId.is_null()) + .order_by_desc(C::CreatedAt) + .limit(limit) + .all(&self.db) + .await + } + + /// Delete a task and all its sub-tasks recursively. + /// Only allows deletion of root tasks. + pub async fn delete(&self, task_id: i64) -> Result<(), DbErr> { + self.delete_recursive(task_id).await + } + + async fn delete_recursive(&self, task_id: i64) -> Result<(), DbErr> { + // Collect all task IDs to delete using an explicit stack (avoiding async recursion). + let mut stack = vec![task_id]; + let mut idx = 0; + while idx < stack.len() { + let current = stack[idx]; + let children = Entity::find() + .filter(C::ParentId.eq(current)) + .all(&self.db) + .await?; + for child in children { + stack.push(child.id); + } + idx += 1; + } + + for task_id in stack { + let model = Entity::find_by_id(task_id).one(&self.db).await?; + if let Some(m) = model { + let active: ActiveModel = m.into(); + active.delete(&self.db).await?; + } + } + Ok(()) + } + + /// Check if all sub-tasks of a given parent are done. + pub async fn are_children_done(&self, parent_id: i64) -> Result { + let children = self.children(parent_id).await?; + if children.is_empty() { + return Ok(true); + } + Ok(children.iter().all(|c| c.is_done())) + } +} diff --git a/libs/agent/tokent.rs b/libs/agent/tokent.rs new file mode 100644 index 0000000..e49d493 --- /dev/null +++ b/libs/agent/tokent.rs @@ -0,0 +1,199 @@ +//! Token counting utilities using tiktoken. +//! +//! Provides accurate token counting for OpenAI-compatible models. +//! Uses the `tiktoken-rs` crate (already in workspace dependencies). +//! +//! # Strategy +//! +//! Remote usage from API response is always preferred. When the API does not +//! return usage metadata (e.g., local models, streaming), tiktoken is used as +//! 
a fallback for accurate counting. + +use crate::error::{AgentError, Result}; + +/// Token usage data. Use `from_remote()` when the API returns usage info, +/// or `from_estimate()` when falling back to tiktoken. +#[derive(Debug, Clone, Copy, Default, serde::Serialize, serde::Deserialize)] +pub struct TokenUsage { + pub input_tokens: i64, + pub output_tokens: i64, +} + +impl TokenUsage { + /// Create from remote API usage data. Returns `None` if all values are zero + /// (some providers return zeroed usage on error). + pub fn from_remote(prompt_tokens: u32, completion_tokens: u32) -> Option { + if prompt_tokens == 0 && completion_tokens == 0 { + None + } else { + Some(Self { + input_tokens: prompt_tokens as i64, + output_tokens: completion_tokens as i64, + }) + } + } + + /// Create from tiktoken estimate. + pub fn from_estimate(input_tokens: usize, output_tokens: usize) -> Self { + Self { + input_tokens: input_tokens as i64, + output_tokens: output_tokens as i64, + } + } + + pub fn total(&self) -> i64 { + self.input_tokens + self.output_tokens + } +} + +/// Resolve token usage: remote data is preferred, tiktoken is the fallback. +/// +/// `remote` — `Some` when API returned usage; `None` when not available. +/// `model` — model name, required for tiktoken fallback. +/// `input_text` — input text length hint for fallback estimate (uses ~4 chars/token). +pub fn resolve_usage( + remote: Option, + model: &str, + input_text: &str, + output_text: &str, +) -> TokenUsage { + if let Some(usage) = remote { + return usage; + } + + // Fallback: tiktoken estimate + let input = count_message_text(input_text, model).unwrap_or_else(|_| { + // Rough estimate: ~4 chars per token + (input_text.len() / 4).max(1) + }); + let output = output_text.len() / 4; + TokenUsage::from_estimate(input, output) +} + +/// Estimate the number of tokens in a text string using the appropriate tokenizer. 
+pub fn count_text(text: &str, model: &str) -> Result { + let bpe = get_tokenizer(model)?; + // Use encode_ordinary since we're counting raw text, not chat messages + let tokens = bpe.encode_ordinary(text); + Ok(tokens.len()) +} + +/// Count tokens in a single chat message (text content only). +pub fn count_message_text(text: &str, model: &str) -> Result { + let bpe = get_tokenizer(model)?; + // For messages, use encode_with_special_tokens to count role/separator tokens + let tokens = bpe.encode_with_special_tokens(text); + Ok(tokens.len()) +} + +/// Estimate the maximum number of characters that fit within a token budget +/// given a model's context limit and a reserve for the output. +/// +/// Uses a rough estimate of ~4 characters per token (typical for English text). +/// For non-Latin scripts, this is less accurate. +pub fn estimate_max_chars( + _model: &str, + context_limit: usize, + reserve_output_tokens: usize, +) -> Result { + let chars_per_token = 4; + // Subtract reserve for output, system overhead, and a safety margin (10%) + let safe_limit = context_limit + .saturating_sub(reserve_output_tokens) + .saturating_sub(512); // 512 token safety margin + Ok(safe_limit.saturating_mul(chars_per_token)) +} + +/// Truncate text to fit within a token budget for a given model. 
+pub fn truncate_to_token_budget( + text: &str, + model: &str, + context_limit: usize, + reserve_output_tokens: usize, +) -> Result { + let max_chars = estimate_max_chars(model, context_limit, reserve_output_tokens)?; + + if text.len() <= max_chars { + return Ok(text.to_string()); + } + + // Binary search for the exact character boundary that fits the token budget + let bpe = get_tokenizer(model)?; + let mut low = 0usize; + let mut high = text.len(); + let mut result = text.to_string(); + + while low + 100 < high { + let mid = (low + high) / 2; + let candidate = &text[..mid]; + let tokens = bpe.encode_ordinary(candidate); + + if tokens.len() <= safe_token_budget(context_limit, reserve_output_tokens) { + result = candidate.to_string(); + low = mid; + } else { + high = mid; + } + } + + Ok(result) +} + +/// Returns the safe token budget (context limit minus reserve and margin). +fn safe_token_budget(context_limit: usize, reserve: usize) -> usize { + context_limit.saturating_sub(reserve).saturating_sub(512) +} + +/// Get the appropriate tiktoken tokenizer for a model. +/// +/// Model name mapping: +/// - "gpt-4o", "o1", "o3", "o4" → o200k_base +/// - "claude-*", "gpt-3.5-turbo", "gpt-4" → cl100k_base +/// - Unknown → cl100k_base (safe fallback) +fn get_tokenizer(model: &str) -> Result { + use tiktoken_rs; + + // Try model-specific tokenizer first + if let Ok(bpe) = tiktoken_rs::get_bpe_from_model(model) { + return Ok(bpe); + } + + // Fallback: use cl100k_base for unknown models + tiktoken_rs::cl100k_base() + .map_err(|e| AgentError::Internal(format!("Failed to init tokenizer: {}", e))) +} + +/// Estimate tokens for a simple prefix/suffix pattern (e.g., "assistant\n" + text). +/// Returns the token count including the prefix. 
+pub fn count_with_prefix(text: &str, prefix: &str, model: &str) -> Result { + let bpe = get_tokenizer(model)?; + let prefixed = format!("{}{}", prefix, text); + let tokens = bpe.encode_with_special_tokens(&prefixed); + Ok(tokens.len()) +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_count_text() { + let count = count_text("Hello, world!", "gpt-4").unwrap(); + assert!(count > 0); + } + + #[test] + fn test_estimate_max_chars() { + // gpt-4o context ~128k tokens + let chars = estimate_max_chars("gpt-4o", 128_000, 2048).unwrap(); + assert!(chars > 0); + } + + #[test] + fn test_truncate() { + // 50k chars exceeds budget: 8192 - 512 - 512 = 7168 tokens → ~28k chars + let long_text = "a".repeat(50000); + let truncated = truncate_to_token_budget(&long_text, "gpt-4o", 8192, 512).unwrap(); + assert!(truncated.len() < long_text.len()); + } +} diff --git a/libs/agent/tool/call.rs b/libs/agent/tool/call.rs new file mode 100644 index 0000000..0aa38b0 --- /dev/null +++ b/libs/agent/tool/call.rs @@ -0,0 +1,108 @@ +//! Tool call and result types. + +use serde::{Deserialize, Serialize}; + +/// A single tool invocation requested by the AI model. +#[derive(Debug, Clone)] +pub struct ToolCall { + pub id: String, + pub name: String, + pub arguments: String, +} + +impl ToolCall { + pub fn arguments_json(&self) -> serde_json::Result { + serde_json::from_str(&self.arguments) + } + + pub fn parse_args(&self) -> serde_json::Result { + serde_json::from_str(&self.arguments) + } +} + +/// The result of executing a tool call. +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(untagged)] +pub enum ToolResult { + /// Successful result with a JSON value. + Ok(serde_json::Value), + /// Error result with an error message. 
+ Error(String), +} + +impl ToolResult { + pub fn ok(value: T) -> Self { + Self::Ok(serde_json::to_value(value).unwrap_or(serde_json::Value::Null)) + } + + pub fn error(message: impl Into) -> Self { + Self::Error(message.into()) + } + + pub fn is_error(&self) -> bool { + matches!(self, Self::Error(_)) + } +} + +/// Errors that can occur during tool execution. +#[derive(Debug, thiserror::Error)] +pub enum ToolError { + #[error("tool not found: {0}")] + NotFound(String), + + #[error("argument parse error: {0}")] + ParseError(String), + + #[error("execution error: {0}")] + ExecutionError(String), + + #[error("recursion limit exceeded (max depth: {max_depth})")] + RecursionLimitExceeded { max_depth: u32 }, + + #[error("max tool calls exceeded: {0}")] + MaxToolCallsExceeded(usize), + + #[error("internal error: {0}")] + Internal(String), +} + +impl ToolError { + pub fn into_result(self) -> ToolResult { + ToolResult::Error(self.to_string()) + } +} + +impl From for ToolError { + fn from(e: serde_json::Error) -> Self { + Self::ParseError(e.to_string()) + } +} + +/// A completed tool call with its result, ready to be sent back to the AI. +#[derive(Debug, Clone)] +pub struct ToolCallResult { + /// The original tool call. + pub call: ToolCall, + /// The execution result. + pub result: ToolResult, +} + +impl ToolCallResult { + pub fn ok(call: ToolCall, value: serde_json::Value) -> Self { + Self { + call, + result: ToolResult::Ok(value), + } + } + + pub fn error(call: ToolCall, message: impl Into) -> Self { + Self { + call, + result: ToolResult::Error(message.into()), + } + } + + pub fn from_result(call: ToolCall, result: ToolResult) -> Self { + Self { call, result } + } +} diff --git a/libs/agent/tool/context.rs b/libs/agent/tool/context.rs new file mode 100644 index 0000000..42697b2 --- /dev/null +++ b/libs/agent/tool/context.rs @@ -0,0 +1,133 @@ +//! Execution context passed to each tool handler. +//! +//! 
Carries runtime information a tool handler needs: database, cache, +//! request metadata, and the tool registry. Cheap to clone via `Arc`. + +use std::sync::Arc; + +use db::cache::AppCache; +use db::database::AppDatabase; +use uuid::Uuid; + +use super::registry::ToolRegistry; + +/// Context available during tool execution. Cheap to clone via `Arc`. +#[derive(Clone)] +pub struct ToolContext { + inner: Arc, +} + +#[derive(Clone)] +struct Inner { + pub db: AppDatabase, + pub cache: AppCache, + pub room_id: Uuid, + pub sender_id: Option, + pub project_id: Uuid, + pub registry: ToolRegistry, + depth: u32, + max_depth: u32, + tool_call_count: usize, + max_tool_calls: usize, +} + +impl ToolContext { + pub fn new(db: AppDatabase, cache: AppCache, room_id: Uuid, sender_id: Option) -> Self { + Self { + inner: Arc::new(Inner { + db, + cache, + room_id, + sender_id, + project_id: Uuid::nil(), + registry: ToolRegistry::new(), + depth: 0, + max_depth: 5, + tool_call_count: 0, + max_tool_calls: 128, + }), + } + } + + pub fn with_project(mut self, project_id: Uuid) -> Self { + Arc::make_mut(&mut self.inner).project_id = project_id; + self + } + + pub fn with_registry(mut self, registry: ToolRegistry) -> Self { + Arc::make_mut(&mut self.inner).registry = registry; + self + } + + pub fn with_max_depth(mut self, max_depth: u32) -> Self { + Arc::make_mut(&mut self.inner).max_depth = max_depth; + self + } + + pub fn with_max_tool_calls(mut self, max: usize) -> Self { + Arc::make_mut(&mut self.inner).max_tool_calls = max; + self + } + + pub fn recursion_exceeded(&self) -> bool { + self.inner.depth >= self.inner.max_depth + } + + pub fn tool_calls_exceeded(&self) -> bool { + self.inner.tool_call_count >= self.inner.max_tool_calls + } + + /// Current recursion depth. + pub fn depth(&self) -> u32 { + self.inner.depth + } + + /// Current tool call count. + pub fn tool_call_count(&self) -> usize { + self.inner.tool_call_count + } + + /// Increments the tool call count. 
+ pub(crate) fn increment_tool_calls(&mut self) { + Arc::make_mut(&mut self.inner).tool_call_count += 1; + } + + /// Returns a child context for a recursive tool call (depth + 1). + pub(crate) fn child_context(&self) -> Self { + let mut inner = (*self.inner).clone(); + inner.depth += 1; + Self { + inner: Arc::new(inner), + } + } + + /// Database connection. + pub fn db(&self) -> &AppDatabase { + &self.inner.db + } + + /// Redis cache. + pub fn cache(&self) -> &AppCache { + &self.inner.cache + } + + /// Room where the original message was sent. + pub fn room_id(&self) -> Uuid { + self.inner.room_id + } + + /// User who sent the original message. + pub fn sender_id(&self) -> Option { + self.inner.sender_id + } + + /// Project context for the room. + pub fn project_id(&self) -> Uuid { + self.inner.project_id + } + + /// Tool registry for this request. + pub fn registry(&self) -> &ToolRegistry { + &self.inner.registry + } +} diff --git a/libs/agent/tool/definition.rs b/libs/agent/tool/definition.rs new file mode 100644 index 0000000..104c319 --- /dev/null +++ b/libs/agent/tool/definition.rs @@ -0,0 +1,89 @@ +//! Tool definition: schema, parameters, and OpenAI-compatible tool objects. + +use async_openai::types::chat::{ChatCompletionTool, FunctionObject}; +use serde::{Deserialize, Serialize}; + +/// A JSON Schema parameter definition for a tool argument. 
+#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ToolParam { + pub name: String, + #[serde(rename = "type")] + pub param_type: String, + pub description: Option, + pub required: bool, + #[serde(skip_serializing_if = "Option::is_none")] + pub properties: Option>, + #[serde(skip_serializing_if = "Option::is_none")] + pub items: Option>, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ToolSchema { + #[serde(rename = "type", default)] + pub schema_type: String, + #[serde(skip_serializing_if = "Option::is_none")] + pub properties: Option>, + #[serde(skip_serializing_if = "Option::is_none")] + pub required: Option>, +} + +impl Default for ToolSchema { + fn default() -> Self { + Self { + schema_type: "object".to_string(), + properties: None, + required: None, + } + } +} + +/// A tool definition with schema and metadata. +#[derive(Debug, Clone)] +pub struct ToolDefinition { + pub name: String, + pub description: Option, + pub parameters: Option, + pub strict: bool, +} + +impl ToolDefinition { + pub fn new(name: impl Into) -> Self { + Self { + name: name.into(), + description: None, + parameters: None, + strict: false, + } + } + + pub fn description(mut self, description: impl Into) -> Self { + self.description = Some(description.into()); + self + } + + pub fn parameters(mut self, schema: ToolSchema) -> Self { + self.parameters = Some(schema); + self + } + + pub fn strict(mut self) -> Self { + self.strict = true; + self + } + + pub fn to_openai_tool(&self) -> ChatCompletionTool { + let parameters = self + .parameters + .as_ref() + .map(|s| serde_json::to_value(s).unwrap_or(serde_json::Value::Null)); + + ChatCompletionTool { + function: FunctionObject { + name: self.name.clone(), + description: self.description.clone(), + parameters, + strict: if self.strict { Some(true) } else { None }, + }, + } + } +} diff --git a/libs/agent/tool/examples.rs b/libs/agent/tool/examples.rs new file mode 100644 index 0000000..09a4109 --- /dev/null +++ 
b/libs/agent/tool/examples.rs @@ -0,0 +1,113 @@ +//! Examples demonstrating the `#[tool]` procedural macro. +//! +//! The macro eliminates boilerplate for defining tools: +//! Instead of manually building `ToolDefinition` + `ToolSchema` + serde structures, +//! you write a typed Rust function and derive everything automatically. +//! +//! # Manual way (without macro) +//! +//! ``` +//! use agent::{ToolDefinition, ToolParam, ToolRegistry, ToolSchema}; +//! use serde_json::json; +//! +//! fn register_manual(registry: &mut ToolRegistry) { +//! registry.register_fn("search_issues", |_ctx, args| { +//! async move { +//! let args: serde_json::Value = args; +//! let title = args["title"].as_str().unwrap_or(""); +//! Ok(json!([{ "title": title, "status": "open" }])) +//! }.boxed() +//! }); +//! } +//! ``` +//! +//! # With `#[tool]` macro (recommended) +//! +//! ``` +//! use agent_tool_derive::tool; +//! use agent::{ToolDefinition, ToolRegistry, ToolError}; +//! +//! #[tool(description = "Search issues by title", params( +//! title = "Issue title to search for", +//! status = "Filter by status (open/closed/all)" +//! ))] +//! async fn search_issues( +//! title: String, +//! status: Option, +//! ) -> Result, String> { +//! Ok(vec![serde_json::json!({ +//! "title": title, +//! "status": status.unwrap_or_else(|| "open".to_string()) +//! })]) +//! } +//! +//! fn register_with_macro(registry: &mut ToolRegistry) { +//! register_search_issues(registry); // Generated by #[tool] +//! } +//! ``` +//! +//! The macro generates: +//! - `SearchIssuesParameters` struct (serde Deserialize) +//! - `SEARCH_ISSUES_DEFINITION: ToolDefinition` constant +//! 
- `register_search_issues(registry: &mut ToolRegistry)` helper + +#[cfg(test)] +mod tests { + use crate::{ToolDefinition, ToolError, ToolRegistry}; + + // Example: using the manual approach (without macro) + // This demonstrates the baseline — how it looks without #[tool] + #[test] + fn manual_tool_registration_shows_boilerplate() { + use futures::FutureExt; + + let mut registry = ToolRegistry::new(); + registry.register_fn("echo", |_ctx, args| { + async move { + let text: serde_json::Value = serde_json::from_value(args) + .map_err(|e| ToolError::ParseError(e.to_string()))?; + Ok(text) + } + .boxed() + }); + assert_eq!(registry.len(), 1); + assert!(registry.get("echo").is_some()); + } + + // NOTE: To use #[tool], the `agent-tool-derive` crate must be a dependency. + // Since proc-macro crates cannot be conditionally compiled via cfg_attr, + // the macro usage example is documented above in the module doc comment. + // + // Full working example (requires agent-tool-derive dependency): + // + // ```ignore + // use agent_tool_derive::tool; + // + // #[tool(description = "Echo back the input text", params( + // text = "The text to echo back" + // ))] + // async fn echo(text: String) -> Result { + // Ok(text) + // } + // + // #[test] + // fn test_macro_generates_definition() { + // let def = ECHO_DEFINITION; + // assert_eq!(def.name, "echo"); + // assert!(def.description.is_some()); + // assert!(def.parameters.is_some()); + // + // let schema = def.parameters.unwrap(); + // assert_eq!(schema.schema_type, "object"); + // let props = schema.properties.unwrap(); + // assert!(props.contains_key("text")); + // } + // + // #[test] + // fn test_macro_registers_tool() { + // let mut registry = ToolRegistry::new(); + // register_echo(&mut registry); + // assert!(registry.get("echo").is_some()); + // } + // ``` +} diff --git a/libs/agent/tool/executor.rs b/libs/agent/tool/executor.rs new file mode 100644 index 0000000..1753fde --- /dev/null +++ b/libs/agent/tool/executor.rs @@ 
-0,0 +1,142 @@ +//! Executes tool calls and converts results to OpenAI `tool` messages. + +use futures::StreamExt; +use futures::stream; + +use async_openai::types::chat::{ + ChatCompletionRequestMessage, ChatCompletionRequestToolMessage, + ChatCompletionRequestToolMessageContent, +}; + +use super::call::{ToolCall, ToolCallResult, ToolError, ToolResult}; +use super::context::ToolContext; + +pub struct ToolExecutor { + max_tool_calls: usize, + max_depth: u32, + max_concurrency: usize, +} + +impl Default for ToolExecutor { + fn default() -> Self { + Self { + max_tool_calls: 128, + max_depth: 5, + max_concurrency: 8, + } + } +} + +impl ToolExecutor { + pub fn new() -> Self { + Self::default() + } + + pub fn with_max_tool_calls(mut self, max: usize) -> Self { + self.max_tool_calls = max; + self + } + + pub fn with_max_depth(mut self, depth: u32) -> Self { + self.max_depth = depth; + self + } + + /// Set the maximum number of tool calls executed concurrently. + /// Defaults to 8. Set to 1 for strictly sequential execution. + pub fn with_max_concurrency(mut self, n: usize) -> Self { + self.max_concurrency = n; + self + } + + /// # Errors + /// + /// Returns `ToolError::MaxToolCallsExceeded` if the total number of tool calls + /// exceeds `max_tool_calls`. 
+ pub async fn execute_batch( + &self, + calls: Vec, + ctx: &mut ToolContext, + ) -> Result, ToolError> { + if ctx.tool_calls_exceeded() { + return Err(ToolError::MaxToolCallsExceeded(ctx.tool_call_count())); + } + if ctx.recursion_exceeded() { + return Err(ToolError::RecursionLimitExceeded { + max_depth: ctx.depth(), + }); + } + + ctx.increment_tool_calls(); + + let concurrency = self.max_concurrency; + use std::sync::Mutex; + let results: Mutex> = Mutex::new(Vec::with_capacity(calls.len())); + + stream::iter(calls.into_iter().map(|call| { + let child_ctx = ctx.child_context(); + async move { self.execute_one(call, child_ctx).await } + })) + .buffer_unordered(concurrency) + .for_each_concurrent( + concurrency, + |result: Result| async { + let r = result.unwrap_or_else(|e| { + ToolCallResult::error( + ToolCall { + id: String::new(), + name: String::new(), + arguments: String::new(), + }, + e.to_string(), + ) + }); + results.lock().unwrap().push(r); + }, + ) + .await; + + Ok(results.into_inner().unwrap()) + } + + async fn execute_one( + &self, + call: ToolCall, + ctx: ToolContext, + ) -> Result { + let handler = ctx + .registry() + .get(&call.name) + .ok_or_else(|| ToolError::NotFound(call.name.clone()))? 
+ .clone(); + + let args = call.arguments_json()?; + + match handler.execute(ctx, args).await { + Ok(value) => Ok(ToolCallResult::ok(call, value)), + Err(e) => Ok(ToolCallResult::error(call, e.to_string())), + } + } + + pub fn to_tool_messages(results: &[ToolCallResult]) -> Vec { + results + .iter() + .map(|r| { + let content = match &r.result { + ToolResult::Ok(v) => { + serde_json::to_string(v).unwrap_or_else(|_| "null".to_string()) + } + ToolResult::Error(msg) => serde_json::to_string(&serde_json::json!({ + "error": msg + })) + .unwrap_or_else(|_| r#"{"error":"unknown error"}"#.to_string()), + }; + + ChatCompletionRequestMessage::Tool(ChatCompletionRequestToolMessage { + tool_call_id: r.call.id.clone(), + content: ChatCompletionRequestToolMessageContent::Text(content), + }) + }) + .collect() + } +} diff --git a/libs/agent/tool/mod.rs b/libs/agent/tool/mod.rs new file mode 100644 index 0000000..b0bacdf --- /dev/null +++ b/libs/agent/tool/mod.rs @@ -0,0 +1,27 @@ +//! Unified function call routing for AI agents. +//! +//! Provides a type-safe, request-scoped tool registry and executor. +//! +//! # Architecture +//! +//! - [`definition`](definition) — Tool schemas: name, description, parameter JSON schema +//! - [`registry`](registry) — Request-scoped `ToolRegistry` mapping names → handlers +//! - [`call`](call) — Execution types: `ToolCall`, `ToolResult`, `ToolError` +//! - [`context`](context) — Execution context passed to each tool handler +//! - [`executor`](executor) — `ToolExecutor` coordinating lookup → execute → result +//! 
- [`examples`](examples) — `#[tool]` macro usage guide + +pub mod call; +pub mod context; +pub mod definition; +pub mod executor; +pub mod registry; + +#[cfg(test)] +mod examples; + +pub use call::{ToolCall, ToolCallResult, ToolError, ToolResult}; +pub use context::ToolContext; +pub use definition::{ToolDefinition, ToolParam, ToolSchema}; +pub use executor::ToolExecutor; +pub use registry::ToolRegistry; diff --git a/libs/agent/tool/registry.rs b/libs/agent/tool/registry.rs new file mode 100644 index 0000000..9c0efd2 --- /dev/null +++ b/libs/agent/tool/registry.rs @@ -0,0 +1,95 @@ +//! Request-scoped tool registry. +//! +//! Tools are registered per-request (not globally) to keep the system testable +//! and allow different request contexts to have different tool sets. + +use std::collections::HashMap; + +use futures::FutureExt; + +use super::call::ToolError; +use super::context::ToolContext; +use super::definition::ToolDefinition; + +/// Inner function pointer type for tool handlers. +type InnerHandlerFn = dyn Fn( + ToolContext, + serde_json::Value, + ) -> std::pin::Pin< + Box> + Send>, + > + Send + + Sync; + +/// Wrapper around `Arc` for `Clone` implementability. +#[derive(Clone)] +pub struct ToolHandler(std::sync::Arc); + +impl ToolHandler { + pub async fn execute( + &self, + ctx: ToolContext, + args: serde_json::Value, + ) -> Result { + (self.0)(ctx, args).await + } +} + +/// A request-scoped registry mapping tool names to their handlers. 
+#[derive(Clone, Default)] +pub struct ToolRegistry { + handlers: HashMap, + definitions: HashMap, +} + +impl ToolRegistry { + pub fn new() -> Self { + Self::default() + } + + pub fn register_fn(&mut self, name: impl Into, handler: F) -> &mut Self + where + F: Fn(ToolContext, serde_json::Value) -> Fut + Send + Sync + 'static, + Fut: std::future::Future> + Send + 'static, + { + let name_str = name.into(); + let def = ToolDefinition::new(&name_str); + let handler_fn: std::sync::Arc = + std::sync::Arc::new(move |ctx, args| handler(ctx, args).boxed()); + self.register(def, ToolHandler(handler_fn)); + self + } + + pub fn register(&mut self, def: ToolDefinition, handler: ToolHandler) -> &mut Self { + let name = def.name.clone(); + if self.handlers.contains_key(&name) { + panic!("tool already registered: {}", name); + } + self.handlers.insert(name.clone(), handler); + self.definitions.insert(name, def); + self + } + + /// Looks up a handler by tool name. + pub fn get(&self, name: &str) -> Option<&ToolHandler> { + self.handlers.get(name) + } + + pub fn definitions(&self) -> std::collections::hash_map::Values<'_, String, ToolDefinition> { + self.definitions.values() + } + + pub fn to_openai_tools(&self) -> Vec { + self.definitions + .values() + .map(|d| d.to_openai_tool()) + .collect() + } + + pub fn len(&self) -> usize { + self.handlers.len() + } + + pub fn is_empty(&self) -> bool { + self.handlers.is_empty() + } +} diff --git a/libs/api/Cargo.toml b/libs/api/Cargo.toml new file mode 100644 index 0000000..76ac35d --- /dev/null +++ b/libs/api/Cargo.toml @@ -0,0 +1,47 @@ +[package] +name = "api" +version.workspace = true +edition.workspace = true +authors.workspace = true +description.workspace = true +repository.workspace = true +readme.workspace = true +homepage.workspace = true +license.workspace = true +keywords.workspace = true +categories.workspace = true +documentation.workspace = true +[lib] +path = "lib.rs" +name = "api" + +[[bin]] +path = "gen_api.rs" +name = 
"gen_api" +[dependencies] +db = { workspace = true } +config = { workspace = true } +queue = { workspace = true } +email = { workspace = true } +slog = { workspace = true } +service = { workspace = true } +session = { workspace = true } +git = { workspace = true } +models = { workspace = true } +room = { workspace = true } +serde = { workspace = true, features = ["derive"] } +utoipa = { workspace = true, features = ["actix_extras", "chrono", "uuid", "preserve_order", "macros", "time"] } +serde_json = { workspace = true } +actix-web = { workspace = true } +uuid = { workspace = true } +anyhow = { workspace = true } +actix-cors = { workspace = true } +base64 = { workspace = true } +actix-ws = { workspace = true, features = [] } +actix = { workspace = true, features = ["macros"] } +tokio-stream = { workspace = true, features = ["sync"] } +futures = { workspace = true } +tokio = { workspace = true, features = ["sync", "rt"] } +chrono = { workspace = true } +[lints] +workspace = true diff --git a/libs/api/agent/code_review.rs b/libs/api/agent/code_review.rs new file mode 100644 index 0000000..6437df7 --- /dev/null +++ b/libs/api/agent/code_review.rs @@ -0,0 +1,39 @@ +use crate::{ApiResponse, error::ApiError}; +use actix_web::{HttpResponse, Result, web}; +use service::AppService; +use service::agent::code_review::{TriggerCodeReviewRequest, TriggerCodeReviewResponse}; +use session::Session; + +#[utoipa::path( + post, + path = "/api/agents/code-review/{namespace}/{repo}", + request_body = TriggerCodeReviewRequest, + params( + ("namespace" = String, Path, description = "Project namespace"), + ("repo" = String, Path, description = "Repository name"), + ), + responses( + (status = 200, body = TriggerCodeReviewResponse, description = "AI code review triggered"), + (status = 401, description = "Unauthorized"), + (status = 404, description = "Repository or PR not found"), + ), + tag = "Agent" +)] +pub async fn trigger_code_review( + service: web::Data, + session: Session, + path: 
web::Path<(String, String)>, + body: web::Json, +) -> Result { + let (namespace, repo_name) = path.into_inner(); + let resp = service + .trigger_ai_code_review( + namespace, + repo_name, + body.pr_number, + body.model_id, + &session, + ) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} diff --git a/libs/api/agent/mod.rs b/libs/api/agent/mod.rs new file mode 100644 index 0000000..1a587aa --- /dev/null +++ b/libs/api/agent/mod.rs @@ -0,0 +1,120 @@ +pub mod code_review; +pub mod model; +pub mod model_capability; +pub mod model_parameter_profile; +pub mod model_pricing; +pub mod model_version; +pub mod pr_summary; +pub mod provider; + +use actix_web::web; + +pub fn init_agent_routes(cfg: &mut web::ServiceConfig) { + cfg.service( + web::scope("/agents") + .route( + "/code-review/{namespace}/{repo}", + web::post().to(code_review::trigger_code_review), + ) + .route( + "/pr-description/{namespace}/{repo}", + web::post().to(pr_summary::generate_pr_description), + ) + .route("/providers", web::get().to(provider::provider_list)) + .route("/providers/{id}", web::get().to(provider::provider_get)) + .route("/providers", web::post().to(provider::provider_create)) + .route( + "/providers/{id}", + web::patch().to(provider::provider_update), + ) + .route( + "/providers/{id}", + web::delete().to(provider::provider_delete), + ) + .route("/models", web::get().to(model::model_list)) + .route("/models/{id}", web::get().to(model::model_get)) + .route("/models", web::post().to(model::model_create)) + .route("/models/{id}", web::patch().to(model::model_update)) + .route("/models/{id}", web::delete().to(model::model_delete)) + .route( + "/versions", + web::get().to(model_version::model_version_list), + ) + .route( + "/versions/{id}", + web::get().to(model_version::model_version_get), + ) + .route( + "/versions", + web::post().to(model_version::model_version_create), + ) + .route( + "/versions/{id}", + web::patch().to(model_version::model_version_update), + ) + .route( + 
"/versions/{id}", + web::delete().to(model_version::model_version_delete), + ) + .route( + "/versions/{model_version_id}/pricing", + web::get().to(model_pricing::model_pricing_list), + ) + .route( + "/pricing/{id}", + web::get().to(model_pricing::model_pricing_get), + ) + .route( + "/pricing", + web::post().to(model_pricing::model_pricing_create), + ) + .route( + "/pricing/{id}", + web::patch().to(model_pricing::model_pricing_update), + ) + .route( + "/pricing/{id}", + web::delete().to(model_pricing::model_pricing_delete), + ) + .route( + "/versions/{model_version_id}/capabilities", + web::get().to(model_capability::model_capability_list), + ) + .route( + "/capabilities/{id}", + web::get().to(model_capability::model_capability_get), + ) + .route( + "/capabilities", + web::post().to(model_capability::model_capability_create), + ) + .route( + "/capabilities/{id}", + web::patch().to(model_capability::model_capability_update), + ) + .route( + "/capabilities/{id}", + web::delete().to(model_capability::model_capability_delete), + ) + .route( + "/versions/{model_version_id}/parameters", + web::get().to(model_parameter_profile::model_parameter_profile_list), + ) + .route( + "/parameters/{id}", + web::get().to(model_parameter_profile::model_parameter_profile_get), + ) + .route( + "/parameters", + web::post().to(model_parameter_profile::model_parameter_profile_create), + ) + .route( + "/parameters/{id}", + web::patch().to(model_parameter_profile::model_parameter_profile_update), + ) + .route( + "/parameters/{id}", + web::delete().to(model_parameter_profile::model_parameter_profile_delete), + ), + ); +} diff --git a/libs/api/agent/model.rs b/libs/api/agent/model.rs new file mode 100644 index 0000000..a24c3c0 --- /dev/null +++ b/libs/api/agent/model.rs @@ -0,0 +1,132 @@ +use crate::{ApiResponse, error::ApiError}; +use actix_web::{HttpResponse, Result, web}; +use service::AppService; +use service::agent::model::{CreateModelRequest, UpdateModelRequest}; +use session::Session; 
+use uuid::Uuid; + +#[derive(serde::Deserialize, utoipa::IntoParams)] +pub struct ListQuery { + pub provider_id: Option, +} + +#[utoipa::path( + get, + path = "/api/agents/models", + params(ListQuery), + responses( + (status = 200, body = Vec), + (status = 401, description = "Unauthorized"), + ), + tag = "Agent" +)] +pub async fn model_list( + service: web::Data, + session: Session, + query: web::Query, +) -> Result { + let provider_id = if let Some(ref s) = query.provider_id { + Some(Uuid::parse_str(s).map_err(|_| { + service::error::AppError::BadRequest("Invalid provider UUID".to_string()) + })?) + } else { + None + }; + let resp = service.agent_model_list(provider_id, &session).await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/agents/models/{id}", + params(("id" = String, Path)), + responses( + (status = 200, body = service::agent::model::ModelResponse), + (status = 401, description = "Unauthorized"), + (status = 404), + ), + tag = "Agent" +)] +pub async fn model_get( + service: web::Data, + session: Session, + path: web::Path, +) -> Result { + let id = Uuid::parse_str(&path.into_inner()) + .map_err(|_| service::error::AppError::BadRequest("Invalid UUID".to_string()))?; + let resp = service.agent_model_get(id, &session).await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + post, + path = "/api/agents/models", + request_body = CreateModelRequest, + responses( + (status = 200, body = service::agent::model::ModelResponse), + (status = 401), + (status = 403), + (status = 404), + ), + tag = "Agent" +)] +pub async fn model_create( + service: web::Data, + session: Session, + body: web::Json, +) -> Result { + let resp = service + .agent_model_create(body.into_inner(), &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + patch, + path = "/api/agents/models/{id}", + params(("id" = String, Path)), + request_body = UpdateModelRequest, + responses( + (status = 200, body = 
service::agent::model::ModelResponse), + (status = 401), + (status = 403), + (status = 404), + ), + tag = "Agent" +)] +pub async fn model_update( + service: web::Data, + session: Session, + path: web::Path, + body: web::Json, +) -> Result { + let id = Uuid::parse_str(&path.into_inner()) + .map_err(|_| service::error::AppError::BadRequest("Invalid UUID".to_string()))?; + let resp = service + .agent_model_update(id, body.into_inner(), &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + delete, + path = "/api/agents/models/{id}", + params(("id" = String, Path)), + responses( + (status = 200), + (status = 401), + (status = 403), + (status = 404), + ), + tag = "Agent" +)] +pub async fn model_delete( + service: web::Data, + session: Session, + path: web::Path, +) -> Result { + let id = Uuid::parse_str(&path.into_inner()) + .map_err(|_| service::error::AppError::BadRequest("Invalid UUID".to_string()))?; + service.agent_model_delete(id, &session).await?; + Ok(crate::api_success()) +} diff --git a/libs/api/agent/model_capability.rs b/libs/api/agent/model_capability.rs new file mode 100644 index 0000000..e3b3d33 --- /dev/null +++ b/libs/api/agent/model_capability.rs @@ -0,0 +1,120 @@ +use crate::{ApiResponse, error::ApiError}; +use actix_web::{HttpResponse, Result, web}; +use service::AppService; +use service::agent::model_capability::{ + CreateModelCapabilityRequest, UpdateModelCapabilityRequest, +}; +use session::Session; + +#[utoipa::path( + get, + path = "/api/agents/versions/{model_version_id}/capabilities", + params(("model_version_id" = i64, Path)), + responses( + (status = 200, body = Vec), + (status = 401, description = "Unauthorized"), + ), + tag = "Agent" +)] +pub async fn model_capability_list( + service: web::Data, + session: Session, + path: web::Path, +) -> Result { + let model_version_id = path.into_inner(); + let resp = service + .agent_model_capability_list(model_version_id, &session) + .await?; + 
Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/agents/capabilities/{id}", + params(("id" = i64, Path)), + responses( + (status = 200, body = service::agent::model_capability::ModelCapabilityResponse), + (status = 401, description = "Unauthorized"), + (status = 404), + ), + tag = "Agent" +)] +pub async fn model_capability_get( + service: web::Data, + session: Session, + path: web::Path, +) -> Result { + let id = path.into_inner(); + let resp = service.agent_model_capability_get(id, &session).await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + post, + path = "/api/agents/capabilities", + request_body = CreateModelCapabilityRequest, + responses( + (status = 200, body = service::agent::model_capability::ModelCapabilityResponse), + (status = 401), + (status = 403), + ), + tag = "Agent" +)] +pub async fn model_capability_create( + service: web::Data, + session: Session, + body: web::Json, +) -> Result { + let resp = service + .agent_model_capability_create(body.into_inner(), &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + patch, + path = "/api/agents/capabilities/{id}", + params(("id" = i64, Path)), + request_body = UpdateModelCapabilityRequest, + responses( + (status = 200, body = service::agent::model_capability::ModelCapabilityResponse), + (status = 401), + (status = 403), + (status = 404), + ), + tag = "Agent" +)] +pub async fn model_capability_update( + service: web::Data, + session: Session, + path: web::Path, + body: web::Json, +) -> Result { + let id = path.into_inner(); + let resp = service + .agent_model_capability_update(id, body.into_inner(), &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + delete, + path = "/api/agents/capabilities/{id}", + params(("id" = i64, Path)), + responses( + (status = 200), + (status = 401), + (status = 403), + (status = 404), + ), + tag = "Agent" +)] +pub async fn model_capability_delete( + service: 
web::Data, + session: Session, + path: web::Path, +) -> Result { + let id = path.into_inner(); + service.agent_model_capability_delete(id, &session).await?; + Ok(HttpResponse::Ok().json(serde_json::json!({ "success": true }))) +} diff --git a/libs/api/agent/model_parameter_profile.rs b/libs/api/agent/model_parameter_profile.rs new file mode 100644 index 0000000..6e0271b --- /dev/null +++ b/libs/api/agent/model_parameter_profile.rs @@ -0,0 +1,126 @@ +use crate::{ApiResponse, error::ApiError}; +use actix_web::{HttpResponse, Result, web}; +use service::AppService; +use service::agent::model_parameter_profile::{ + CreateModelParameterProfileRequest, UpdateModelParameterProfileRequest, +}; +use session::Session; +use uuid::Uuid; + +#[utoipa::path( + get, + path = "/api/agents/versions/{model_version_id}/parameters", + params(("model_version_id" = String, Path)), + responses( + (status = 200, body = Vec), + (status = 401, description = "Unauthorized"), + ), + tag = "Agent" +)] +pub async fn model_parameter_profile_list( + service: web::Data, + session: Session, + path: web::Path, +) -> Result { + let model_version_id = Uuid::parse_str(&path.into_inner()) + .map_err(|_| service::error::AppError::BadRequest("Invalid UUID".to_string()))?; + let resp = service + .agent_model_parameter_profile_list(model_version_id, &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/agents/parameters/{id}", + params(("id" = i64, Path)), + responses( + (status = 200, body = service::agent::model_parameter_profile::ModelParameterProfileResponse), + (status = 401, description = "Unauthorized"), + (status = 404), + ), + tag = "Agent" +)] +pub async fn model_parameter_profile_get( + service: web::Data, + session: Session, + path: web::Path, +) -> Result { + let id = path.into_inner(); + let resp = service + .agent_model_parameter_profile_get(id, &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + post, + 
path = "/api/agents/parameters", + request_body = CreateModelParameterProfileRequest, + responses( + (status = 200, body = service::agent::model_parameter_profile::ModelParameterProfileResponse), + (status = 401), + (status = 403), + ), + tag = "Agent" +)] +pub async fn model_parameter_profile_create( + service: web::Data, + session: Session, + body: web::Json, +) -> Result { + let resp = service + .agent_model_parameter_profile_create(body.into_inner(), &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + patch, + path = "/api/agents/parameters/{id}", + params(("id" = i64, Path)), + request_body = UpdateModelParameterProfileRequest, + responses( + (status = 200, body = service::agent::model_parameter_profile::ModelParameterProfileResponse), + (status = 401), + (status = 403), + (status = 404), + ), + tag = "Agent" +)] +pub async fn model_parameter_profile_update( + service: web::Data, + session: Session, + path: web::Path, + body: web::Json, +) -> Result { + let id = path.into_inner(); + let resp = service + .agent_model_parameter_profile_update(id, body.into_inner(), &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + delete, + path = "/api/agents/parameters/{id}", + params(("id" = i64, Path)), + responses( + (status = 200), + (status = 401), + (status = 403), + (status = 404), + ), + tag = "Agent" +)] +pub async fn model_parameter_profile_delete( + service: web::Data, + session: Session, + path: web::Path, +) -> Result { + let id = path.into_inner(); + service + .agent_model_parameter_profile_delete(id, &session) + .await?; + Ok(HttpResponse::Ok().json(serde_json::json!({ "success": true }))) +} diff --git a/libs/api/agent/model_pricing.rs b/libs/api/agent/model_pricing.rs new file mode 100644 index 0000000..a58260f --- /dev/null +++ b/libs/api/agent/model_pricing.rs @@ -0,0 +1,120 @@ +use crate::{ApiResponse, error::ApiError}; +use actix_web::{HttpResponse, Result, web}; +use 
service::AppService; +use service::agent::model_pricing::{CreateModelPricingRequest, UpdateModelPricingRequest}; +use session::Session; +use uuid::Uuid; + +#[utoipa::path( + get, + path = "/api/agents/versions/{model_version_id}/pricing", + params(("model_version_id" = String, Path)), + responses( + (status = 200, body = Vec), + (status = 401, description = "Unauthorized"), + ), + tag = "Agent" +)] +pub async fn model_pricing_list( + service: web::Data, + session: Session, + path: web::Path, +) -> Result { + let model_version_id = Uuid::parse_str(&path.into_inner()) + .map_err(|_| service::error::AppError::BadRequest("Invalid UUID".to_string()))?; + let resp = service + .agent_model_pricing_list(model_version_id, &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/agents/pricing/{id}", + params(("id" = i64, Path)), + responses( + (status = 200, body = service::agent::model_pricing::ModelPricingResponse), + (status = 401, description = "Unauthorized"), + (status = 404), + ), + tag = "Agent" +)] +pub async fn model_pricing_get( + service: web::Data, + session: Session, + path: web::Path, +) -> Result { + let id = path.into_inner(); + let resp = service.agent_model_pricing_get(id, &session).await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + post, + path = "/api/agents/pricing", + request_body = CreateModelPricingRequest, + responses( + (status = 200, body = service::agent::model_pricing::ModelPricingResponse), + (status = 401), + (status = 403), + ), + tag = "Agent" +)] +pub async fn model_pricing_create( + service: web::Data, + session: Session, + body: web::Json, +) -> Result { + let resp = service + .agent_model_pricing_create(body.into_inner(), &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + patch, + path = "/api/agents/pricing/{id}", + params(("id" = i64, Path)), + request_body = UpdateModelPricingRequest, + responses( + (status = 200, body = 
service::agent::model_pricing::ModelPricingResponse), + (status = 401), + (status = 403), + (status = 404), + ), + tag = "Agent" +)] +pub async fn model_pricing_update( + service: web::Data, + session: Session, + path: web::Path, + body: web::Json, +) -> Result { + let id = path.into_inner(); + let resp = service + .agent_model_pricing_update(id, body.into_inner(), &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + delete, + path = "/api/agents/pricing/{id}", + params(("id" = i64, Path)), + responses( + (status = 200), + (status = 401), + (status = 403), + (status = 404), + ), + tag = "Agent" +)] +pub async fn model_pricing_delete( + service: web::Data, + session: Session, + path: web::Path, +) -> Result { + let id = path.into_inner(); + service.agent_model_pricing_delete(id, &session).await?; + Ok(HttpResponse::Ok().json(serde_json::json!({ "success": true }))) +} diff --git a/libs/api/agent/model_version.rs b/libs/api/agent/model_version.rs new file mode 100644 index 0000000..de78f26 --- /dev/null +++ b/libs/api/agent/model_version.rs @@ -0,0 +1,132 @@ +use crate::{ApiResponse, error::ApiError}; +use actix_web::{HttpResponse, Result, web}; +use service::AppService; +use service::agent::model_version::{CreateModelVersionRequest, UpdateModelVersionRequest}; +use session::Session; +use uuid::Uuid; + +#[derive(serde::Deserialize, utoipa::IntoParams)] +pub struct ListQuery { + pub model_id: Option, +} + +#[utoipa::path( + get, + path = "/api/agents/versions", + params(ListQuery), + responses( + (status = 200, body = Vec), + (status = 401, description = "Unauthorized"), + ), + tag = "Agent" +)] +pub async fn model_version_list( + service: web::Data, + session: Session, + query: web::Query, +) -> Result { + let model_id = + if let Some(ref s) = query.model_id { + Some(Uuid::parse_str(s).map_err(|_| { + service::error::AppError::BadRequest("Invalid model UUID".to_string()) + })?) 
+ } else { + None + }; + let resp = service.agent_model_version_list(model_id, &session).await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/agents/versions/{id}", + params(("id" = String, Path)), + responses( + (status = 200, body = service::agent::model_version::ModelVersionResponse), + (status = 401, description = "Unauthorized"), + (status = 404), + ), + tag = "Agent" +)] +pub async fn model_version_get( + service: web::Data, + session: Session, + path: web::Path, +) -> Result { + let id = Uuid::parse_str(&path.into_inner()) + .map_err(|_| service::error::AppError::BadRequest("Invalid UUID".to_string()))?; + let resp = service.agent_model_version_get(id, &session).await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + post, + path = "/api/agents/versions", + request_body = CreateModelVersionRequest, + responses( + (status = 200, body = service::agent::model_version::ModelVersionResponse), + (status = 401), + (status = 403), + ), + tag = "Agent" +)] +pub async fn model_version_create( + service: web::Data, + session: Session, + body: web::Json, +) -> Result { + let resp = service + .agent_model_version_create(body.into_inner(), &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + patch, + path = "/api/agents/versions/{id}", + params(("id" = String, Path)), + request_body = UpdateModelVersionRequest, + responses( + (status = 200, body = service::agent::model_version::ModelVersionResponse), + (status = 401), + (status = 403), + (status = 404), + ), + tag = "Agent" +)] +pub async fn model_version_update( + service: web::Data, + session: Session, + path: web::Path, + body: web::Json, +) -> Result { + let id = Uuid::parse_str(&path.into_inner()) + .map_err(|_| service::error::AppError::BadRequest("Invalid UUID".to_string()))?; + let resp = service + .agent_model_version_update(id, body.into_inner(), &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + 
+#[utoipa::path( + delete, + path = "/api/agents/versions/{id}", + params(("id" = String, Path)), + responses( + (status = 200), + (status = 401), + (status = 403), + (status = 404), + ), + tag = "Agent" +)] +pub async fn model_version_delete( + service: web::Data, + session: Session, + path: web::Path, +) -> Result { + let id = Uuid::parse_str(&path.into_inner()) + .map_err(|_| service::error::AppError::BadRequest("Invalid UUID".to_string()))?; + service.agent_model_version_delete(id, &session).await?; + Ok(HttpResponse::Ok().json(serde_json::json!({ "success": true }))) +} diff --git a/libs/api/agent/pr_summary.rs b/libs/api/agent/pr_summary.rs new file mode 100644 index 0000000..ef19890 --- /dev/null +++ b/libs/api/agent/pr_summary.rs @@ -0,0 +1,33 @@ +use crate::{ApiResponse, error::ApiError}; +use actix_web::{HttpResponse, Result, web}; +use service::AppService; +use service::agent::pr_summary::{GeneratePrDescriptionRequest, GeneratePrDescriptionResponse}; +use session::Session; + +#[utoipa::path( + post, + path = "/api/agents/pr-description/{namespace}/{repo}", + request_body = GeneratePrDescriptionRequest, + params( + ("namespace" = String, Path, description = "Project namespace"), + ("repo" = String, Path, description = "Repository name"), + ), + responses( + (status = 200, body = GeneratePrDescriptionResponse, description = "AI-generated PR description"), + (status = 401, description = "Unauthorized"), + (status = 404, description = "Repository or PR not found"), + ), + tag = "Agent" +)] +pub async fn generate_pr_description( + service: web::Data, + session: Session, + path: web::Path<(String, String)>, + body: web::Json, +) -> Result { + let (namespace, repo_name) = path.into_inner(); + let resp = service + .generate_pr_description(namespace, repo_name, body.into_inner(), &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} diff --git a/libs/api/agent/provider.rs b/libs/api/agent/provider.rs new file mode 100644 index 0000000..0dc1b56 --- 
/dev/null +++ b/libs/api/agent/provider.rs @@ -0,0 +1,117 @@ +use crate::{ApiResponse, error::ApiError}; +use actix_web::{HttpResponse, Result, web}; +use service::AppService; +use service::agent::provider::{CreateProviderRequest, UpdateProviderRequest}; +use session::Session; +use uuid::Uuid; + +#[utoipa::path( + get, + path = "/api/agents/providers", + responses( + (status = 200, body = Vec), + (status = 401, description = "Unauthorized"), + ), + tag = "Agent" +)] +pub async fn provider_list( + service: web::Data, + session: Session, +) -> Result { + let resp = service.agent_provider_list(&session).await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/agents/providers/{id}", + params(("id" = String, Path, description = "Provider UUID")), + responses( + (status = 200, body = service::agent::provider::ProviderResponse), + (status = 401, description = "Unauthorized"), + (status = 404), + ), + tag = "Agent" +)] +pub async fn provider_get( + service: web::Data, + session: Session, + path: web::Path, +) -> Result { + let id = Uuid::parse_str(&path.into_inner()) + .map_err(|_| service::error::AppError::BadRequest("Invalid UUID".to_string()))?; + let resp = service.agent_provider_get(id, &session).await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + post, + path = "/api/agents/providers", + request_body = CreateProviderRequest, + responses( + (status = 200, body = service::agent::provider::ProviderResponse), + (status = 401), + (status = 403), + ), + tag = "Agent" +)] +pub async fn provider_create( + service: web::Data, + session: Session, + body: web::Json, +) -> Result { + let resp = service + .agent_provider_create(body.into_inner(), &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + patch, + path = "/api/agents/providers/{id}", + params(("id" = String, Path)), + request_body = UpdateProviderRequest, + responses( + (status = 200, body = 
service::agent::provider::ProviderResponse), + (status = 401), + (status = 403), + (status = 404), + ), + tag = "Agent" +)] +pub async fn provider_update( + service: web::Data, + session: Session, + path: web::Path, + body: web::Json, +) -> Result { + let id = Uuid::parse_str(&path.into_inner()) + .map_err(|_| service::error::AppError::BadRequest("Invalid UUID".to_string()))?; + let resp = service + .agent_provider_update(id, body.into_inner(), &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + delete, + path = "/api/agents/providers/{id}", + params(("id" = String, Path)), + responses( + (status = 200), + (status = 401), + (status = 403), + (status = 404), + ), + tag = "Agent" +)] +pub async fn provider_delete( + service: web::Data, + session: Session, + path: web::Path, +) -> Result { + let id = Uuid::parse_str(&path.into_inner()) + .map_err(|_| service::error::AppError::BadRequest("Invalid UUID".to_string()))?; + service.agent_provider_delete(id, &session).await?; + Ok(crate::api_success()) +} diff --git a/libs/api/auth/captcha.rs b/libs/api/auth/captcha.rs new file mode 100644 index 0000000..494568c --- /dev/null +++ b/libs/api/auth/captcha.rs @@ -0,0 +1,26 @@ +use crate::ApiResponse; +use crate::error::ApiError; +use actix_web::{HttpResponse, Result, web}; +use service::AppService; +use service::auth::captcha::{CaptchaQuery, CaptchaResponse}; +use session::Session; + +#[utoipa::path( + post, + path = "/api/auth/captcha", + request_body = CaptchaQuery, + responses( + (status = 200, description = "Captcha generated", body = ApiResponse), + (status = 500, description = "Internal server error", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Auth" +)] +pub async fn api_auth_captcha( + service: web::Data, + session: Session, + body: web::Json, +) -> Result { + let resp = service.auth_captcha(&session, body.into_inner()).await?; + Ok(HttpResponse::Ok().json(ApiResponse::ok(resp))) 
+} diff --git a/libs/api/auth/email.rs b/libs/api/auth/email.rs new file mode 100644 index 0000000..907191d --- /dev/null +++ b/libs/api/auth/email.rs @@ -0,0 +1,62 @@ +use crate::{ApiResponse, error::ApiError}; +use actix_web::{HttpResponse, Result, web}; +use service::AppService; +use service::auth::email::{EmailChangeRequest, EmailResponse, EmailVerifyRequest}; +use session::Session; + +#[utoipa::path( + post, + path = "/api/auth/email", + responses( + (status = 200, description = "Current email address", body = ApiResponse), + (status = 401, description = "Unauthorized"), + ), + tag = "Auth" +)] +pub async fn api_email_get( + service: web::Data, + session: Session, +) -> Result { + let email = service.auth_get_email(&session).await?; + Ok(ApiResponse::ok(email).to_response()) +} + +#[utoipa::path( + post, + path = "/api/auth/email/change", + request_body = EmailChangeRequest, + responses( + (status = 200, description = "Verification email sent", body = ApiResponse), + (status = 401, description = "Unauthorized or invalid password"), + (status = 409, description = "Email already in use"), + ), + tag = "Auth" +)] +pub async fn api_email_change( + service: web::Data, + session: Session, + body: web::Json, +) -> Result { + service + .auth_email_change_request(&session, body.into_inner()) + .await?; + Ok(crate::api_success()) +} + +#[utoipa::path( + post, + path = "/api/auth/email/verify", + request_body = EmailVerifyRequest, + responses( + (status = 200, description = "Email updated successfully", body = ApiResponse), + (status = 400, description = "Invalid or expired token"), + ), + tag = "Auth" +)] +pub async fn api_email_verify( + service: web::Data, + body: web::Json, +) -> Result { + service.auth_email_verify(body.into_inner()).await?; + Ok(crate::api_success()) +} diff --git a/libs/api/auth/login.rs b/libs/api/auth/login.rs new file mode 100644 index 0000000..076639d --- /dev/null +++ b/libs/api/auth/login.rs @@ -0,0 +1,28 @@ +use crate::ApiResponse; +use 
crate::error::ApiError; +use actix_web::{HttpResponse, Result, web}; +use service::AppService; +use service::auth::login::LoginParams; +use session::Session; + +#[utoipa::path( + post, + path = "/api/auth/login", + request_body = LoginParams, + responses( + (status = 200, description = "Login successful", body = ApiResponse), + (status = 401, description = "Invalid credentials", body = ApiResponse), + (status = 428, description = "Two-factor authentication required", body = ApiResponse), + (status = 400, description = "Bad request", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Auth" +)] +pub async fn api_auth_login( + service: web::Data, + session: Session, + params: web::Json, +) -> Result { + service.auth_login(params.into_inner(), session).await?; + Ok(crate::api_success()) +} diff --git a/libs/api/auth/logout.rs b/libs/api/auth/logout.rs new file mode 100644 index 0000000..a57698b --- /dev/null +++ b/libs/api/auth/logout.rs @@ -0,0 +1,24 @@ +use crate::ApiResponse; +use crate::error::ApiError; +use actix_web::{HttpResponse, Result, web}; +use service::AppService; +use session::Session; + +#[utoipa::path( + post, + path = "/api/auth/logout", + responses( + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 200, description = "Logout successful", body = ApiResponse), + (status = 500, description = "Internal server error", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Auth" +)] +pub async fn api_auth_logout( + service: web::Data, + session: Session, +) -> Result { + service.auth_logout(&session).await?; + Ok(crate::api_success()) +} diff --git a/libs/api/auth/me.rs b/libs/api/auth/me.rs new file mode 100644 index 0000000..de504a6 --- /dev/null +++ b/libs/api/auth/me.rs @@ -0,0 +1,24 @@ +use crate::{ApiResponse, error::ApiError}; +use actix_web::{HttpResponse, Result, web}; +use service::AppService; +use 
service::auth::me::ContextMe; +use session::Session; + +#[utoipa::path( + post, + path = "/api/auth/me", + responses( + (status = 200, description = "Current user info", body = ApiResponse), + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 500, description = "Internal server error", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Auth" +)] +pub async fn api_auth_me( + service: web::Data, + session: Session, +) -> Result { + let me = service.auth_me(session).await?; + Ok(ApiResponse::ok(me).to_response()) +} diff --git a/libs/api/auth/mod.rs b/libs/api/auth/mod.rs new file mode 100644 index 0000000..4b2fed6 --- /dev/null +++ b/libs/api/auth/mod.rs @@ -0,0 +1,68 @@ +pub mod captcha; +pub mod email; +pub mod login; +pub mod logout; +pub mod me; +pub mod password; +pub mod register; +pub mod totp; +pub mod ws_token; + +pub fn init_auth_routes(cfg: &mut actix_web::web::ServiceConfig) { + cfg.service( + actix_web::web::scope("/auth") + .route("/login", actix_web::web::post().to(login::api_auth_login)) + .route( + "/register", + actix_web::web::post().to(register::api_auth_register), + ) + .route( + "/logout", + actix_web::web::post().to(logout::api_auth_logout), + ) + .route( + "/captcha", + actix_web::web::post().to(captcha::api_auth_captcha), + ) + .route("/me", actix_web::web::post().to(me::api_auth_me)) + .route( + "/password/change", + actix_web::web::post().to(password::api_user_change_password), + ) + .route( + "/password/reset", + actix_web::web::post().to(password::api_user_request_password_reset), + ) + .route( + "/2fa/enable", + actix_web::web::post().to(totp::api_2fa_enable), + ) + .route( + "/2fa/verify", + actix_web::web::post().to(totp::api_2fa_verify), + ) + .route( + "/2fa/disable", + actix_web::web::post().to(totp::api_2fa_disable), + ) + .route( + "/2fa/status", + actix_web::web::post().to(totp::api_2fa_status), + ) + .route("/email", 
actix_web::web::post().to(email::api_email_get)) + .route( + "/email/change", + actix_web::web::post().to(email::api_email_change), + ) + .route( + "/email/verify", + actix_web::web::post().to(email::api_email_verify), + ), + ); + + // WebSocket token endpoint + cfg.route( + "/ws/token", + actix_web::web::post().to(ws_token::ws_token_generate), + ); +} diff --git a/libs/api/auth/password.rs b/libs/api/auth/password.rs new file mode 100644 index 0000000..f452106 --- /dev/null +++ b/libs/api/auth/password.rs @@ -0,0 +1,53 @@ +use crate::ApiResponse; +use crate::error::ApiError; +use actix_web::{HttpResponse, Result, web}; +use service::AppService; +use service::auth::password::{ChangePasswordParams, ResetPasswordParams}; +use session::Session; + +#[utoipa::path( + post, + path = "/api/auth/password/change", + request_body = ChangePasswordParams, + responses( + (status = 200, description = "Password changed successfully", body = ApiResponse), + (status = 401, description = "Unauthorized or invalid password", body = ApiResponse), + (status = 400, description = "Bad request", body = ApiResponse), + (status = 500, description = "Internal server error", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Auth" +)] +pub async fn api_user_change_password( + service: web::Data, + session: Session, + params: web::Json, +) -> Result { + service + .auth_change_password(&session, params.into_inner()) + .await?; + Ok(crate::api_success()) +} + +#[utoipa::path( + post, + path = "/api/auth/password/reset", + request_body = ResetPasswordParams, + responses( + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 200, description = "Password reset email sent", body = ApiResponse), + (status = 404, description = "User not found", body = ApiResponse), + (status = 500, description = "Internal server error", body = ApiResponse), + ), + tag = "Auth" +)] +pub async fn api_user_request_password_reset( + service: 
web::Data, + _session: Session, + params: web::Json, +) -> Result { + service + .auth_request_password_reset(params.into_inner()) + .await?; + Ok(crate::api_success()) +} diff --git a/libs/api/auth/register.rs b/libs/api/auth/register.rs new file mode 100644 index 0000000..f835d82 --- /dev/null +++ b/libs/api/auth/register.rs @@ -0,0 +1,48 @@ +use crate::{ApiResponse, error::ApiError}; +use actix_web::{HttpResponse, Result, web}; +use models::DateTime; +use serde::Serialize; +use service::AppService; +use service::auth::register::RegisterParams; +use session::Session; +use utoipa::ToSchema; + +#[derive(Serialize, ToSchema)] +pub struct RegisterResponse { + uid: String, + username: String, + display_name: Option, + avatar_url: Option, + #[schema(ignore)] + created_at: DateTime, +} + +#[utoipa::path( + post, + path = "/api/auth/register", + request_body = RegisterParams, + responses( + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 200, description = "Registration successful", body = ApiResponse), + (status = 400, description = "Bad request", body = ApiResponse), + (status = 409, description = "Username or email already exists", body = ApiResponse), + (status = 500, description = "Internal server error", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Auth" +)] +pub async fn api_auth_register( + service: web::Data, + session: Session, + params: web::Json, +) -> Result { + let user = service.auth_register(params.into_inner(), &session).await?; + let payload = RegisterResponse { + uid: user.uid.to_string(), + username: user.username, + display_name: user.display_name, + avatar_url: user.avatar_url, + created_at: user.created_at.naive_utc(), + }; + Ok(ApiResponse::ok(payload).to_response()) +} diff --git a/libs/api/auth/totp.rs b/libs/api/auth/totp.rs new file mode 100644 index 0000000..ed66d2b --- /dev/null +++ b/libs/api/auth/totp.rs @@ -0,0 +1,94 @@ +use crate::{ApiResponse, 
error::ApiError}; +use actix_web::{HttpResponse, Result, web}; +use service::AppService; +use service::auth::totp::{ + Disable2FAParams, Enable2FAResponse, Get2FAStatusResponse, Verify2FAParams, +}; +use session::Session; + +#[utoipa::path( + post, + path = "/api/auth/2fa/enable", + responses( + (status = 200, description = "2FA setup initiated", body = Enable2FAResponse), + (status = 401, description = "Unauthorized"), + (status = 409, description = "2FA already enabled"), + (status = 500, description = "Internal server error"), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Auth" +)] +pub async fn api_2fa_enable( + service: web::Data, + session: Session, +) -> Result { + let resp = service.auth_2fa_enable(&session).await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + post, + path = "/api/auth/2fa/verify", + request_body = Verify2FAParams, + responses( + (status = 200, description = "2FA verified and enabled"), + (status = 401, description = "Unauthorized or invalid code"), + (status = 400, description = "2FA not set up"), + (status = 500, description = "Internal server error"), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Auth" +)] +pub async fn api_2fa_verify( + service: web::Data, + session: Session, + params: web::Json, +) -> Result { + service + .auth_2fa_verify_and_enable(&session, params.into_inner()) + .await?; + Ok(crate::api_success()) +} + +#[utoipa::path( + post, + path = "/api/auth/2fa/disable", + request_body = Disable2FAParams, + responses( + (status = 200, description = "2FA disabled"), + (status = 401, description = "Unauthorized"), + (status = 400, description = "2FA not enabled or invalid code/password"), + (status = 500, description = "Internal server error"), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Auth" +)] +pub async fn api_2fa_disable( + service: web::Data, + session: Session, + params: web::Json, +) -> Result { + 
service + .auth_2fa_disable(&session, params.into_inner()) + .await?; + Ok(crate::api_success()) +} + +#[utoipa::path( + post, + path = "/api/auth/2fa/status", + responses( + (status = 200, description = "2FA status", body = Get2FAStatusResponse), + (status = 401, description = "Unauthorized"), + (status = 500, description = "Internal server error"), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Auth" +)] +pub async fn api_2fa_status( + service: web::Data, + session: Session, +) -> Result { + let resp = service.auth_2fa_status(&session).await?; + Ok(ApiResponse::ok(resp).to_response()) +} diff --git a/libs/api/auth/ws_token.rs b/libs/api/auth/ws_token.rs new file mode 100644 index 0000000..3bc2a53 --- /dev/null +++ b/libs/api/auth/ws_token.rs @@ -0,0 +1,46 @@ +use actix_web::{HttpResponse, Result, web}; +use serde::Serialize; +use session::SessionUser; +use utoipa::ToSchema; + +use crate::ApiResponse; +use crate::error::ApiError; +use service::AppService; +use service::ws_token::WS_TOKEN_TTL_SECONDS; + +#[derive(Debug, Serialize, ToSchema)] +pub struct WsTokenResponse { + pub token: String, + pub expires_in_seconds: i64, +} + +/// Returns a short-lived token that can be used to authenticate WebSocket connections +/// by passing it as a query parameter: `ws://host/ws?token=xxx` +#[utoipa::path( + post, + path = "/api/ws/token", + responses( + (status = 200, description = "Token generated successfully", body = ApiResponse), + (status = 401, description = "Unauthorized - not logged in", body = ApiResponse), + ), + tag = "WebSocket" +)] +pub async fn ws_token_generate( + service: web::Data, + session_user: SessionUser, +) -> Result { + let SessionUser(user_id) = session_user; + + let token = service + .ws_token + .generate_token(user_id) + .await + .map_err(ApiError::from)?; + + let response = WsTokenResponse { + token, + expires_in_seconds: WS_TOKEN_TTL_SECONDS, + }; + + Ok(ApiResponse::ok(response).to_response()) +} diff --git 
a/libs/api/error.rs b/libs/api/error.rs new file mode 100644 index 0000000..ac716d9 --- /dev/null +++ b/libs/api/error.rs @@ -0,0 +1,113 @@ +use actix_web::{HttpResponse, ResponseError}; +use serde::Serialize; +use service::error::AppError; +use utoipa::openapi::schema::{KnownFormat, ObjectBuilder, SchemaFormat, Type}; +use utoipa::openapi::{RefOr, Schema}; +use utoipa::{PartialSchema, ToSchema}; + +#[derive(Debug, Serialize, ToSchema)] +pub struct ApiResponse { + pub code: i32, + pub message: String, + #[serde(skip_serializing_if = "Option::is_none")] + pub data: Option, +} + +impl ApiResponse { + pub fn ok(data: T) -> Self { + Self { + code: 0, + message: "ok".to_string(), + data: Some(data), + } + } + + pub fn to_response(self) -> HttpResponse { + HttpResponse::Ok().json(self) + } +} + +pub fn api_success() -> HttpResponse { + HttpResponse::Ok().json(ApiResponse { + code: 0, + message: "ok".to_string(), + data: None::<()>, + }) +} + +#[derive(Debug, Serialize, ToSchema)] +pub struct ApiErrorResponse { + pub code: i32, + pub error: String, + pub message: String, +} + +#[derive(Debug)] +pub struct ApiError(pub AppError); + +impl std::fmt::Display for ApiError { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + self.0.user_message().fmt(f) + } +} + +impl std::error::Error for ApiError {} + +impl From for ApiError { + fn from(e: AppError) -> Self { + ApiError(e) + } +} + +impl From for ApiError { + fn from(e: room::RoomError) -> Self { + ApiError(e.into()) + } +} + +impl ResponseError for ApiError { + fn error_response(&self) -> HttpResponse { + let err = &self.0; + let status = actix_web::http::StatusCode::from_u16(err.http_status_code()) + .unwrap_or(actix_web::http::StatusCode::INTERNAL_SERVER_ERROR); + let resp = ApiErrorResponse { + code: err.code(), + error: err.slug().to_string(), + message: err.user_message(), + }; + HttpResponse::build(status).json(resp) + } +} + +impl PartialSchema for ApiError { + fn schema() -> RefOr { + 
RefOr::T(Schema::Object( + ObjectBuilder::new() + .property( + "code", + ObjectBuilder::new() + .schema_type(Type::Integer) + .format(Some(SchemaFormat::KnownFormat(KnownFormat::Int32))) + .description(Some("Error numeric code")), + ) + .property( + "error", + ObjectBuilder::new() + .schema_type(Type::String) + .description(Some("Error slug identifier")), + ) + .property( + "message", + ObjectBuilder::new() + .schema_type(Type::String) + .description(Some("Human-readable error message")), + ) + .required("code") + .required("error") + .required("message") + .into(), + )) + } +} + +impl ToSchema for ApiError {} diff --git a/libs/api/gen_api.rs b/libs/api/gen_api.rs new file mode 100644 index 0000000..15e00d9 --- /dev/null +++ b/libs/api/gen_api.rs @@ -0,0 +1,11 @@ +use utoipa::OpenApi; + +fn main() { + let out = api::openapi::OpenApiDoc::openapi().to_pretty_json(); + if let Ok(out) = out { + std::fs::write("openapi.json", out).unwrap(); + } else { + panic!("Failed to generate openapi.json"); + } + std::process::exit(0); +} diff --git a/libs/api/git/archive.rs b/libs/api/git/archive.rs new file mode 100644 index 0000000..4fd736c --- /dev/null +++ b/libs/api/git/archive.rs @@ -0,0 +1,195 @@ +use crate::{ApiResponse, error::ApiError}; +use actix_web::{HttpResponse, Result, web}; +use service::AppService; +use service::git::archive::{ + ArchiveCachedResponse, ArchiveInvalidateAllResponse, ArchiveInvalidateResponse, + ArchiveListResponse, ArchiveQuery, ArchiveResponse, ArchiveSummaryResponse, +}; +use session::Session; + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/archive", + params( + ("namespace" = String, Path), + ("repo" = String, Path), + ("commit_oid" = String, Query), + ("format" = String, Query), + ("prefix" = Option, Query), + ("max_depth" = Option, Query), + ("path_filter" = Option, Query), + ), + responses( + (status = 200, description = "Get archive", body = ApiResponse), + (status = 401, description = "Unauthorized", body = 
ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_archive( + service: web::Data, + session: Session, + path: web::Path<(String, String)>, + query: web::Query, +) -> Result { + let (namespace, repo_name) = path.into_inner(); + let resp = service + .git_archive(namespace, repo_name, query.into_inner(), &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/archive/list", + params( + ("namespace" = String, Path), + ("repo" = String, Path), + ("commit_oid" = String, Query), + ("format" = String, Query), + ("prefix" = Option, Query), + ("max_depth" = Option, Query), + ("path_filter" = Option, Query), + ), + responses( + (status = 200, description = "List archive entries", body = ApiResponse), + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_archive_list( + service: web::Data, + session: Session, + path: web::Path<(String, String)>, + query: web::Query, +) -> Result { + let (namespace, repo_name) = path.into_inner(); + let resp = service + .git_archive_list(namespace, repo_name, query.into_inner(), &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/archive/summary", + params( + ("namespace" = String, Path), + ("repo" = String, Path), + ("commit_oid" = String, Query), + ("format" = String, Query), + ("prefix" = Option, Query), + ("max_depth" = Option, Query), + ("path_filter" = Option, Query), + ), + responses( + (status = 200, description = "Get archive summary", body = ApiResponse), + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_archive_summary( + service: web::Data, + session: Session, + 
path: web::Path<(String, String)>, + query: web::Query, +) -> Result { + let (namespace, repo_name) = path.into_inner(); + let resp = service + .git_archive_summary(namespace, repo_name, query.into_inner(), &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/archive/cached", + params( + ("namespace" = String, Path), + ("repo" = String, Path), + ("commit_oid" = String, Query), + ("format" = String, Query), + ("prefix" = Option, Query), + ("max_depth" = Option, Query), + ("path_filter" = Option, Query), + ), + responses( + (status = 200, description = "Check if archive is cached", body = ApiResponse), + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_archive_cached( + service: web::Data, + session: Session, + path: web::Path<(String, String)>, + query: web::Query, +) -> Result { + let (namespace, repo_name) = path.into_inner(); + let resp = service + .git_archive_cached(namespace, repo_name, query.into_inner(), &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/archive/invalidate", + params( + ("namespace" = String, Path), + ("repo" = String, Path), + ("commit_oid" = String, Query), + ("format" = String, Query), + ("prefix" = Option, Query), + ("max_depth" = Option, Query), + ("path_filter" = Option, Query), + ), + responses( + (status = 200, description = "Invalidate archive cache", body = ApiResponse), + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_archive_invalidate( + service: web::Data, + session: Session, + path: web::Path<(String, String)>, + query: web::Query, +) -> Result { + let (namespace, repo_name) = path.into_inner(); + let resp = service + 
.git_archive_invalidate(namespace, repo_name, query.into_inner(), &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/archive/invalidate/{commit_oid}", + params( + ("namespace" = String, Path), + ("repo" = String, Path), + ("commit_oid" = String, Path), + ), + responses( + (status = 200, description = "Invalidate all archive caches for commit", body = ApiResponse), + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_archive_invalidate_all( + service: web::Data, + session: Session, + path: web::Path<(String, String, String)>, +) -> Result { + let (namespace, repo_name, commit_oid) = path.into_inner(); + let resp = service + .git_archive_invalidate_all(namespace, repo_name, commit_oid, &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} diff --git a/libs/api/git/blame.rs b/libs/api/git/blame.rs new file mode 100644 index 0000000..800a921 --- /dev/null +++ b/libs/api/git/blame.rs @@ -0,0 +1,37 @@ +use crate::{ApiResponse, error::ApiError}; +use actix_web::{HttpResponse, Result, web}; +use service::AppService; +use service::git::blame::BlameQuery; +use session::Session; + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/blame/{commit_oid}/{tail:.*}", + params( + ("namespace" = String, Path), + ("repo" = String, Path), + ("commit_oid" = String, Path), + ("tail" = String, Path, description = "File path within the repository"), + ), + responses( + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 200, body = Vec), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_blame_file( + service: web::Data, + session: Session, + path: web::Path<(String, String, String, String)>, + query: web::Query, +) -> Result { + let (namespace, repo_name, commit_oid, file_path) 
= path.into_inner(); + let mut req = query.into_inner(); + req.commit_oid = commit_oid; + req.path = file_path; + let resp = service + .git_blame_file(namespace, repo_name, req, &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} diff --git a/libs/api/git/blob.rs b/libs/api/git/blob.rs new file mode 100644 index 0000000..b1e0882 --- /dev/null +++ b/libs/api/git/blob.rs @@ -0,0 +1,213 @@ +use crate::{ApiResponse, error::ApiError}; +use actix_web::{HttpResponse, Result, web}; +use service::AppService; +use service::git::blob::{ + BlobContentResponse, BlobCreateRequest, BlobCreateResponse, BlobExistsResponse, BlobGetQuery, + BlobInfoResponse, BlobIsBinaryResponse, BlobSizeResponse, GitReadmeQuery, GitReadmeResponse, +}; +use session::Session; + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/readme", + params( + ("namespace" = String, Path, description = "Project namespace"), + ("repo" = String, Path, description = "Repository name"), + ("ref" = Option, Query, description = "Git reference (branch, tag, commit). 
Defaults to HEAD."), + ), + responses( + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 200, description = "Get README content", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_readme( + service: web::Data, + session: Session, + path: web::Path<(String, String)>, + query: web::Query, +) -> Result { + let (namespace, repo_name) = path.into_inner(); + let resp = service + .git_readme(namespace, repo_name, query.into_inner(), &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/blob/{oid}", + params( + ("namespace" = String, Path, description = "Project namespace"), + ("repo" = String, Path, description = "Repository name"), + ("oid" = String, Path, description = "Blob object ID"), + ), + responses( + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 200, description = "Get blob info", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_blob_get( + service: web::Data, + session: Session, + path: web::Path<(String, String, String)>, + query: web::Query, +) -> Result { + let (namespace, repo_name, oid) = path.into_inner(); + let mut req = query.into_inner(); + req.oid = oid; + let resp = service + .git_blob_get(namespace, repo_name, req, &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/blob/{oid}/exists", + params( + ("namespace" = String, Path, description = "Project namespace"), + ("repo" = String, Path, description = "Repository name"), + ("oid" = String, Path, description = "Blob object ID"), + ), + responses( + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 200, description = "Check blob exists", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn 
git_blob_exists( + service: web::Data, + session: Session, + path: web::Path<(String, String, String)>, + query: web::Query, +) -> Result { + let (namespace, repo_name, oid) = path.into_inner(); + let mut req = query.into_inner(); + req.oid = oid; + let resp = service + .git_blob_exists(namespace, repo_name, req, &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/blob/{oid}/is-binary", + params( + ("namespace" = String, Path, description = "Project namespace"), + ("repo" = String, Path, description = "Repository name"), + ("oid" = String, Path, description = "Blob object ID"), + ), + responses( + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 200, description = "Check if blob is binary", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_blob_is_binary( + service: web::Data, + session: Session, + path: web::Path<(String, String, String)>, + query: web::Query, +) -> Result { + let (namespace, repo_name, oid) = path.into_inner(); + let mut req = query.into_inner(); + req.oid = oid; + let resp = service + .git_blob_is_binary(namespace, repo_name, req, &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/blob/{oid}/content", + params( + ("namespace" = String, Path, description = "Project namespace"), + ("repo" = String, Path, description = "Repository name"), + ("oid" = String, Path, description = "Blob object ID"), + ), + responses( + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 200, description = "Get blob content", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_blob_content( + service: web::Data, + session: Session, + path: web::Path<(String, String, String)>, + query: web::Query, +) 
-> Result { + let (namespace, repo_name, oid) = path.into_inner(); + let mut req = query.into_inner(); + req.oid = oid; + let resp = service + .git_blob_content(namespace, repo_name, req, &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/blob/{oid}/size", + params( + ("namespace" = String, Path, description = "Project namespace"), + ("repo" = String, Path, description = "Repository name"), + ("oid" = String, Path, description = "Blob object ID"), + ), + responses( + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 200, description = "Get blob size", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_blob_size( + service: web::Data, + session: Session, + path: web::Path<(String, String, String)>, + query: web::Query, +) -> Result { + let (namespace, repo_name, oid) = path.into_inner(); + let mut req = query.into_inner(); + req.oid = oid; + let resp = service + .git_blob_size(namespace, repo_name, req, &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + post, + path = "/api/repos/{namespace}/{repo}/git/blob", + request_body = BlobCreateRequest, + params( + ("namespace" = String, Path, description = "Project namespace"), + ("repo" = String, Path, description = "Repository name"), + ), + responses( + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 200, description = "Create blob", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_blob_create( + service: web::Data, + session: Session, + path: web::Path<(String, String)>, + body: web::Json, +) -> Result { + let (namespace, repo_name) = path.into_inner(); + let resp = service + .git_blob_create(namespace, repo_name, body.into_inner(), &session) + .await?; + 
Ok(ApiResponse::ok(resp).to_response()) +} diff --git a/libs/api/git/branch.rs b/libs/api/git/branch.rs new file mode 100644 index 0000000..4f9b6e4 --- /dev/null +++ b/libs/api/git/branch.rs @@ -0,0 +1,579 @@ +use crate::{ApiResponse, error::ApiError}; +use actix_web::{HttpResponse, Result, web}; +use service::AppService; +use service::git::branch::{ + BranchCreateRequest, BranchDiffQuery, BranchDiffResponse, BranchExistsResponse, + BranchFastForwardResponse, BranchInfoResponse, BranchIsAncestorQuery, BranchIsAncestorResponse, + BranchIsConflictedResponse, BranchIsDetachedResponse, BranchIsHeadResponse, + BranchIsMergedQuery, BranchIsMergedResponse, BranchListQuery, BranchMergeBaseQuery, + BranchMergeBaseResponse, BranchMoveRequest, BranchRenameRequest, BranchSetUpstreamRequest, + BranchSummaryResponse, BranchTrackingDiffResponse, +}; +use session::Session; + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/branches", + params( + ("namespace" = String, Path, description = "Project namespace"), + ("repo" = String, Path, description = "Repository name"), + ), + responses( + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 200, description = "List branches", body = ApiResponse>), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_branch_list( + service: web::Data, + session: Session, + path: web::Path<(String, String)>, + query: web::Query, +) -> Result { + let (namespace, repo_name) = path.into_inner(); + let resp = service + .git_branch_list(namespace, repo_name, query.into_inner(), &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/branches/summary", + params( + ("namespace" = String, Path, description = "Project namespace"), + ("repo" = String, Path, description = "Repository name"), + ), + responses( + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 
200, description = "Get branch summary", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_branch_summary( + service: web::Data, + session: Session, + path: web::Path<(String, String)>, +) -> Result { + let (namespace, repo_name) = path.into_inner(); + let resp = service + .git_branch_summary(namespace, repo_name, &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/branches/{name}", + params( + ("namespace" = String, Path, description = "Project namespace"), + ("repo" = String, Path, description = "Repository name"), + ("name" = String, Path, description = "Branch name"), + ), + responses( + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 200, description = "Get branch", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_branch_get( + service: web::Data, + session: Session, + path: web::Path<(String, String, String)>, +) -> Result { + let (namespace, repo_name, name) = path.into_inner(); + let resp = service + .git_branch_get(namespace, repo_name, name, &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/branches/current", + params( + ("namespace" = String, Path, description = "Project namespace"), + ("repo" = String, Path, description = "Repository name"), + ), + responses( + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 200, description = "Get current branch", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_branch_current( + service: web::Data, + session: Session, + path: web::Path<(String, String)>, +) -> Result { + let (namespace, repo_name) = path.into_inner(); + let resp = service + 
.git_branch_current(namespace, repo_name, &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/branches/{name}/exists", + params( + ("namespace" = String, Path, description = "Project namespace"), + ("repo" = String, Path, description = "Repository name"), + ("name" = String, Path, description = "Branch name"), + ), + responses( + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 200, description = "Check branch exists", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_branch_exists( + service: web::Data, + session: Session, + path: web::Path<(String, String, String)>, +) -> Result { + let (namespace, repo_name, name) = path.into_inner(); + let resp = service + .git_branch_exists(namespace, repo_name, name, &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/branches/{name}/is-head", + params( + ("namespace" = String, Path, description = "Project namespace"), + ("repo" = String, Path, description = "Repository name"), + ("name" = String, Path, description = "Branch name"), + ), + responses( + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 200, description = "Check if branch is HEAD", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_branch_is_head( + service: web::Data, + session: Session, + path: web::Path<(String, String, String)>, +) -> Result { + let (namespace, repo_name, name) = path.into_inner(); + let resp = service + .git_branch_is_head(namespace, repo_name, name, &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/branches/is-detached", + params( + ("namespace" = String, Path, description = "Project 
namespace"), + ("repo" = String, Path, description = "Repository name"), + ), + responses( + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 200, description = "Check if HEAD is detached", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_branch_is_detached( + service: web::Data, + session: Session, + path: web::Path<(String, String)>, +) -> Result { + let (namespace, repo_name) = path.into_inner(); + let resp = service + .git_branch_is_detached(namespace, repo_name, &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/branches/{name}/upstream", + params( + ("namespace" = String, Path, description = "Project namespace"), + ("repo" = String, Path, description = "Repository name"), + ("name" = String, Path, description = "Branch name"), + ), + responses( + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 200, description = "Get upstream branch", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_branch_upstream( + service: web::Data, + session: Session, + path: web::Path<(String, String, String)>, +) -> Result { + let (namespace, repo_name, name) = path.into_inner(); + let resp = service + .git_branch_upstream(namespace, repo_name, name, &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/branches/diff", + params( + ("namespace" = String, Path, description = "Project namespace"), + ("repo" = String, Path, description = "Repository name"), + ), + responses( + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 200, description = "Get branch diff", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn 
git_branch_diff( + service: web::Data, + session: Session, + path: web::Path<(String, String)>, + query: web::Query, +) -> Result { + let (namespace, repo_name) = path.into_inner(); + let resp = service + .git_branch_diff(namespace, repo_name, query.into_inner(), &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/branches/{name}/tracking-difference", + params( + ("namespace" = String, Path, description = "Project namespace"), + ("repo" = String, Path, description = "Repository name"), + ("name" = String, Path, description = "Branch name"), + ), + responses( + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 200, description = "Get tracking difference", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_branch_tracking_difference( + service: web::Data, + session: Session, + path: web::Path<(String, String, String)>, +) -> Result { + let (namespace, repo_name, name) = path.into_inner(); + let resp = service + .git_branch_tracking_difference(namespace, repo_name, name, &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + post, + path = "/api/repos/{namespace}/{repo}/git/branches", + request_body = BranchCreateRequest, + params( + ("namespace" = String, Path, description = "Project namespace"), + ("repo" = String, Path, description = "Repository name"), + ), + responses( + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 200, description = "Create branch", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_branch_create( + service: web::Data, + session: Session, + path: web::Path<(String, String)>, + body: web::Json, +) -> Result { + let (namespace, repo_name) = path.into_inner(); + let resp = service + .git_branch_create(namespace, repo_name, 
body.into_inner(), &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + delete, + path = "/api/repos/{namespace}/{repo}/git/branches/{name}", + params( + ("namespace" = String, Path, description = "Project namespace"), + ("repo" = String, Path, description = "Repository name"), + ("name" = String, Path, description = "Branch name"), + ), + responses( + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 200, description = "Delete branch"), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_branch_delete( + service: web::Data, + session: Session, + path: web::Path<(String, String, String)>, +) -> Result { + let (namespace, repo_name, name) = path.into_inner(); + service + .git_branch_delete(namespace, repo_name, name, &session) + .await?; + Ok(HttpResponse::Ok().json(serde_json::json!({ "success": true }))) +} + +#[utoipa::path( + delete, + path = "/api/repos/{namespace}/{repo}/git/branches/remote/{name}", + params( + ("namespace" = String, Path, description = "Project namespace"), + ("repo" = String, Path, description = "Repository name"), + ("name" = String, Path, description = "Remote branch name"), + ), + responses( + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 200, description = "Delete remote branch"), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_branch_delete_remote( + service: web::Data, + session: Session, + path: web::Path<(String, String, String)>, +) -> Result { + let (namespace, repo_name, name) = path.into_inner(); + service + .git_branch_delete_remote(namespace, repo_name, name, &session) + .await?; + Ok(HttpResponse::Ok().json(serde_json::json!({ "success": true }))) +} + +#[utoipa::path( + patch, + path = "/api/repos/{namespace}/{repo}/git/branches/rename", + request_body = BranchRenameRequest, + params( + ("namespace" = String, Path, 
description = "Project namespace"), + ("repo" = String, Path, description = "Repository name"), + ), + responses( + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 200, description = "Rename branch", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_branch_rename( + service: web::Data, + session: Session, + path: web::Path<(String, String)>, + body: web::Json, +) -> Result { + let (namespace, repo_name) = path.into_inner(); + let resp = service + .git_branch_rename(namespace, repo_name, body.into_inner(), &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + patch, + path = "/api/repos/{namespace}/{repo}/git/branches/move", + request_body = BranchMoveRequest, + params( + ("namespace" = String, Path, description = "Project namespace"), + ("repo" = String, Path, description = "Repository name"), + ), + responses( + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 200, description = "Move branch", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_branch_move( + service: web::Data, + session: Session, + path: web::Path<(String, String)>, + body: web::Json, +) -> Result { + let (namespace, repo_name) = path.into_inner(); + let resp = service + .git_branch_move(namespace, repo_name, body.into_inner(), &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + patch, + path = "/api/repos/{namespace}/{repo}/git/branches/upstream", + request_body = BranchSetUpstreamRequest, + params( + ("namespace" = String, Path, description = "Project namespace"), + ("repo" = String, Path, description = "Repository name"), + ), + responses( + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 200, description = "Set upstream branch"), + (status = 404, description = "Not found", body = 
ApiResponse), + ), + tag = "Git" +)] +pub async fn git_branch_set_upstream( + service: web::Data, + session: Session, + path: web::Path<(String, String)>, + body: web::Json, +) -> Result { + let (namespace, repo_name) = path.into_inner(); + service + .git_branch_set_upstream(namespace, repo_name, body.into_inner(), &session) + .await?; + Ok(HttpResponse::Ok().json(serde_json::json!({ "success": true }))) +} + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/branches/is-merged", + params( + ("namespace" = String, Path, description = "Project namespace"), + ("repo" = String, Path, description = "Repository name"), + ), + responses( + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 200, description = "Check if branch is merged", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_branch_is_merged( + service: web::Data, + session: Session, + path: web::Path<(String, String)>, + query: web::Query, +) -> Result { + let (namespace, repo_name) = path.into_inner(); + let resp = service + .git_branch_is_merged(namespace, repo_name, query.into_inner(), &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/branches/merge-base", + params( + ("namespace" = String, Path, description = "Project namespace"), + ("repo" = String, Path, description = "Repository name"), + ), + responses( + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 200, description = "Get merge base", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_branch_merge_base( + service: web::Data, + session: Session, + path: web::Path<(String, String)>, + query: web::Query, +) -> Result { + let (namespace, repo_name) = path.into_inner(); + let resp = service + .git_branch_merge_base(namespace, repo_name, 
query.into_inner(), &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/branches/is-ancestor", + params( + ("namespace" = String, Path, description = "Project namespace"), + ("repo" = String, Path, description = "Repository name"), + ), + responses( + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 200, description = "Check if branch is ancestor", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_branch_is_ancestor( + service: web::Data, + session: Session, + path: web::Path<(String, String)>, + query: web::Query, +) -> Result { + let (namespace, repo_name) = path.into_inner(); + let resp = service + .git_branch_is_ancestor(namespace, repo_name, query.into_inner(), &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + post, + path = "/api/repos/{namespace}/{repo}/git/branches/fast-forward/{target}", + params( + ("namespace" = String, Path, description = "Project namespace"), + ("repo" = String, Path, description = "Repository name"), + ("target" = String, Path, description = "Target branch name"), + ), + responses( + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 200, description = "Fast-forward branch", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_branch_fast_forward( + service: web::Data, + session: Session, + path: web::Path<(String, String, String)>, +) -> Result { + let (namespace, repo_name, target) = path.into_inner(); + let resp = service + .git_branch_fast_forward(namespace, repo_name, target, None, &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/branches/is-conflicted", + params( + ("namespace" = String, Path, description = "Project 
namespace"), + ("repo" = String, Path, description = "Repository name"), + ), + responses( + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 200, description = "Check if branch has conflicts", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_branch_is_conflicted( + service: web::Data, + session: Session, + path: web::Path<(String, String)>, +) -> Result { + let (namespace, repo_name) = path.into_inner(); + let resp = service + .git_branch_is_conflicted(namespace, repo_name, &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} diff --git a/libs/api/git/branch_protection.rs b/libs/api/git/branch_protection.rs new file mode 100644 index 0000000..78bd4d4 --- /dev/null +++ b/libs/api/git/branch_protection.rs @@ -0,0 +1,177 @@ +use crate::error::ApiError; +use crate::{ApiResponse, api_success}; +use actix_web::{HttpResponse, Result, web}; +use service::AppService; +use service::git::branch_protection::{ + ApprovalCheckResult, BranchProtectionCreateRequest, BranchProtectionResponse, + BranchProtectionUpdateRequest, +}; +use session::Session; + +#[derive(serde::Deserialize, utoipa::IntoParams)] +pub struct ProtectionCheckQuery { + pub pr_number: i64, +} + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/branch-protections", + params( + ("namespace" = String, Path, description = "Project namespace"), + ("repo" = String, Path, description = "Repository name"), + ), + responses( + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 200, description = "List branch protection rules", body = ApiResponse>), + ), + tag = "Git" +)] +pub async fn branch_protection_list( + service: web::Data, + session: Session, + path: web::Path<(String, String)>, +) -> Result { + let (namespace, repo) = path.into_inner(); + let rules = service + .branch_protection_list(namespace, repo, &session) + .await?; + 
Ok(ApiResponse::ok(rules).to_response()) +} + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/branch-protections/{id}", + params( + ("namespace" = String, Path, description = "Project namespace"), + ("repo" = String, Path, description = "Repository name"), + ("id" = i64, Path, description = "Rule id"), + ), + responses( + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 200, description = "Get a branch protection rule", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn branch_protection_get( + service: web::Data, + session: Session, + path: web::Path<(String, String, i64)>, +) -> Result { + let (namespace, repo, id) = path.into_inner(); + let rule = service + .branch_protection_get(namespace, repo, id, &session) + .await?; + Ok(ApiResponse::ok(rule).to_response()) +} + +#[utoipa::path( + post, + path = "/api/repos/{namespace}/{repo}/branch-protections", + params( + ("namespace" = String, Path, description = "Project namespace"), + ("repo" = String, Path, description = "Repository name"), + ), + request_body = BranchProtectionCreateRequest, + responses( + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 200, description = "Create a branch protection rule", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn branch_protection_create( + service: web::Data, + session: Session, + path: web::Path<(String, String)>, + body: web::Json, +) -> Result { + let (namespace, repo) = path.into_inner(); + let rule = service + .branch_protection_create(namespace, repo, body.into_inner(), &session) + .await?; + Ok(ApiResponse::ok(rule).to_response()) +} + +#[utoipa::path( + patch, + path = "/api/repos/{namespace}/{repo}/branch-protections/{id}", + params( + ("namespace" = String, Path, description = "Project namespace"), + ("repo" = String, Path, description = "Repository name"), + ("id" = i64, Path, description = "Rule id"), + ), + 
request_body = BranchProtectionUpdateRequest, + responses( + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 200, description = "Update a branch protection rule", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn branch_protection_update( + service: web::Data, + session: Session, + path: web::Path<(String, String, i64)>, + body: web::Json, +) -> Result { + let (namespace, repo, id) = path.into_inner(); + let rule = service + .branch_protection_update(namespace, repo, id, body.into_inner(), &session) + .await?; + Ok(ApiResponse::ok(rule).to_response()) +} + +#[utoipa::path( + delete, + path = "/api/repos/{namespace}/{repo}/branch-protections/{id}", + params( + ("namespace" = String, Path, description = "Project namespace"), + ("repo" = String, Path, description = "Repository name"), + ("id" = i64, Path, description = "Rule id"), + ), + responses( + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 200, description = "Delete a branch protection rule"), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn branch_protection_delete( + service: web::Data, + session: Session, + path: web::Path<(String, String, i64)>, +) -> Result { + let (namespace, repo, id) = path.into_inner(); + service + .branch_protection_delete(namespace, repo, id, &session) + .await?; + Ok(api_success()) +} + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/branch-protections/check-approvals", + params( + ("namespace" = String, Path, description = "Project namespace"), + ("repo" = String, Path, description = "Repository name"), + ("pr_number" = i64, Query, description = "Pull request number"), + ), + responses( + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 200, description = "Check approval count against branch protection", body = ApiResponse), + (status = 404, description 
= "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn branch_protection_check_approvals( + service: web::Data, + session: Session, + path: web::Path<(String, String)>, + query: web::Query, +) -> Result { + let (namespace, repo) = path.into_inner(); + let result = service + .branch_protection_check_approvals(namespace, repo, query.pr_number, &session) + .await?; + Ok(ApiResponse::ok(result).to_response()) +} diff --git a/libs/api/git/commit.rs b/libs/api/git/commit.rs new file mode 100644 index 0000000..02ad5f3 --- /dev/null +++ b/libs/api/git/commit.rs @@ -0,0 +1,1002 @@ +use crate::{ApiResponse, error::ApiError}; +use actix_web::{HttpResponse, Result, web}; +use service::AppService; +use service::git::commit::{ + CommitAmendRequest, CommitAncestorsQuery, CommitCherryPickAbortRequest, + CommitCherryPickRequest, CommitCreateRequest, CommitCreateResponse, CommitDescendantsQuery, + CommitGetQuery, CommitGraphReactResponse, CommitLogQuery, CommitLogResponse, + CommitResolveQuery, CommitRevertAbortRequest, CommitRevertRequest, CommitWalkQuery, +}; +use session::Session; + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/commits/{oid}", + params( + ("namespace" = String, Path), + ("repo" = String, Path), + ), + responses( + (status = 200, description = "Get commit metadata", body = ApiResponse), + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_commit_get( + service: web::Data, + session: Session, + path: web::Path<(String, String, String)>, + query: web::Query, +) -> Result { + let (namespace, repo_name, oid) = path.into_inner(); + let mut req = query.into_inner(); + req.oid = oid; + let resp = service + .git_commit_get(namespace, repo_name, req, &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/commits/{oid}/exists", + 
params( + ("namespace" = String, Path), + ("repo" = String, Path), + ), + responses( + (status = 200, description = "Check if commit exists", body = ApiResponse), + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" + +)] +pub async fn git_commit_exists( + service: web::Data, + session: Session, + path: web::Path<(String, String, String)>, + query: web::Query, +) -> Result { + let (namespace, repo_name, oid) = path.into_inner(); + let mut req = query.into_inner(); + req.oid = oid; + let resp = service + .git_commit_exists(namespace, repo_name, req, &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/commits/{oid}/is-commit", + params( + ("namespace" = String, Path), + ("repo" = String, Path), + ), + responses( + (status = 200, description = "Check if object is a commit", body = ApiResponse), + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" + +)] +pub async fn git_commit_is_commit( + service: web::Data, + session: Session, + path: web::Path<(String, String, String)>, + query: web::Query, +) -> Result { + let (namespace, repo_name, oid) = path.into_inner(); + let mut req = query.into_inner(); + req.oid = oid; + let resp = service + .git_commit_is_commit(namespace, repo_name, req, &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/commits/{oid}/message", + params( + ("namespace" = String, Path), + ("repo" = String, Path), + ), + responses( + (status = 200, description = "Get commit message", body = ApiResponse), + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" + +)] +pub async fn git_commit_message( + 
service: web::Data, + session: Session, + path: web::Path<(String, String, String)>, + query: web::Query, +) -> Result { + let (namespace, repo_name, oid) = path.into_inner(); + let mut req = query.into_inner(); + req.oid = oid; + let resp = service + .git_commit_message(namespace, repo_name, req, &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/commits/{oid}/summary", + params( + ("namespace" = String, Path), + ("repo" = String, Path), + ), + responses( + (status = 200, description = "Get commit summary", body = ApiResponse), + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" + +)] +pub async fn git_commit_summary( + service: web::Data, + session: Session, + path: web::Path<(String, String, String)>, + query: web::Query, +) -> Result { + let (namespace, repo_name, oid) = path.into_inner(); + let mut req = query.into_inner(); + req.oid = oid; + let resp = service + .git_commit_summary(namespace, repo_name, req, &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/commits/{oid}/short-id", + params( + ("namespace" = String, Path), + ("repo" = String, Path), + ), + responses( + (status = 200, description = "Get commit short ID", body = ApiResponse), + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" + +)] +pub async fn git_commit_short_id( + service: web::Data, + session: Session, + path: web::Path<(String, String, String)>, + query: web::Query, +) -> Result { + let (namespace, repo_name, oid) = path.into_inner(); + let mut req = query.into_inner(); + req.oid = oid; + let resp = service + .git_commit_short_id(namespace, repo_name, req, &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + 
+#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/commits/{oid}/author", + params( + ("namespace" = String, Path), + ("repo" = String, Path), + ), + responses( + (status = 200, description = "Get commit author", body = ApiResponse), + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" + +)] +pub async fn git_commit_author( + service: web::Data, + session: Session, + path: web::Path<(String, String, String)>, + query: web::Query, +) -> Result { + let (namespace, repo_name, oid) = path.into_inner(); + let mut req = query.into_inner(); + req.oid = oid; + let resp = service + .git_commit_author(namespace, repo_name, req, &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/commits/{oid}/tree-id", + params( + ("namespace" = String, Path), + ("repo" = String, Path), + ), + responses( + (status = 200, description = "Get commit tree ID", body = ApiResponse), + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" + +)] +pub async fn git_commit_tree_id( + service: web::Data, + session: Session, + path: web::Path<(String, String, String)>, + query: web::Query, +) -> Result { + let (namespace, repo_name, oid) = path.into_inner(); + let mut req = query.into_inner(); + req.oid = oid; + let resp = service + .git_commit_tree_id(namespace, repo_name, req, &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/commits/{oid}/parent-count", + params( + ("namespace" = String, Path), + ("repo" = String, Path), + ), + responses( + (status = 200, description = "Get commit parent count", body = ApiResponse), + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 404, description = "Not found", 
body = ApiResponse), + ), + tag = "Git" + +)] +pub async fn git_commit_parent_count( + service: web::Data, + session: Session, + path: web::Path<(String, String, String)>, + query: web::Query, +) -> Result { + let (namespace, repo_name, oid) = path.into_inner(); + let mut req = query.into_inner(); + req.oid = oid; + let resp = service + .git_commit_parent_count(namespace, repo_name, req, &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/commits/{oid}/parent-ids", + params( + ("namespace" = String, Path), + ("repo" = String, Path), + ), + responses( + (status = 200, description = "Get commit parent IDs", body = ApiResponse), + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" + +)] +pub async fn git_commit_parent_ids( + service: web::Data, + session: Session, + path: web::Path<(String, String, String)>, + query: web::Query, +) -> Result { + let (namespace, repo_name, oid) = path.into_inner(); + let mut req = query.into_inner(); + req.oid = oid; + let resp = service + .git_commit_parent_ids(namespace, repo_name, req, &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/commits/{oid}/parent/{index}", + params( + ("namespace" = String, Path), + ("repo" = String, Path), + ("index" = usize, Path), + ), + responses( + (status = 200, description = "Get commit parent", body = ApiResponse), + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" + +)] +pub async fn git_commit_parent( + service: web::Data, + session: Session, + path: web::Path<(String, String, String, usize)>, +) -> Result { + let (namespace, repo_name, oid, index) = path.into_inner(); + let resp = service + .git_commit_parent(namespace, repo_name, 
oid, index, &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/commits/{oid}/first-parent", + params( + ("namespace" = String, Path), + ("repo" = String, Path), + ), + responses( + (status = 200, description = "Get commit first parent", body = ApiResponse>), + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" + +)] +pub async fn git_commit_first_parent( + service: web::Data, + session: Session, + path: web::Path<(String, String, String)>, + query: web::Query, +) -> Result { + let (namespace, repo_name, oid) = path.into_inner(); + let mut req = query.into_inner(); + req.oid = oid; + let resp = service + .git_commit_first_parent(namespace, repo_name, req, &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/commits/{oid}/is-merge", + params( + ("namespace" = String, Path), + ("repo" = String, Path), + ), + responses( + (status = 200, description = "Check if commit is a merge", body = ApiResponse), + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" + +)] +pub async fn git_commit_is_merge( + service: web::Data, + session: Session, + path: web::Path<(String, String, String)>, + query: web::Query, +) -> Result { + let (namespace, repo_name, oid) = path.into_inner(); + let mut req = query.into_inner(); + req.oid = oid; + let resp = service + .git_commit_is_merge(namespace, repo_name, req, &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/commits", + params( + ("namespace" = String, Path), + ("repo" = String, Path), + ("rev" = Option, Query), + ("per_page" = Option, Query), + ("page" = Option, Query), + ), + responses( + 
(status = 200, description = "Get commit log (paginated)", body = ApiResponse), + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" + +)] +pub async fn git_commit_log( + service: web::Data, + session: Session, + path: web::Path<(String, String)>, + query: web::Query, +) -> Result { + let (namespace, repo_name) = path.into_inner(); + let resp = service + .git_commit_log(namespace, repo_name, query.into_inner(), &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/commits/count", + params( + ("namespace" = String, Path), + ("repo" = String, Path), + ), + responses( + (status = 200, description = "Get commit count", body = ApiResponse), + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" + +)] +pub async fn git_commit_count( + service: web::Data, + session: Session, + path: web::Path<(String, String)>, + query: web::Query, +) -> Result { + let (namespace, repo_name) = path.into_inner(); + let resp = service + .git_commit_count( + namespace, + repo_name, + query.from.clone(), + query.to.clone(), + &session, + ) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/commits/{oid}/refs", + params( + ("namespace" = String, Path), + ("repo" = String, Path), + ), + responses( + (status = 200, description = "Get commit refs", body = ApiResponse>), + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" + +)] +pub async fn git_commit_refs( + service: web::Data, + session: Session, + path: web::Path<(String, String, String)>, + query: web::Query, +) -> Result { + let (namespace, repo_name, oid) = path.into_inner(); + let mut req = 
query.into_inner(); + req.oid = oid; + let resp = service + .git_commit_refs(namespace, repo_name, req, &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/commits/branches", + params( + ("namespace" = String, Path), + ("repo" = String, Path), + ), + responses( + (status = 200, description = "Get commit branches", body = ApiResponse), + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" + +)] +pub async fn git_commit_branches( + service: web::Data, + session: Session, + path: web::Path<(String, String)>, + query: web::Query, +) -> Result { + let (namespace, repo_name) = path.into_inner(); + let resp = service + .git_commit_branches(namespace, repo_name, query.into_inner(), &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/commits/tags", + params( + ("namespace" = String, Path), + ("repo" = String, Path), + ), + responses( + (status = 200, description = "Get commit tags", body = ApiResponse), + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" + +)] +pub async fn git_commit_tags( + service: web::Data, + session: Session, + path: web::Path<(String, String)>, + query: web::Query, +) -> Result { + let (namespace, repo_name) = path.into_inner(); + let resp = service + .git_commit_tags(namespace, repo_name, query.into_inner(), &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/commits/{oid}/is-tip", + params( + ("namespace" = String, Path), + ("repo" = String, Path), + ), + responses( + (status = 200, description = "Check if commit is a tip", body = ApiResponse), + (status = 401, description = "Unauthorized", body = 
ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" + +)] +pub async fn git_commit_is_tip( + service: web::Data, + session: Session, + path: web::Path<(String, String, String)>, + query: web::Query, +) -> Result { + let (namespace, repo_name, oid) = path.into_inner(); + let mut req = query.into_inner(); + req.oid = oid; + let resp = service + .git_commit_is_tip(namespace, repo_name, req, &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/commits/{oid}/ref-count", + params( + ("namespace" = String, Path), + ("repo" = String, Path), + ), + responses( + (status = 200, description = "Get commit ref count", body = ApiResponse), + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" + +)] +pub async fn git_commit_ref_count( + service: web::Data, + session: Session, + path: web::Path<(String, String, String)>, + query: web::Query, +) -> Result { + let (namespace, repo_name, oid) = path.into_inner(); + let mut req = query.into_inner(); + req.oid = oid; + let resp = service + .git_commit_ref_count(namespace, repo_name, req, &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/commits/reflog", + params( + ("namespace" = String, Path), + ("repo" = String, Path), + ), + responses( + (status = 200, description = "Get commit reflog", body = ApiResponse>), + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" + +)] +pub async fn git_commit_reflog( + service: web::Data, + session: Session, + path: web::Path<(String, String)>, + query: web::Query, + refname: web::Query, +) -> Result { + let (namespace, repo_name) = path.into_inner(); + let resp = service + .git_commit_reflog( + 
namespace, + repo_name, + query.into_inner(), + refname.refname.clone(), + &session, + ) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/commits/graph", + params( + ("namespace" = String, Path), + ("repo" = String, Path), + ), + responses( + (status = 200, description = "Get commit graph", body = ApiResponse), + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" + +)] +pub async fn git_commit_graph( + service: web::Data, + session: Session, + path: web::Path<(String, String)>, + query: web::Query, +) -> Result { + let (namespace, repo_name) = path.into_inner(); + let resp = service + .git_commit_graph(namespace, repo_name, query.into_inner(), &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +/// Returns commit graph data enriched with full commit metadata (author, timestamp, +/// parents, lane_index) for use with @gitgraph/react on the frontend. 
+#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/commits/graph-react", + params( + ("namespace" = String, Path), + ("repo" = String, Path), + ), + responses( + (status = 200, description = "Get commit graph for gitgraph-react", body = ApiResponse), + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_commit_graph_react( + service: web::Data, + session: Session, + path: web::Path<(String, String)>, + query: web::Query, +) -> Result { + let (namespace, repo_name) = path.into_inner(); + let resp = service + .git_commit_graph_react(namespace, repo_name, query.into_inner(), &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/commits/walk", + params( + ("namespace" = String, Path), + ("repo" = String, Path), + ), + responses( + (status = 200, description = "Walk commits", body = ApiResponse>), + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" + +)] +pub async fn git_commit_walk( + service: web::Data, + session: Session, + path: web::Path<(String, String)>, + query: web::Query, +) -> Result { + let (namespace, repo_name) = path.into_inner(); + let resp = service + .git_commit_walk(namespace, repo_name, query.into_inner(), &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/commits/{oid}/ancestors", + params( + ("namespace" = String, Path), + ("repo" = String, Path), + ), + responses( + (status = 200, description = "Get commit ancestors", body = ApiResponse>), + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" + +)] +pub async fn git_commit_ancestors( + service: 
web::Data, + session: Session, + path: web::Path<(String, String, String)>, + query: web::Query, +) -> Result { + let (namespace, repo_name, oid) = path.into_inner(); + let mut req = query.into_inner(); + req.oid = oid; + let resp = service + .git_commit_ancestors(namespace, repo_name, req, &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/commits/{oid}/descendants", + params( + ("namespace" = String, Path), + ("repo" = String, Path), + ), + responses( + (status = 200, description = "Get commit descendants", body = ApiResponse>), + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" + +)] +pub async fn git_commit_descendants( + service: web::Data, + session: Session, + path: web::Path<(String, String, String)>, + query: web::Query, +) -> Result { + let (namespace, repo_name, oid) = path.into_inner(); + let mut req = query.into_inner(); + req.oid = oid; + let resp = service + .git_commit_descendants(namespace, repo_name, req, &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/commits/resolve", + params( + ("namespace" = String, Path), + ("repo" = String, Path), + ), + responses( + (status = 200, description = "Resolve revision to commit", body = ApiResponse), + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" + +)] +pub async fn git_commit_resolve_rev( + service: web::Data, + session: Session, + path: web::Path<(String, String)>, + query: web::Query, +) -> Result { + let (namespace, repo_name) = path.into_inner(); + let resp = service + .git_commit_resolve_rev(namespace, repo_name, query.into_inner(), &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + post, + path = 
"/api/repos/{namespace}/{repo}/git/commits", + params( + ("namespace" = String, Path), + ("repo" = String, Path), + ), + request_body = CommitCreateRequest, + responses( + (status = 200, description = "Create commit", body = ApiResponse), + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" + +)] +pub async fn git_commit_create( + service: web::Data, + session: Session, + path: web::Path<(String, String)>, + body: web::Json, +) -> Result { + let (namespace, repo_name) = path.into_inner(); + let resp = service + .git_commit_create(namespace, repo_name, body.into_inner(), &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + patch, + path = "/api/repos/{namespace}/{repo}/git/commits/{oid}/amend", + params( + ("namespace" = String, Path), + ("repo" = String, Path), + ), + request_body = CommitAmendRequest, + responses( + (status = 200, description = "Amend commit", body = ApiResponse), + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" + +)] +pub async fn git_commit_amend( + service: web::Data, + session: Session, + path: web::Path<(String, String, String)>, + body: web::Json, +) -> Result { + let (namespace, repo_name, _oid) = path.into_inner(); + let resp = service + .git_commit_amend(namespace, repo_name, body.into_inner(), &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + post, + path = "/api/repos/{namespace}/{repo}/git/commits/{oid}/cherry-pick", + params( + ("namespace" = String, Path), + ("repo" = String, Path), + ), + request_body = CommitCherryPickRequest, + responses( + (status = 200, description = "Cherry-pick commit", body = ApiResponse), + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" + 
+)] +pub async fn git_commit_cherry_pick( + service: web::Data, + session: Session, + path: web::Path<(String, String, String)>, + body: web::Json, +) -> Result { + let (namespace, repo_name, _oid) = path.into_inner(); + let resp = service + .git_commit_cherry_pick(namespace, repo_name, body.into_inner(), &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + post, + path = "/api/repos/{namespace}/{repo}/git/commits/{oid}/cherry-pick/abort", + params( + ("namespace" = String, Path), + ("repo" = String, Path), + ), + request_body = CommitCherryPickAbortRequest, + responses( + (status = 200, description = "Abort cherry-pick", body = ApiResponse), + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" + +)] +pub async fn git_commit_cherry_pick_abort( + service: web::Data, + session: Session, + path: web::Path<(String, String, String)>, + body: web::Json, +) -> Result { + let (namespace, repo_name, _oid) = path.into_inner(); + service + .git_commit_cherry_pick_abort(namespace, repo_name, body.into_inner(), &session) + .await?; + Ok(ApiResponse::ok(true).to_response()) +} + +#[utoipa::path( + post, + path = "/api/repos/{namespace}/{repo}/git/commits/{oid}/revert", + params( + ("namespace" = String, Path), + ("repo" = String, Path), + ), + request_body = CommitRevertRequest, + responses( + (status = 200, description = "Revert commit", body = ApiResponse), + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" + +)] +pub async fn git_commit_revert( + service: web::Data, + session: Session, + path: web::Path<(String, String, String)>, + body: web::Json, +) -> Result { + let (namespace, repo_name, _oid) = path.into_inner(); + let resp = service + .git_commit_revert(namespace, repo_name, body.into_inner(), &session) + .await?; + 
Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + post, + path = "/api/repos/{namespace}/{repo}/git/commits/{oid}/revert/abort", + params( + ("namespace" = String, Path), + ("repo" = String, Path), + ), + request_body = CommitRevertAbortRequest, + responses( + (status = 200, description = "Abort revert", body = ApiResponse), + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" + +)] +pub async fn git_commit_revert_abort( + service: web::Data, + session: Session, + path: web::Path<(String, String, String)>, + body: web::Json, +) -> Result { + let (namespace, repo_name, _oid) = path.into_inner(); + service + .git_commit_revert_abort(namespace, repo_name, body.into_inner(), &session) + .await?; + Ok(ApiResponse::ok(true).to_response()) +} + +// Query helpers +#[derive(serde::Deserialize, utoipa::IntoParams)] +pub struct CommitCountQuery { + pub from: Option, + pub to: Option, +} + +#[derive(serde::Deserialize, utoipa::IntoParams)] +pub struct CommitReflogQuery { + pub refname: Option, +} diff --git a/libs/api/git/contributors.rs b/libs/api/git/contributors.rs new file mode 100644 index 0000000..1bd21d5 --- /dev/null +++ b/libs/api/git/contributors.rs @@ -0,0 +1,33 @@ +use crate::{ApiResponse, error::ApiError}; +use actix_web::{HttpResponse, Result, web}; +use service::AppService; +use service::git::contributors::{ContributorsQuery, ContributorsResponse}; +use session::Session; + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/contributors", + params( + ("namespace" = String, Path, description = "Repository namespace"), + ("repo" = String, Path, description = "Repository name"), + ), + responses( + (status = 200, description = "List of contributors", body = ApiResponse), + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" + +)] +pub async 
fn git_contributors( + service: web::Data, + session: Session, + path: web::Path<(String, String)>, + query: web::Query, +) -> Result { + let (namespace, repo_name) = path.into_inner(); + let resp = service + .git_contributors(namespace, repo_name, query.into_inner(), &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} diff --git a/libs/api/git/diff.rs b/libs/api/git/diff.rs new file mode 100644 index 0000000..33d709d --- /dev/null +++ b/libs/api/git/diff.rs @@ -0,0 +1,232 @@ +use crate::{ApiResponse, error::ApiError}; +use actix_web::{HttpResponse, Result, web}; +use service::AppService; +use service::git::diff::{ + DiffCommitQuery, DiffPatchIdResponse, DiffQuery, DiffResultResponse, DiffStatsResponse, + SideBySideDiffQuery, SideBySideDiffResponse, +}; +use session::Session; + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/diff", + params( + ("namespace" = String, Path, description = "Repository namespace"), + ("repo" = String, Path, description = "Repository name"), + ("old_tree" = String, Query, description = "Old tree OID (commit or tree SHA)"), + ("new_tree" = String, Query, description = "New tree OID (commit or tree SHA)"), + ), + responses( + (status = 200, description = "Tree to tree diff", body = ApiResponse), + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_diff_tree_to_tree( + service: web::Data, + session: Session, + path: web::Path<(String, String)>, + query: web::Query, +) -> Result { + let (namespace, repo_name) = path.into_inner(); + let resp = service + .git_diff_tree_to_tree(namespace, repo_name, query.into_inner(), &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/diff/commit/{commit}", + params( + ("namespace" = String, Path, description = "Repository namespace"), + ("repo" = String, Path, description = 
"Repository name"), + ("commit" = String, Path, description = "Commit identifier"), + ), + responses( + (status = 200, description = "Commit to workdir diff", body = ApiResponse), + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_diff_commit_to_workdir( + service: web::Data, + session: Session, + path: web::Path<(String, String, String)>, + query: web::Query, +) -> Result { + let (namespace, repo_name, commit) = path.into_inner(); + let mut req = query.into_inner(); + req.commit = commit; + let resp = service + .git_diff_commit_to_workdir(namespace, repo_name, req, &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/diff/commit/{commit}/index", + params( + ("namespace" = String, Path, description = "Repository namespace"), + ("repo" = String, Path, description = "Repository name"), + ("commit" = String, Path, description = "Commit identifier"), + ), + responses( + (status = 200, description = "Commit to index diff", body = ApiResponse), + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_diff_commit_to_index( + service: web::Data, + session: Session, + path: web::Path<(String, String, String)>, + query: web::Query, +) -> Result { + let (namespace, repo_name, commit) = path.into_inner(); + let mut req = query.into_inner(); + req.commit = commit; + let resp = service + .git_diff_commit_to_index(namespace, repo_name, req, &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/diff/workdir", + params( + ("namespace" = String, Path, description = "Repository namespace"), + ("repo" = String, Path, description = "Repository name"), + ), + responses( + (status = 200, 
description = "Workdir to index diff", body = ApiResponse), + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_diff_workdir_to_index( + service: web::Data, + session: Session, + path: web::Path<(String, String)>, + query: web::Query, +) -> Result { + let (namespace, repo_name) = path.into_inner(); + let resp = service + .git_diff_workdir_to_index(namespace, repo_name, query.into_inner(), &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/diff/index", + params( + ("namespace" = String, Path, description = "Repository namespace"), + ("repo" = String, Path, description = "Repository name"), + ), + responses( + (status = 200, description = "Index to tree diff", body = ApiResponse), + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_diff_index_to_tree( + service: web::Data, + session: Session, + path: web::Path<(String, String)>, + query: web::Query, +) -> Result { + let (namespace, repo_name) = path.into_inner(); + let resp = service + .git_diff_index_to_tree(namespace, repo_name, query.into_inner(), &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/diff/stats", + params( + ("namespace" = String, Path, description = "Repository namespace"), + ("repo" = String, Path, description = "Repository name"), + ), + responses( + (status = 200, description = "Diff statistics", body = ApiResponse), + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_diff_stats( + service: web::Data, + session: Session, + path: web::Path<(String, String)>, + query: 
web::Query, +) -> Result { + let (namespace, repo_name) = path.into_inner(); + let resp = service + .git_diff_stats(namespace, repo_name, query.into_inner(), &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/diff/patch-id", + params( + ("namespace" = String, Path, description = "Repository namespace"), + ("repo" = String, Path, description = "Repository name"), + ), + responses( + (status = 200, description = "Patch ID", body = ApiResponse), + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_diff_patch_id( + service: web::Data, + session: Session, + path: web::Path<(String, String)>, + query: web::Query, +) -> Result { + let (namespace, repo_name) = path.into_inner(); + let resp = service + .git_diff_patch_id(namespace, repo_name, query.into_inner(), &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/diff/side-by-side", + params( + ("namespace" = String, Path, description = "Repository namespace"), + ("repo" = String, Path, description = "Repository name"), + ), + responses( + (status = 200, description = "Side-by-side diff", body = ApiResponse), + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_diff_side_by_side( + service: web::Data, + session: Session, + path: web::Path<(String, String)>, + query: web::Query, +) -> Result { + let (namespace, repo_name) = path.into_inner(); + let resp = service + .git_diff_side_by_side(namespace, repo_name, query.into_inner(), &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} diff --git a/libs/api/git/init.rs b/libs/api/git/init.rs new file mode 100644 index 0000000..37142dc --- /dev/null +++ 
b/libs/api/git/init.rs @@ -0,0 +1,113 @@ +use crate::{ApiResponse, error::ApiError}; +use actix_web::{HttpResponse, Result, web}; +use service::AppService; +use service::git::init::GitInitRequest; +use session::Session; + +#[utoipa::path( + post, + path = "/api/git/init", + request_body = GitInitRequest, + responses( + (status = 200, description = "Bare repository initialized", body = ApiResponse), + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_init_bare( + service: web::Data, + body: web::Json, +) -> Result { + let resp = service.git_init_bare(body.into_inner()).await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/git/open/{path}", + params( + ("path" = String, Path, description = "Repository path"), + ), + responses( + (status = 200, description = "Open repository", body = ApiResponse), + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_open( + service: web::Data, + path: web::Path, +) -> Result { + let resp = service.git_open(path.into_inner()).await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/git/open/{path}/workdir", + params( + ("path" = String, Path, description = "Repository path"), + ), + responses( + (status = 200, description = "Open repository working directory", body = ApiResponse), + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_open_workdir( + service: web::Data, + path: web::Path, +) -> Result { + let resp = service.git_open_workdir(path.into_inner()).await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/git/is-repo/{path}", + params( + ("path" = String, 
Path, description = "Repository path"), + ), + responses( + (status = 200, description = "Check if path is a repository", body = ApiResponse), + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_is_repo( + service: web::Data, + path: web::Path, +) -> Result { + let resp = service.git_is_repo(path.into_inner()).await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/path", + params( + ("namespace" = String, Path, description = "Repository namespace"), + ("repo" = String, Path, description = "Repository name"), + ), + responses( + (status = 200, description = "Repository path", body = ApiResponse), + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_repo_path( + service: web::Data, + session: Session, + path: web::Path<(String, String)>, +) -> Result { + let (namespace, repo_name) = path.into_inner(); + let resp = service + .git_repo_path(namespace, repo_name, &session) + .await?; + Ok(HttpResponse::Ok().json(serde_json::json!({ "path": resp }))) +} diff --git a/libs/api/git/mod.rs b/libs/api/git/mod.rs new file mode 100644 index 0000000..202dc25 --- /dev/null +++ b/libs/api/git/mod.rs @@ -0,0 +1,417 @@ +pub mod archive; +pub mod blame; +pub mod blob; +pub mod branch; +pub mod branch_protection; +pub mod commit; +pub mod contributors; +pub mod diff; +pub mod init; +pub mod refs; +pub mod repo; +pub mod star; +pub mod tag; +pub mod tree; +pub mod watch; +pub mod webhook; + +use actix_web::web; + +pub fn init_git_routes(cfg: &mut web::ServiceConfig) { + cfg.service( + web::scope("/repos/{namespace}/{repo}/git") + .route("/archive", web::get().to(archive::git_archive)) + .route("/archive/list", web::get().to(archive::git_archive_list)) + .route( + 
"/archive/summary", + web::get().to(archive::git_archive_summary), + ) + .route( + "/archive/cached", + web::get().to(archive::git_archive_cached), + ) + .route( + "/archive/invalidate", + web::get().to(archive::git_archive_invalidate), + ) + .route( + "/archive/invalidate-all/{commit_oid}", + web::get().to(archive::git_archive_invalidate_all), + ) + // blame + .route( + "/blame/{commit_oid}/{tail:.*}", + web::get().to(blame::git_blame_file), + ) + // blob + .route("/blob", web::post().to(blob::git_blob_create)) + .route("/blob/{oid}", web::get().to(blob::git_blob_get)) + .route("/blob/{oid}/exists", web::get().to(blob::git_blob_exists)) + .route( + "/blob/{oid}/is-binary", + web::get().to(blob::git_blob_is_binary), + ) + .route("/blob/{oid}/content", web::get().to(blob::git_blob_content)) + .route("/blob/{oid}/size", web::get().to(blob::git_blob_size)) + .route("/readme", web::get().to(blob::git_readme)) + // branch + .route("/branches", web::get().to(branch::git_branch_list)) + .route( + "/branches/summary", + web::get().to(branch::git_branch_summary), + ) + .route("/branches", web::post().to(branch::git_branch_create)) + // NOTE: /branches/current MUST be before /branches/{name} to avoid being shadowed + .route( + "/branches/current", + web::get().to(branch::git_branch_current), + ) + .route("/branches/{name}", web::get().to(branch::git_branch_get)) + .route( + "/branches/{name}", + web::delete().to(branch::git_branch_delete), + ) + .route( + "/branches/{name}/exists", + web::get().to(branch::git_branch_exists), + ) + .route( + "/branches/{name}/is-head", + web::get().to(branch::git_branch_is_head), + ) + .route( + "/branches/{name}/upstream", + web::get().to(branch::git_branch_upstream), + ) + .route( + "/branches/{name}/tracking-difference", + web::get().to(branch::git_branch_tracking_difference), + ) + .route( + "/branches/remote/{name}", + web::delete().to(branch::git_branch_delete_remote), + ) + .route( + "/branches/rename", + 
web::patch().to(branch::git_branch_rename), + ) + .route("/branches/move", web::patch().to(branch::git_branch_move)) + .route( + "/branches/upstream", + web::patch().to(branch::git_branch_set_upstream), + ) + .route("/branches/diff", web::get().to(branch::git_branch_diff)) + .route( + "/branches/is-detached", + web::get().to(branch::git_branch_is_detached), + ) + .route( + "/branches/is-merged", + web::get().to(branch::git_branch_is_merged), + ) + .route( + "/branches/merge-base", + web::get().to(branch::git_branch_merge_base), + ) + .route( + "/branches/is-ancestor", + web::get().to(branch::git_branch_is_ancestor), + ) + .route( + "/branches/fast-forward/{target}", + web::post().to(branch::git_branch_fast_forward), + ) + .route( + "/branches/is-conflicted", + web::get().to(branch::git_branch_is_conflicted), + ) + // commit + .route("/commits", web::get().to(commit::git_commit_log)) + .route("/commits/count", web::get().to(commit::git_commit_count)) + .route("/commits", web::post().to(commit::git_commit_create)) + .route("/commits/graph", web::get().to(commit::git_commit_graph)) + .route( + "/commits/graph-react", + web::get().to(commit::git_commit_graph_react), + ) + .route("/commits/walk", web::get().to(commit::git_commit_walk)) + .route( + "/commits/resolve/{rev}", + web::get().to(commit::git_commit_resolve_rev), + ) + .route("/commits/{oid}", web::get().to(commit::git_commit_get)) + .route("/commits/{oid}", web::patch().to(commit::git_commit_amend)) + .route( + "/commits/{oid}/exists", + web::get().to(commit::git_commit_exists), + ) + .route( + "/commits/{oid}/is-commit", + web::get().to(commit::git_commit_is_commit), + ) + .route( + "/commits/{oid}/message", + web::get().to(commit::git_commit_message), + ) + .route( + "/commits/{oid}/summary", + web::get().to(commit::git_commit_summary), + ) + .route( + "/commits/{oid}/short-id", + web::get().to(commit::git_commit_short_id), + ) + .route( + "/commits/{oid}/author", + web::get().to(commit::git_commit_author), + 
) + .route( + "/commits/{oid}/tree-id", + web::get().to(commit::git_commit_tree_id), + ) + .route( + "/commits/{oid}/parent-count", + web::get().to(commit::git_commit_parent_count), + ) + .route( + "/commits/{oid}/parent-ids", + web::get().to(commit::git_commit_parent_ids), + ) + .route( + "/commits/{oid}/parent/{index}", + web::get().to(commit::git_commit_parent), + ) + .route( + "/commits/{oid}/first-parent", + web::get().to(commit::git_commit_first_parent), + ) + .route( + "/commits/{oid}/is-merge", + web::get().to(commit::git_commit_is_merge), + ) + .route( + "/commits/{oid}/refs", + web::get().to(commit::git_commit_refs), + ) + .route( + "/commits/branches", + web::get().to(commit::git_commit_branches), + ) + .route("/commits/tags", web::get().to(commit::git_commit_tags)) + .route( + "/commits/{oid}/is-tip", + web::get().to(commit::git_commit_is_tip), + ) + .route( + "/commits/{oid}/ref-count", + web::get().to(commit::git_commit_ref_count), + ) + .route("/commits/reflog", web::get().to(commit::git_commit_reflog)) + .route( + "/commits/{oid}/ancestors", + web::get().to(commit::git_commit_ancestors), + ) + .route( + "/commits/{oid}/descendants", + web::get().to(commit::git_commit_descendants), + ) + .route( + "/commits/{oid}/cherry-pick", + web::post().to(commit::git_commit_cherry_pick), + ) + .route( + "/commits/{oid}/cherry-pick/abort", + web::post().to(commit::git_commit_cherry_pick_abort), + ) + .route( + "/commits/{oid}/revert", + web::post().to(commit::git_commit_revert), + ) + .route( + "/commits/{oid}/revert/abort", + web::post().to(commit::git_commit_revert_abort), + ) + // contributors + .route( + "/contributors", + web::get().to(contributors::git_contributors), + ) + // diff + .route("/diff", web::get().to(diff::git_diff_tree_to_tree)) + .route( + "/diff/commit/{commit}", + web::get().to(diff::git_diff_commit_to_workdir), + ) + .route( + "/diff/commit/{commit}/index", + web::get().to(diff::git_diff_commit_to_index), + ) + .route( + "/diff/workdir", + 
web::get().to(diff::git_diff_workdir_to_index), + ) + .route("/diff/index", web::get().to(diff::git_diff_index_to_tree)) + .route("/diff/stats", web::get().to(diff::git_diff_stats)) + .route("/diff/patch-id", web::get().to(diff::git_diff_patch_id)) + .route( + "/diff/side-by-side", + web::get().to(diff::git_diff_side_by_side), + ) + // refs + .route("/refs", web::get().to(refs::git_ref_list)) + .route("/refs", web::post().to(refs::git_ref_create)) + .route("/refs/{name}", web::get().to(refs::git_ref_get)) + .route("/refs/{name}", web::delete().to(refs::git_ref_delete)) + .route("/refs/rename", web::patch().to(refs::git_ref_rename)) + .route("/refs/update", web::patch().to(refs::git_ref_update)) + .route("/refs/{name}/exists", web::get().to(refs::git_ref_exists)) + .route("/refs/{name}/target", web::get().to(refs::git_ref_target)) + // repo (description, config, merge) + .route("/description", web::get().to(repo::git_description_get)) + .route("/description", web::put().to(repo::git_description_set)) + .route( + "/description", + web::delete().to(repo::git_description_reset), + ) + .route( + "/description/exists", + web::get().to(repo::git_description_exists), + ) + .route("/git", web::patch().to(repo::git_update_repo)) + .route("/config/entries", web::get().to(repo::git_config_entries)) + .route("/config/{key}", web::get().to(repo::git_config_get)) + .route("/config/{key}", web::put().to(repo::git_config_set)) + .route("/config/{key}", web::delete().to(repo::git_config_delete)) + .route("/config/{key}/has", web::get().to(repo::git_config_has)) + .route( + "/merge/analysis/{their_oid}", + web::get().to(repo::git_merge_analysis), + ) + .route( + "/merge/analysis/{ref_name}/{their_oid}", + web::get().to(repo::git_merge_analysis_for_ref), + ) + .route( + "/merge/base/{oid1}/{oid2}", + web::get().to(repo::git_merge_base), + ) + .route("/merge/commits", web::post().to(repo::git_merge_commits)) + .route("/merge/trees", web::post().to(repo::git_merge_trees)) + 
.route("/merge/abort", web::post().to(repo::git_merge_abort)) + .route( + "/merge/in-progress", + web::get().to(repo::git_merge_is_in_progress), + ) + .route("/merge/heads", web::get().to(repo::git_mergehead_list)) + .route( + "/merge/is-conflicted", + web::get().to(repo::git_merge_is_conflicted), + ) + // star + .route("/star", web::post().to(star::git_star)) + .route("/star", web::delete().to(star::git_unstar)) + .route("/star/is-starred", web::get().to(star::git_is_starred)) + .route("/star/count", web::get().to(star::git_star_count)) + .route("/star/users", web::get().to(star::git_star_user_list)) + // branch protection + .route( + "/branch-protections", + web::get().to(branch_protection::branch_protection_list), + ) + .route( + "/branch-protections", + web::post().to(branch_protection::branch_protection_create), + ) + .route( + "/branch-protections/check-approvals", + web::get().to(branch_protection::branch_protection_check_approvals), + ) + .route( + "/branch-protections/{id}", + web::get().to(branch_protection::branch_protection_get), + ) + .route( + "/branch-protections/{id}", + web::patch().to(branch_protection::branch_protection_update), + ) + .route( + "/branch-protections/{id}", + web::delete().to(branch_protection::branch_protection_delete), + ) + // tag + .route("/tags", web::get().to(tag::git_tag_list)) + .route("/tags/names", web::get().to(tag::git_tag_list_names)) + .route("/tags/summary", web::get().to(tag::git_tag_summary)) + .route("/tags/count", web::get().to(tag::git_tag_count)) + .route("/tags", web::post().to(tag::git_tag_create)) + .route( + "/tags/lightweight", + web::post().to(tag::git_tag_create_lightweight), + ) + .route("/tags/rename", web::patch().to(tag::git_tag_rename)) + .route( + "/tags/message", + web::patch().to(tag::git_tag_update_message), + ) + .route("/tags/{name}", web::get().to(tag::git_tag_get)) + .route("/tags/{name}", web::delete().to(tag::git_tag_delete)) + .route("/tags/{name}/exists", 
web::get().to(tag::git_tag_exists)) + .route("/tags/{name}/target", web::get().to(tag::git_tag_target)) + .route( + "/tags/{name}/is-annotated", + web::get().to(tag::git_tag_is_annotated), + ) + .route("/tags/{name}/message", web::get().to(tag::git_tag_message)) + .route("/tags/{name}/tagger", web::get().to(tag::git_tag_tagger)) + // tree + .route("/tree/{oid}", web::get().to(tree::git_tree_get)) + .route("/tree/{oid}/exists", web::get().to(tree::git_tree_exists)) + .route("/tree/{oid}/list", web::get().to(tree::git_tree_list)) + .route( + "/tree/{oid}/entry/{index}", + web::get().to(tree::git_tree_entry), + ) + .route( + "/tree/{oid}/entry-by-path", + web::get().to(tree::git_tree_entry_by_path), + ) + .route( + "/tree/{commit}/commit-entry-by-path", + web::get().to(tree::git_tree_entry_by_commit_path), + ) + .route( + "/tree/{oid}/entry-count", + web::get().to(tree::git_tree_entry_count), + ) + .route( + "/tree/{oid}/is-empty", + web::get().to(tree::git_tree_is_empty), + ) + .route("/tree/diff-stats", web::get().to(tree::git_tree_diffstats)) + // watch + .route("/watch", web::post().to(watch::git_watch)) + .route("/watch", web::delete().to(watch::git_unwatch)) + .route("/watch/is-watched", web::get().to(watch::git_is_watched)) + .route("/watch/count", web::get().to(watch::git_watch_count)) + .route("/watch/users", web::get().to(watch::git_watch_user_list)) + // webhook + .route("/webhooks", web::get().to(webhook::git_webhook_list)) + .route("/webhooks", web::post().to(webhook::git_webhook_create)) + .route( + "/webhooks/{webhook_id}", + web::get().to(webhook::git_webhook_get), + ) + .route( + "/webhooks/{webhook_id}", + web::patch().to(webhook::git_webhook_update), + ) + .route( + "/webhooks/{webhook_id}", + web::delete().to(webhook::git_webhook_delete), + ), + ); +} + +pub fn init_git_toplevel_routes(cfg: &mut web::ServiceConfig) { + cfg.service(web::scope("/git").route("/is-repo/{path}", web::get().to(init::git_is_repo))); +} diff --git a/libs/api/git/refs.rs 
b/libs/api/git/refs.rs new file mode 100644 index 0000000..573b5b3 --- /dev/null +++ b/libs/api/git/refs.rs @@ -0,0 +1,226 @@ +use crate::{ApiResponse, error::ApiError}; +use actix_web::{HttpResponse, Result, web}; +use service::AppService; +use service::git::refs::{ + RefCreateRequest, RefDeleteResponse, RefExistsResponse, RefInfoResponse, RefListQuery, + RefRenameQuery, RefTargetResponse, RefUpdateRequest, RefUpdateResponse, +}; +use session::Session; + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/refs", + params( + ("namespace" = String, Path, description = "Repository namespace"), + ("repo" = String, Path, description = "Repository name"), + ), + responses( + (status = 200, description = "List of refs", body = ApiResponse>), + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_ref_list( + service: web::Data, + session: Session, + path: web::Path<(String, String)>, + query: web::Query, +) -> Result { + let (namespace, repo_name) = path.into_inner(); + let resp = service + .git_ref_list(namespace, repo_name, query.into_inner(), &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/refs/{name}", + params( + ("namespace" = String, Path, description = "Repository namespace"), + ("repo" = String, Path, description = "Repository name"), + ("name" = String, Path, description = "Ref name"), + ), + responses( + (status = 200, description = "Ref info", body = ApiResponse), + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_ref_get( + service: web::Data, + session: Session, + path: web::Path<(String, String, String)>, +) -> Result { + let (namespace, repo_name, name) = path.into_inner(); + let resp = service + 
.git_ref_get(namespace, repo_name, name, &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + post, + path = "/api/repos/{namespace}/{repo}/git/refs", + params( + ("namespace" = String, Path, description = "Repository namespace"), + ("repo" = String, Path, description = "Repository name"), + ), + request_body = RefCreateRequest, + responses( + (status = 200, description = "Ref created", body = ApiResponse), + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_ref_create( + service: web::Data, + session: Session, + path: web::Path<(String, String)>, + body: web::Json, +) -> Result { + let (namespace, repo_name) = path.into_inner(); + let resp = service + .git_ref_create(namespace, repo_name, body.into_inner(), &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + delete, + path = "/api/repos/{namespace}/{repo}/git/refs/{name}", + params( + ("namespace" = String, Path, description = "Repository namespace"), + ("repo" = String, Path, description = "Repository name"), + ("name" = String, Path, description = "Ref name"), + ), + responses( + (status = 200, description = "Ref deleted", body = ApiResponse), + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_ref_delete( + service: web::Data, + session: Session, + path: web::Path<(String, String, String)>, +) -> Result { + let (namespace, repo_name, name) = path.into_inner(); + let resp = service + .git_ref_delete(namespace, repo_name, name, &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + patch, + path = "/api/repos/{namespace}/{repo}/git/refs/rename", + params( + ("namespace" = String, Path, description = "Repository namespace"), + ("repo" = String, Path, description = "Repository 
name"), + ), + responses( + (status = 200, description = "Ref renamed", body = ApiResponse), + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_ref_rename( + service: web::Data, + session: Session, + path: web::Path<(String, String)>, + query: web::Query, +) -> Result { + let (namespace, repo_name) = path.into_inner(); + let resp = service + .git_ref_rename(namespace, repo_name, query.into_inner(), &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + put, + path = "/api/repos/{namespace}/{repo}/git/refs", + params( + ("namespace" = String, Path, description = "Repository namespace"), + ("repo" = String, Path, description = "Repository name"), + ), + request_body = RefUpdateRequest, + responses( + (status = 200, description = "Ref updated", body = ApiResponse), + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_ref_update( + service: web::Data, + session: Session, + path: web::Path<(String, String)>, + body: web::Json, +) -> Result { + let (namespace, repo_name) = path.into_inner(); + let resp = service + .git_ref_update(namespace, repo_name, body.into_inner(), &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/refs/{name}/exists", + params( + ("namespace" = String, Path, description = "Repository namespace"), + ("repo" = String, Path, description = "Repository name"), + ("name" = String, Path, description = "Ref name"), + ), + responses( + (status = 200, description = "Ref exists check", body = ApiResponse), + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_ref_exists( + service: 
web::Data, + session: Session, + path: web::Path<(String, String, String)>, +) -> Result { + let (namespace, repo_name, name) = path.into_inner(); + let resp = service + .git_ref_exists(namespace, repo_name, name, &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/refs/{name}/target", + params( + ("namespace" = String, Path, description = "Repository namespace"), + ("repo" = String, Path, description = "Repository name"), + ("name" = String, Path, description = "Ref name"), + ), + responses( + (status = 200, description = "Ref target", body = ApiResponse), + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_ref_target( + service: web::Data, + session: Session, + path: web::Path<(String, String, String)>, +) -> Result { + let (namespace, repo_name, name) = path.into_inner(); + let resp = service + .git_ref_target(namespace, repo_name, name, &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} diff --git a/libs/api/git/repo.rs b/libs/api/git/repo.rs new file mode 100644 index 0000000..c4c9167 --- /dev/null +++ b/libs/api/git/repo.rs @@ -0,0 +1,542 @@ +use crate::{ApiResponse, error::ApiError}; +use actix_web::{HttpResponse, Result, web}; +use service::AppService; +use service::git::repo::{ + ConfigBoolResponse, ConfigDeleteQuery, ConfigEntriesQuery, ConfigGetQuery, ConfigSetRequest, + ConfigSnapshotResponse, DescriptionQuery, DescriptionResponse, GitUpdateRepoRequest, + MergeAnalysisQuery, MergeAnalysisResponse, MergeCommitsRequest, MergeRefAnalysisQuery, + MergeTreesRequest, MergeheadInfoResponse, +}; +use session::Session; + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/description", + params( + ("namespace" = String, Path, description = "Project namespace"), + ("repo" = String, Path, description = "Repository name"), + ), + 
responses( + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 200, description = "Get repository description", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_description_get( + service: web::Data, + session: Session, + path: web::Path<(String, String)>, +) -> Result { + let (namespace, repo_name) = path.into_inner(); + let resp = service + .git_description_get(namespace, repo_name, &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + put, + path = "/api/repos/{namespace}/{repo}/git/description", + params( + ("namespace" = String, Path, description = "Project namespace"), + ("repo" = String, Path, description = "Repository name"), + ), + request_body = DescriptionQuery, + responses( + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 200, description = "Set repository description", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_description_set( + service: web::Data, + session: Session, + path: web::Path<(String, String)>, + body: web::Json, +) -> Result { + let (namespace, repo_name) = path.into_inner(); + let resp = service + .git_description_set(namespace, repo_name, body.into_inner(), &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + delete, + path = "/api/repos/{namespace}/{repo}/git/description", + params( + ("namespace" = String, Path, description = "Project namespace"), + ("repo" = String, Path, description = "Repository name"), + ), + responses( + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 200, description = "Reset repository description", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_description_reset( + service: web::Data, + session: Session, + path: 
web::Path<(String, String)>, +) -> Result { + let (namespace, repo_name) = path.into_inner(); + let resp = service + .git_description_reset(namespace, repo_name, &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/description/exists", + params( + ("namespace" = String, Path, description = "Project namespace"), + ("repo" = String, Path, description = "Repository name"), + ), + responses( + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 200, description = "Check if repository description exists", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_description_exists( + service: web::Data, + session: Session, + path: web::Path<(String, String)>, +) -> Result { + let (namespace, repo_name) = path.into_inner(); + let resp = service + .git_description_exists(namespace, repo_name, &session) + .await?; + Ok(ApiResponse::ok(serde_json::json!({"exists": resp})).to_response()) +} + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/config/entries", + params( + ("namespace" = String, Path, description = "Project namespace"), + ("repo" = String, Path, description = "Repository name"), + ), + responses( + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 200, description = "List repository config entries", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_config_entries( + service: web::Data, + session: Session, + path: web::Path<(String, String)>, + query: web::Query, +) -> Result { + let (namespace, repo_name) = path.into_inner(); + let resp = service + .git_config_entries(namespace, repo_name, query.into_inner(), &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/config/{key}", + 
params( + ("namespace" = String, Path, description = "Project namespace"), + ("repo" = String, Path, description = "Repository name"), + ("key" = String, Path, description = "Config key"), + ), + responses( + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 200, description = "Get repository config value", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_config_get( + service: web::Data, + session: Session, + path: web::Path<(String, String, String)>, + query: web::Query, +) -> Result { + let (namespace, repo_name, key) = path.into_inner(); + let mut req = query.into_inner(); + req.key = key; + let resp = service + .git_config_get(namespace, repo_name, req, &session) + .await?; + Ok(ApiResponse::ok(serde_json::json!({"value": resp})).to_response()) +} + +#[utoipa::path( + put, + path = "/api/repos/{namespace}/{repo}/git/config/{key}", + params( + ("namespace" = String, Path, description = "Project namespace"), + ("repo" = String, Path, description = "Repository name"), + ("key" = String, Path, description = "Config key"), + ), + request_body = ConfigSetRequest, + responses( + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 200, description = "Set repository config value"), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_config_set( + service: web::Data, + session: Session, + path: web::Path<(String, String, String)>, + body: web::Json, +) -> Result { + let (namespace, repo_name, key) = path.into_inner(); + let mut req = body.into_inner(); + req.key = key; + service + .git_config_set(namespace, repo_name, req, &session) + .await?; + Ok(crate::api_success()) +} + +#[utoipa::path( + delete, + path = "/api/repos/{namespace}/{repo}/git/config/{key}", + params( + ("namespace" = String, Path, description = "Project namespace"), + ("repo" = String, Path, description = "Repository 
name"), + ("key" = String, Path, description = "Config key"), + ), + responses( + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 200, description = "Delete repository config key"), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_config_delete( + service: web::Data, + session: Session, + path: web::Path<(String, String, String)>, + query: web::Query, +) -> Result { + let (namespace, repo_name, key) = path.into_inner(); + let mut req = query.into_inner(); + req.key = key; + service + .git_config_delete(namespace, repo_name, req, &session) + .await?; + Ok(crate::api_success()) +} + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/config/{key}/has", + params( + ("namespace" = String, Path, description = "Project namespace"), + ("repo" = String, Path, description = "Repository name"), + ("key" = String, Path, description = "Config key"), + ), + responses( + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 200, description = "Check if repository config key exists", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_config_has( + service: web::Data, + session: Session, + path: web::Path<(String, String, String)>, + query: web::Query, +) -> Result { + let (namespace, repo_name, key) = path.into_inner(); + let mut req = query.into_inner(); + req.key = key; + let resp = service + .git_config_has(namespace, repo_name, req, &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/merge/analysis/{their_oid}", + params( + ("namespace" = String, Path, description = "Project namespace"), + ("repo" = String, Path, description = "Repository name"), + ("their_oid" = String, Path, description = "The OID to analyze merge against"), + ), + responses( + (status = 401, description = "Unauthorized", 
body = ApiResponse), + (status = 200, description = "Perform merge analysis", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_merge_analysis( + service: web::Data, + session: Session, + path: web::Path<(String, String, String)>, + query: web::Query, +) -> Result { + let (namespace, repo_name, their_oid) = path.into_inner(); + let mut req = query.into_inner(); + req.their_oid = their_oid; + let resp = service + .git_merge_analysis(namespace, repo_name, req, &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/merge/analysis/{ref_name}/{their_oid}", + params( + ("namespace" = String, Path, description = "Project namespace"), + ("repo" = String, Path, description = "Repository name"), + ("ref_name" = String, Path, description = "Reference name"), + ("their_oid" = String, Path, description = "The OID to analyze merge against"), + ), + responses( + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 200, description = "Perform merge analysis for a specific ref", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_merge_analysis_for_ref( + service: web::Data, + session: Session, + path: web::Path<(String, String, String, String)>, + query: web::Query, +) -> Result { + let (namespace, repo_name, ref_name, their_oid) = path.into_inner(); + let mut req = query.into_inner(); + req.ref_name = ref_name; + req.their_oid = their_oid; + let resp = service + .git_merge_analysis_for_ref(namespace, repo_name, req, &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/merge/base/{oid1}/{oid2}", + params( + ("namespace" = String, Path, description = "Project namespace"), + ("repo" = String, Path, description = "Repository name"), + ("oid1" = 
String, Path, description = "First commit OID"), + ("oid2" = String, Path, description = "Second commit OID"), + ), + responses( + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 200, description = "Get merge base of two commits", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_merge_base( + service: web::Data, + session: Session, + path: web::Path<(String, String, String, String)>, +) -> Result { + let (namespace, repo_name, oid1, oid2) = path.into_inner(); + let resp = service + .git_merge_base(namespace, repo_name, oid1, oid2, &session) + .await?; + Ok(ApiResponse::ok(serde_json::json!({"merge_base": resp})).to_response()) +} + +#[utoipa::path( + post, + path = "/api/repos/{namespace}/{repo}/git/merge/commits", + params( + ("namespace" = String, Path, description = "Project namespace"), + ("repo" = String, Path, description = "Repository name"), + ), + request_body = MergeCommitsRequest, + responses( + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 200, description = "Merge commits"), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_merge_commits( + service: web::Data, + session: Session, + path: web::Path<(String, String)>, + body: web::Json, +) -> Result { + let (namespace, repo_name) = path.into_inner(); + service + .git_merge_commits(namespace, repo_name, body.into_inner(), &session) + .await?; + Ok(crate::api_success()) +} + +#[utoipa::path( + post, + path = "/api/repos/{namespace}/{repo}/git/merge/trees", + params( + ("namespace" = String, Path, description = "Project namespace"), + ("repo" = String, Path, description = "Repository name"), + ), + request_body = MergeTreesRequest, + responses( + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 200, description = "Merge trees"), + (status = 404, description = "Not found", body = 
ApiResponse), + ), + tag = "Git" +)] +pub async fn git_merge_trees( + service: web::Data, + session: Session, + path: web::Path<(String, String)>, + body: web::Json, +) -> Result { + let (namespace, repo_name) = path.into_inner(); + service + .git_merge_trees(namespace, repo_name, body.into_inner(), &session) + .await?; + Ok(crate::api_success()) +} + +#[utoipa::path( + post, + path = "/api/repos/{namespace}/{repo}/git/merge/abort", + params( + ("namespace" = String, Path, description = "Project namespace"), + ("repo" = String, Path, description = "Repository name"), + ), + responses( + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 200, description = "Abort an in-progress merge"), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_merge_abort( + service: web::Data, + session: Session, + path: web::Path<(String, String)>, +) -> Result { + let (namespace, repo_name) = path.into_inner(); + service + .git_merge_abort(namespace, repo_name, &session) + .await?; + Ok(crate::api_success()) +} + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/merge/in-progress", + params( + ("namespace" = String, Path, description = "Project namespace"), + ("repo" = String, Path, description = "Repository name"), + ), + responses( + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 200, description = "Check if merge is in progress", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_merge_is_in_progress( + service: web::Data, + session: Session, + path: web::Path<(String, String)>, +) -> Result { + let (namespace, repo_name) = path.into_inner(); + let resp = service + .git_merge_is_in_progress(namespace, repo_name, &session) + .await?; + Ok(ApiResponse::ok(serde_json::json!({"in_progress": resp})).to_response()) +} + +#[utoipa::path( + get, + path = 
"/api/repos/{namespace}/{repo}/git/merge/heads", + params( + ("namespace" = String, Path, description = "Project namespace"), + ("repo" = String, Path, description = "Repository name"), + ), + responses( + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 200, description = "List merge heads", body = ApiResponse>), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_mergehead_list( + service: web::Data, + session: Session, + path: web::Path<(String, String)>, +) -> Result { + let (namespace, repo_name) = path.into_inner(); + let resp = service + .git_mergehead_list(namespace, repo_name, &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/merge/is-conflicted", + params( + ("namespace" = String, Path, description = "Project namespace"), + ("repo" = String, Path, description = "Repository name"), + ), + responses( + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 200, description = "Check if merge has conflicts", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_merge_is_conflicted( + service: web::Data, + session: Session, + path: web::Path<(String, String)>, +) -> Result { + let (namespace, repo_name) = path.into_inner(); + let resp = service + .git_merge_is_conflicted(namespace, repo_name, &session) + .await?; + Ok(ApiResponse::ok(serde_json::json!({"is_conflicted": resp})).to_response()) +} + +#[utoipa::path( + patch, + path = "/api/repos/{namespace}/{repo}/git", + params( + ("namespace" = String, Path, description = "Project namespace"), + ("repo" = String, Path, description = "Repository name"), + ), + request_body = GitUpdateRepoRequest, + responses( + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 200, description = "Update repository settings"), + (status = 
404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_update_repo( + service: web::Data, + session: Session, + path: web::Path<(String, String)>, + body: web::Json, +) -> Result { + let (namespace, repo_name) = path.into_inner(); + service + .git_update_repo(namespace, repo_name, body.into_inner(), &session) + .await?; + Ok(crate::api_success()) +} diff --git a/libs/api/git/star.rs b/libs/api/git/star.rs new file mode 100644 index 0000000..5a0466f --- /dev/null +++ b/libs/api/git/star.rs @@ -0,0 +1,149 @@ +use crate::{ApiResponse, error::ApiError}; +use actix_web::{HttpResponse, Result, web}; +use service::AppService; +use service::git::star::{StarCountResponse, StarUserListResponse}; +use session::Session; + +#[derive(serde::Deserialize, utoipa::IntoParams)] +pub struct StarPagerQuery { + pub page: Option, + pub par_page: Option, +} + +impl From for service::Pager { + fn from(q: StarPagerQuery) -> Self { + service::Pager { + page: q.page.unwrap_or(1), + par_page: q.par_page.unwrap_or(20), + } + } +} + +#[utoipa::path( + post, + path = "/api/repos/{namespace}/{repo}/git/star", + params( + ("namespace" = String, Path, description = "Project namespace"), + ("repo" = String, Path, description = "Repository name"), + ), + responses( + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 200, description = "Star the repository"), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_star( + service: web::Data, + session: Session, + path: web::Path<(String, String)>, +) -> Result { + let (namespace, repo_name) = path.into_inner(); + service.git_star(namespace, repo_name, &session).await?; + Ok(crate::api_success()) +} + +#[utoipa::path( + delete, + path = "/api/repos/{namespace}/{repo}/git/star", + params( + ("namespace" = String, Path, description = "Project namespace"), + ("repo" = String, Path, description = "Repository name"), + ), + responses( + 
(status = 401, description = "Unauthorized", body = ApiResponse), + (status = 200, description = "Unstar the repository"), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_unstar( + service: web::Data, + session: Session, + path: web::Path<(String, String)>, +) -> Result { + let (namespace, repo_name) = path.into_inner(); + service.git_unstar(namespace, repo_name, &session).await?; + Ok(crate::api_success()) +} + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/star/is-starred", + params( + ("namespace" = String, Path, description = "Project namespace"), + ("repo" = String, Path, description = "Repository name"), + ), + responses( + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 200, description = "Check if the current user has starred the repository", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_is_starred( + service: web::Data, + session: Session, + path: web::Path<(String, String)>, +) -> Result { + let (namespace, repo_name) = path.into_inner(); + let resp = service + .git_is_starred(namespace, repo_name, &session) + .await?; + Ok(ApiResponse::ok(serde_json::json!({"is_starred": resp})).to_response()) +} + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/star/count", + params( + ("namespace" = String, Path, description = "Project namespace"), + ("repo" = String, Path, description = "Repository name"), + ), + responses( + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 200, description = "Get star count for the repository", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_star_count( + service: web::Data, + session: Session, + path: web::Path<(String, String)>, +) -> Result { + let (namespace, repo_name) = path.into_inner(); + let resp = service + 
.git_star_count(namespace, repo_name, &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/star/users", + params( + ("namespace" = String, Path, description = "Project namespace"), + ("repo" = String, Path, description = "Repository name"), + ("page" = Option, Query, description = "Page number"), + ("per_page" = Option, Query, description = "Items per page"), + ), + responses( + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 200, description = "List users who starred the repository", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_star_user_list( + service: web::Data, + session: Session, + path: web::Path<(String, String)>, + query: web::Query, +) -> Result { + let (namespace, repo_name) = path.into_inner(); + let resp = service + .git_star_user_list(namespace, repo_name, query.into_inner().into(), &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} diff --git a/libs/api/git/tag.rs b/libs/api/git/tag.rs new file mode 100644 index 0000000..b2a52f8 --- /dev/null +++ b/libs/api/git/tag.rs @@ -0,0 +1,434 @@ +use crate::{ApiResponse, error::ApiError}; +use actix_web::{HttpResponse, Result, web}; +use service::AppService; +use service::git::tag::{ + TagCountResponse, TagCreateLightweightRequest, TagCreateRequest, TagExistsResponse, + TagGetQuery, TagInfoResponse, TagIsAnnotatedResponse, TagMessageResponse, TagRenameQuery, + TagSummaryResponse, TagTaggerResponse, TagTargetQuery, TagTargetResponse, + TagUpdateMessageRequest, +}; +use session::Session; + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/tags", + params( + ("namespace" = String, Path, description = "Project namespace"), + ("repo" = String, Path, description = "Repository name"), + ), + responses( + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 200, 
description = "List all tags", body = ApiResponse>), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_tag_list( + service: web::Data, + session: Session, + path: web::Path<(String, String)>, +) -> Result { + let (namespace, repo_name) = path.into_inner(); + let resp = service.git_tag_list(namespace, repo_name, &session).await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/tags/names", + params( + ("namespace" = String, Path, description = "Project namespace"), + ("repo" = String, Path, description = "Repository name"), + ), + responses( + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 200, description = "List all tag names", body = ApiResponse>), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_tag_list_names( + service: web::Data, + session: Session, + path: web::Path<(String, String)>, +) -> Result { + let (namespace, repo_name) = path.into_inner(); + let resp = service + .git_tag_list_names(namespace, repo_name, &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/tags/summary", + params( + ("namespace" = String, Path, description = "Project namespace"), + ("repo" = String, Path, description = "Repository name"), + ), + responses( + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 200, description = "Get tag summary", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_tag_summary( + service: web::Data, + session: Session, + path: web::Path<(String, String)>, +) -> Result { + let (namespace, repo_name) = path.into_inner(); + let resp = service + .git_tag_summary(namespace, repo_name, &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( 
+ get, + path = "/api/repos/{namespace}/{repo}/git/tags/count", + params( + ("namespace" = String, Path, description = "Project namespace"), + ("repo" = String, Path, description = "Repository name"), + ), + responses( + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 200, description = "Get tag count", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_tag_count( + service: web::Data, + session: Session, + path: web::Path<(String, String)>, +) -> Result { + let (namespace, repo_name) = path.into_inner(); + let resp = service + .git_tag_count(namespace, repo_name, &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + post, + path = "/api/repos/{namespace}/{repo}/git/tags", + params( + ("namespace" = String, Path, description = "Project namespace"), + ("repo" = String, Path, description = "Repository name"), + ), + request_body = TagCreateRequest, + responses( + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 200, description = "Create an annotated tag", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_tag_create( + service: web::Data, + session: Session, + path: web::Path<(String, String)>, + body: web::Json, +) -> Result { + let (namespace, repo_name) = path.into_inner(); + let resp = service + .git_tag_create(namespace, repo_name, body.into_inner(), &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + post, + path = "/api/repos/{namespace}/{repo}/git/tags/lightweight", + params( + ("namespace" = String, Path, description = "Project namespace"), + ("repo" = String, Path, description = "Repository name"), + ), + request_body = TagCreateLightweightRequest, + responses( + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 200, description = "Create a lightweight 
tag", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_tag_create_lightweight( + service: web::Data, + session: Session, + path: web::Path<(String, String)>, + body: web::Json, +) -> Result { + let (namespace, repo_name) = path.into_inner(); + let resp = service + .git_tag_create_lightweight(namespace, repo_name, body.into_inner(), &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/tags/{name}", + params( + ("namespace" = String, Path, description = "Project namespace"), + ("repo" = String, Path, description = "Repository name"), + ("name" = String, Path, description = "Tag name"), + ), + responses( + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 200, description = "Get a tag by name", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_tag_get( + service: web::Data, + session: Session, + path: web::Path<(String, String, String)>, + query: web::Query, +) -> Result { + let (namespace, repo_name, name) = path.into_inner(); + let mut req = query.into_inner(); + req.name = name; + let resp = service + .git_tag_get(namespace, repo_name, req, &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + delete, + path = "/api/repos/{namespace}/{repo}/git/tags/{name}", + params( + ("namespace" = String, Path, description = "Project namespace"), + ("repo" = String, Path, description = "Repository name"), + ("name" = String, Path, description = "Tag name"), + ), + responses( + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 200, description = "Delete a tag"), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_tag_delete( + service: web::Data, + session: Session, + path: web::Path<(String, String, 
String)>, + query: web::Query, +) -> Result { + let (namespace, repo_name, name) = path.into_inner(); + let mut req = query.into_inner(); + req.name = name; + service + .git_tag_delete(namespace, repo_name, req, &session) + .await?; + Ok(HttpResponse::Ok().json(serde_json::json!({ "success": true }))) +} + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/tags/{name}/exists", + params( + ("namespace" = String, Path, description = "Project namespace"), + ("repo" = String, Path, description = "Repository name"), + ("name" = String, Path, description = "Tag name"), + ), + responses( + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 200, description = "Check if a tag exists", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_tag_exists( + service: web::Data, + session: Session, + path: web::Path<(String, String, String)>, + query: web::Query, +) -> Result { + let (namespace, repo_name, name) = path.into_inner(); + let mut req = query.into_inner(); + req.name = name; + let resp = service + .git_tag_exists(namespace, repo_name, req, &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/tags/{name}/target", + params( + ("namespace" = String, Path, description = "Project namespace"), + ("repo" = String, Path, description = "Repository name"), + ("name" = String, Path, description = "Tag name"), + ), + responses( + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 200, description = "Get tag target OID", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_tag_target( + service: web::Data, + session: Session, + path: web::Path<(String, String, String)>, + query: web::Query, +) -> Result { + let (namespace, repo_name, name) = path.into_inner(); + let mut req = 
query.into_inner(); + req.name = name; + let resp = service + .git_tag_target(namespace, repo_name, req, &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/tags/{name}/is-annotated", + params( + ("namespace" = String, Path, description = "Project namespace"), + ("repo" = String, Path, description = "Repository name"), + ("name" = String, Path, description = "Tag name"), + ), + responses( + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 200, description = "Check if a tag is annotated", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_tag_is_annotated( + service: web::Data, + session: Session, + path: web::Path<(String, String, String)>, + query: web::Query, +) -> Result { + let (namespace, repo_name, name) = path.into_inner(); + let mut req = query.into_inner(); + req.name = name; + let resp = service + .git_tag_is_annotated(namespace, repo_name, req, &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/tags/{name}/message", + params( + ("namespace" = String, Path, description = "Project namespace"), + ("repo" = String, Path, description = "Repository name"), + ("name" = String, Path, description = "Tag name"), + ), + responses( + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 200, description = "Get tag message", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_tag_message( + service: web::Data, + session: Session, + path: web::Path<(String, String, String)>, + query: web::Query, +) -> Result { + let (namespace, repo_name, name) = path.into_inner(); + let mut req = query.into_inner(); + req.name = name; + let resp = service + .git_tag_message(namespace, repo_name, req, &session) + 
.await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + patch, + path = "/api/repos/{namespace}/{repo}/git/tags/rename", + params( + ("namespace" = String, Path, description = "Project namespace"), + ("repo" = String, Path, description = "Repository name"), + ), + request_body = TagRenameQuery, + responses( + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 200, description = "Rename a tag", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_tag_rename( + service: web::Data, + session: Session, + path: web::Path<(String, String)>, + body: web::Json, +) -> Result { + let (namespace, repo_name) = path.into_inner(); + let resp = service + .git_tag_rename(namespace, repo_name, body.into_inner(), &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + patch, + path = "/api/repos/{namespace}/{repo}/git/tags/message", + params( + ("namespace" = String, Path, description = "Project namespace"), + ("repo" = String, Path, description = "Repository name"), + ), + request_body = TagUpdateMessageRequest, + responses( + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 200, description = "Update tag message", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_tag_update_message( + service: web::Data, + session: Session, + path: web::Path<(String, String)>, + body: web::Json, +) -> Result { + let (namespace, repo_name) = path.into_inner(); + let resp = service + .git_tag_update_message(namespace, repo_name, body.into_inner(), &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/tags/{name}/tagger", + params( + ("namespace" = String, Path, description = "Project namespace"), + ("repo" = String, Path, description = "Repository name"), + ("name" = 
String, Path, description = "Tag name"), + ), + responses( + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 200, description = "Get tag tagger info", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_tag_tagger( + service: web::Data, + session: Session, + path: web::Path<(String, String, String)>, + query: web::Query, +) -> Result { + let (namespace, repo_name, name) = path.into_inner(); + let mut req = query.into_inner(); + req.name = name; + let resp = service + .git_tag_tagger(namespace, repo_name, req, &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} diff --git a/libs/api/git/tree.rs b/libs/api/git/tree.rs new file mode 100644 index 0000000..2d1238e --- /dev/null +++ b/libs/api/git/tree.rs @@ -0,0 +1,278 @@ +use crate::{ApiResponse, error::ApiError}; +use actix_web::{HttpResponse, Result, web}; +use service::AppService; +use service::git::tree::{ + TreeDiffQuery, TreeDiffStatsResponse, TreeEntryByCommitPathQuery, TreeEntryByPathQuery, + TreeEntryCountResponse, TreeEntryQuery, TreeEntryResponse, TreeExistsResponse, TreeGetQuery, + TreeInfoResponse, TreeIsEmptyResponse, +}; +use session::Session; + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/tree/{oid}", + params( + ("namespace" = String, Path, description = "Project namespace"), + ("repo" = String, Path, description = "Repository name"), + ("oid" = String, Path, description = "Tree object ID"), + ), + responses( + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 200, description = "Get tree info", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_tree_get( + service: web::Data, + session: Session, + path: web::Path<(String, String, String)>, + query: web::Query, +) -> Result { + let (namespace, repo_name, oid) = path.into_inner(); + let mut req = 
query.into_inner(); + req.oid = oid; + let resp = service + .git_tree_get(namespace, repo_name, req, &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/tree/{oid}/exists", + params( + ("namespace" = String, Path, description = "Project namespace"), + ("repo" = String, Path, description = "Repository name"), + ("oid" = String, Path, description = "Tree object ID"), + ), + responses( + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 200, description = "Check if tree exists", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_tree_exists( + service: web::Data, + session: Session, + path: web::Path<(String, String, String)>, + query: web::Query, +) -> Result { + let (namespace, repo_name, oid) = path.into_inner(); + let mut req = query.into_inner(); + req.oid = oid; + let resp = service + .git_tree_exists(namespace, repo_name, req, &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/tree/{oid}/list", + params( + ("namespace" = String, Path, description = "Project namespace"), + ("repo" = String, Path, description = "Repository name"), + ("oid" = String, Path, description = "Tree object ID"), + ), + responses( + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 200, description = "List tree entries", body = ApiResponse>), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_tree_list( + service: web::Data, + session: Session, + path: web::Path<(String, String, String)>, + query: web::Query, +) -> Result { + let (namespace, repo_name, oid) = path.into_inner(); + let mut req = query.into_inner(); + req.oid = oid; + let resp = service + .git_tree_list(namespace, repo_name, req, &session) + .await?; + 
Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/tree/{oid}/entry/{index}", + params( + ("namespace" = String, Path, description = "Project namespace"), + ("repo" = String, Path, description = "Repository name"), + ("oid" = String, Path, description = "Tree object ID"), + ("index" = usize, Path, description = "Entry index"), + ), + responses( + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 200, description = "Get tree entry by index", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_tree_entry( + service: web::Data, + session: Session, + path: web::Path<(String, String, String, usize)>, + query: web::Query, +) -> Result { + let (namespace, repo_name, oid, index) = path.into_inner(); + let mut req = query.into_inner(); + req.oid = oid; + req.index = index; + let resp = service + .git_tree_entry(namespace, repo_name, req, &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/tree/{oid}/entry-by-path", + params( + ("namespace" = String, Path, description = "Project namespace"), + ("repo" = String, Path, description = "Repository name"), + ("oid" = String, Path, description = "Tree object ID"), + ), + responses( + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 200, description = "Get tree entry by path", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_tree_entry_by_path( + service: web::Data, + session: Session, + path: web::Path<(String, String, String)>, + query: web::Query, +) -> Result { + let (namespace, repo_name, oid) = path.into_inner(); + let mut req = query.into_inner(); + req.oid = oid; + let resp = service + .git_tree_entry_by_path(namespace, repo_name, req, &session) + .await?; + 
Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/tree/{commit}/commit-entry-by-path", + params( + ("namespace" = String, Path, description = "Project namespace"), + ("repo" = String, Path, description = "Repository name"), + ("commit" = String, Path, description = "Commit OID"), + ), + responses( + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 200, description = "Get tree entry by commit path", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_tree_entry_by_commit_path( + service: web::Data, + session: Session, + path: web::Path<(String, String, String)>, + query: web::Query, +) -> Result { + let (namespace, repo_name, commit) = path.into_inner(); + let mut req = query.into_inner(); + req.commit = commit; + let resp = service + .git_tree_entry_by_commit_path(namespace, repo_name, req, &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/tree/{oid}/entry-count", + params( + ("namespace" = String, Path, description = "Project namespace"), + ("repo" = String, Path, description = "Repository name"), + ("oid" = String, Path, description = "Tree object ID"), + ), + responses( + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 200, description = "Get tree entry count", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_tree_entry_count( + service: web::Data, + session: Session, + path: web::Path<(String, String, String)>, + query: web::Query, +) -> Result { + let (namespace, repo_name, oid) = path.into_inner(); + let mut req = query.into_inner(); + req.oid = oid; + let resp = service + .git_tree_entry_count(namespace, repo_name, req, &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( 
+ get, + path = "/api/repos/{namespace}/{repo}/git/tree/{oid}/is-empty", + params( + ("namespace" = String, Path, description = "Project namespace"), + ("repo" = String, Path, description = "Repository name"), + ("oid" = String, Path, description = "Tree object ID"), + ), + responses( + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 200, description = "Check if tree is empty", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_tree_is_empty( + service: web::Data, + session: Session, + path: web::Path<(String, String, String)>, + query: web::Query, +) -> Result { + let (namespace, repo_name, oid) = path.into_inner(); + let mut req = query.into_inner(); + req.oid = oid; + let resp = service + .git_tree_is_empty(namespace, repo_name, req, &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/tree/diff-stats", + params( + ("namespace" = String, Path, description = "Project namespace"), + ("repo" = String, Path, description = "Repository name"), + ), + responses( + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 200, description = "Get tree diff stats", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_tree_diffstats( + service: web::Data, + session: Session, + path: web::Path<(String, String)>, + query: web::Query, +) -> Result { + let (namespace, repo_name) = path.into_inner(); + let resp = service + .git_tree_diffstats(namespace, repo_name, query.into_inner(), &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} diff --git a/libs/api/git/watch.rs b/libs/api/git/watch.rs new file mode 100644 index 0000000..02641ca --- /dev/null +++ b/libs/api/git/watch.rs @@ -0,0 +1,153 @@ +use crate::{ApiResponse, error::ApiError}; +use actix_web::{HttpResponse, Result, web}; +use 
service::AppService; +use service::git::watch::{GitWatchRequest, WatchCountResponse, WatchUserListResponse}; +use session::Session; + +#[derive(serde::Deserialize, utoipa::IntoParams)] +pub struct WatchPagerQuery { + pub page: Option, + pub par_page: Option, +} + +impl From for service::Pager { + fn from(q: WatchPagerQuery) -> Self { + service::Pager { + page: q.page.unwrap_or(1), + par_page: q.par_page.unwrap_or(20), + } + } +} + +#[utoipa::path( + post, + path = "/api/repos/{namespace}/{repo}/git/watch", + params( + ("namespace" = String, Path, description = "Project namespace"), + ("repo" = String, Path, description = "Repository name"), + ), + request_body = GitWatchRequest, + responses( + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 200, description = "Watch the repository"), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_watch( + service: web::Data, + session: Session, + path: web::Path<(String, String)>, + body: web::Json, +) -> Result { + let (namespace, repo_name) = path.into_inner(); + service + .git_watch(namespace, repo_name, body.into_inner(), &session) + .await?; + Ok(crate::api_success()) +} + +#[utoipa::path( + delete, + path = "/api/repos/{namespace}/{repo}/git/watch", + params( + ("namespace" = String, Path, description = "Project namespace"), + ("repo" = String, Path, description = "Repository name"), + ), + responses( + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 200, description = "Unwatch the repository"), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_unwatch( + service: web::Data, + session: Session, + path: web::Path<(String, String)>, +) -> Result { + let (namespace, repo_name) = path.into_inner(); + service.git_unwatch(namespace, repo_name, &session).await?; + Ok(crate::api_success()) +} + +#[utoipa::path( + get, + path = 
"/api/repos/{namespace}/{repo}/git/watch/is-watched", + params( + ("namespace" = String, Path, description = "Project namespace"), + ("repo" = String, Path, description = "Repository name"), + ), + responses( + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 200, description = "Check if the current user is watching the repository", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_is_watched( + service: web::Data, + session: Session, + path: web::Path<(String, String)>, +) -> Result { + let (namespace, repo_name) = path.into_inner(); + let resp = service + .git_is_watched(namespace, repo_name, &session) + .await?; + Ok(ApiResponse::ok(serde_json::json!({"is_watched": resp})).to_response()) +} + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/watch/count", + params( + ("namespace" = String, Path, description = "Project namespace"), + ("repo" = String, Path, description = "Repository name"), + ), + responses( + (status = 401, description = "Unauthorized", body = ApiResponse), + (status = 200, description = "Get watch count for the repository", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_watch_count( + service: web::Data, + session: Session, + path: web::Path<(String, String)>, +) -> Result { + let (namespace, repo_name) = path.into_inner(); + let resp = service + .git_watch_count(namespace, repo_name, &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/watch/users", + params( + ("namespace" = String, Path, description = "Project namespace"), + ("repo" = String, Path, description = "Repository name"), + ("page" = Option, Query, description = "Page number"), + ("per_page" = Option, Query, description = "Items per page"), + ), + responses( + (status = 401, description = 
"Unauthorized", body = ApiResponse), + (status = 200, description = "List users who are watching the repository", body = ApiResponse), + (status = 404, description = "Not found", body = ApiResponse), + ), + tag = "Git" +)] +pub async fn git_watch_user_list( + service: web::Data, + session: Session, + path: web::Path<(String, String)>, + query: web::Query, +) -> Result { + let (namespace, repo_name) = path.into_inner(); + let resp = service + .git_watch_user_list(namespace, repo_name, query.into_inner().into(), &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} diff --git a/libs/api/git/webhook.rs b/libs/api/git/webhook.rs new file mode 100644 index 0000000..ae5690d --- /dev/null +++ b/libs/api/git/webhook.rs @@ -0,0 +1,153 @@ +use crate::{ApiResponse, error::ApiError}; +use actix_web::{HttpResponse, Result, web}; +use service::AppService; +use service::git::webhook::{ + CreateWebhookParams, UpdateWebhookParams, WebhookListResponse, WebhookResponse, +}; +use session::Session; + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/webhooks", + params( + ("namespace" = String, Path, description = "Repository namespace"), + ("repo" = String, Path, description = "Repository name"), + ), + responses( + (status = 200, description = "List webhooks", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 404, description = "Not found"), + ), + tag = "Git" +)] +pub async fn git_webhook_list( + service: web::Data, + session: Session, + path: web::Path<(String, String)>, +) -> Result { + let (namespace, repo_name) = path.into_inner(); + let resp = service + .git_webhook_list(namespace, repo_name, &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + post, + path = "/api/repos/{namespace}/{repo}/git/webhooks", + params( + ("namespace" = String, Path, description = "Repository namespace"), + ("repo" = String, Path, description = "Repository name"), + ), + request_body = CreateWebhookParams, 
+ responses( + (status = 200, description = "Create webhook", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden"), + (status = 404, description = "Not found"), + ), + tag = "Git" +)] +pub async fn git_webhook_create( + service: web::Data, + session: Session, + path: web::Path<(String, String)>, + body: web::Json, +) -> Result { + let (namespace, repo_name) = path.into_inner(); + let resp = service + .git_webhook_create(namespace, repo_name, body.into_inner(), &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/repos/{namespace}/{repo}/git/webhooks/{webhook_id}", + params( + ("namespace" = String, Path, description = "Repository namespace"), + ("repo" = String, Path, description = "Repository name"), + ("webhook_id" = i64, Path, description = "Webhook ID"), + ), + responses( + (status = 200, description = "Get webhook", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 404, description = "Not found"), + ), + tag = "Git" +)] +pub async fn git_webhook_get( + service: web::Data, + session: Session, + path: web::Path<(String, String, i64)>, +) -> Result { + let (namespace, repo_name, webhook_id) = path.into_inner(); + let resp = service + .git_webhook_get(namespace, repo_name, webhook_id, &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + patch, + path = "/api/repos/{namespace}/{repo}/git/webhooks/{webhook_id}", + params( + ("namespace" = String, Path, description = "Repository namespace"), + ("repo" = String, Path, description = "Repository name"), + ("webhook_id" = i64, Path, description = "Webhook ID"), + ), + request_body = UpdateWebhookParams, + responses( + (status = 200, description = "Update webhook", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden"), + (status = 404, description = "Not found"), + ), + tag = "Git" +)] 
+pub async fn git_webhook_update( + service: web::Data, + session: Session, + path: web::Path<(String, String, i64)>, + body: web::Json, +) -> Result { + let (namespace, repo_name, webhook_id) = path.into_inner(); + let resp = service + .git_webhook_update( + namespace, + repo_name, + webhook_id, + body.into_inner(), + &session, + ) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + delete, + path = "/api/repos/{namespace}/{repo}/git/webhooks/{webhook_id}", + params( + ("namespace" = String, Path, description = "Repository namespace"), + ("repo" = String, Path, description = "Repository name"), + ("webhook_id" = i64, Path, description = "Webhook ID"), + ), + responses( + (status = 200, description = "Delete webhook"), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden"), + (status = 404, description = "Not found"), + ), + tag = "Git" +)] +pub async fn git_webhook_delete( + service: web::Data, + session: Session, + path: web::Path<(String, String, i64)>, +) -> Result { + let (namespace, repo_name, webhook_id) = path.into_inner(); + service + .git_webhook_delete(namespace, repo_name, webhook_id, &session) + .await?; + Ok(ApiResponse::ok(true).to_response()) +} diff --git a/libs/api/issue/assignee.rs b/libs/api/issue/assignee.rs new file mode 100644 index 0000000..16ec87a --- /dev/null +++ b/libs/api/issue/assignee.rs @@ -0,0 +1,89 @@ +use crate::{ApiResponse, error::ApiError}; +use actix_web::{HttpResponse, Result, web}; +use service::AppService; +use session::Session; + +#[utoipa::path( + get, + path = "/api/issue/{project}/issues/{number}/assignees", + params( + ("project" = String, Path), + ("number" = i64, Path), + ), + responses( + (status = 200, description = "List issue assignees", body = ApiResponse>), + (status = 401, description = "Unauthorized"), + (status = 404, description = "Not found"), +), + tag = "Issues" +)] +pub async fn issue_assignee_list( + service: web::Data, + session: Session, + 
path: web::Path<(String, i64)>, +) -> Result { + let (project, issue_number) = path.into_inner(); + let resp = service + .issue_assignee_list(project, issue_number, &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + post, + path = "/api/issue/{project}/issues/{number}/assignees", + params( + ("project" = String, Path), + ("number" = i64, Path), + ), + request_body = service::issue::IssueAssignUserRequest, + responses( + (status = 200, description = "Add assignee to issue", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden"), + (status = 404, description = "Not found"), +), + tag = "Issues" +)] +pub async fn issue_assignee_add( + service: web::Data, + session: Session, + path: web::Path<(String, i64)>, + body: web::Json, +) -> Result { + let (project, issue_number) = path.into_inner(); + let resp = service + .issue_assignee_add(project, issue_number, body.into_inner(), &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + delete, + path = "/api/issue/{project}/issues/{number}/assignees/{assignee_id}", + params( + ("project" = String, Path), + ("number" = i64, Path), + ("assignee_id" = String, Path), + ), + responses( + (status = 200, description = "Remove assignee from issue"), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden"), + (status = 404, description = "Not found"), +), + tag = "Issues" +)] +pub async fn issue_assignee_remove( + service: web::Data, + session: Session, + path: web::Path<(String, i64, String)>, +) -> Result { + let (project, issue_number, assignee_id) = path.into_inner(); + let assignee_uuid = uuid::Uuid::parse_str(&assignee_id) + .map_err(|_| service::error::AppError::BadRequest("Invalid UUID".to_string()))?; + service + .issue_assignee_remove(project, issue_number, assignee_uuid, &session) + .await?; + Ok(ApiResponse::ok(serde_json::json!({ "success": true })).to_response()) 
+} diff --git a/libs/api/issue/comment.rs b/libs/api/issue/comment.rs new file mode 100644 index 0000000..296c61e --- /dev/null +++ b/libs/api/issue/comment.rs @@ -0,0 +1,159 @@ +use crate::{ApiResponse, error::ApiError}; +use actix_web::{HttpResponse, Result, web}; +use service::AppService; +use session::Session; + +#[utoipa::path( + get, + path = "/api/issue/{project}/issues/{number}/comments", + params( + ("project" = String, Path), + ("number" = i64, Path), + ("page" = Option, Query), + ("per_page" = Option, Query), + ), + responses( + (status = 200, description = "List issue comments", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 404, description = "Not found"), +), + tag = "Issues" +)] +pub async fn issue_comment_list( + service: web::Data, + session: Session, + path: web::Path<(String, i64)>, + query: web::Query, +) -> Result { + let (project, issue_number) = path.into_inner(); + let resp = service + .issue_comment_list( + project, + issue_number, + Some(query.page.unwrap_or(1)), + Some(query.per_page.unwrap_or(20)), + &session, + ) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/issue/{project}/issues/{number}/comments/{comment_id}", + params( + ("project" = String, Path), + ("number" = i64, Path), + ("comment_id" = i64, Path), + ), + responses( + (status = 200, description = "Get issue comment", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 404, description = "Not found"), +), + tag = "Issues" +)] +pub async fn issue_comment_get( + service: web::Data, + session: Session, + path: web::Path<(String, i64, i64)>, +) -> Result { + let (project, issue_number, comment_id) = path.into_inner(); + let resp = service + .issue_comment_get(project, issue_number, comment_id, &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + post, + path = "/api/issue/{project}/issues/{number}/comments", + params( + ("project" = 
String, Path), + ("number" = i64, Path), + ), + request_body = service::issue::IssueCommentCreateRequest, + responses( + (status = 200, description = "Create issue comment", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden"), + (status = 404, description = "Not found"), +), + tag = "Issues" +)] +pub async fn issue_comment_create( + service: web::Data, + session: Session, + path: web::Path<(String, i64)>, + body: web::Json, +) -> Result { + let (project, issue_number) = path.into_inner(); + let resp = service + .issue_comment_create(project, issue_number, body.into_inner(), &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + patch, + path = "/api/issue/{project}/issues/{number}/comments/{comment_id}", + params( + ("project" = String, Path), + ("number" = i64, Path), + ("comment_id" = i64, Path), + ), + request_body = service::issue::IssueCommentUpdateRequest, + responses( + (status = 200, description = "Update issue comment", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden"), + (status = 404, description = "Not found"), +), + tag = "Issues" +)] +pub async fn issue_comment_update( + service: web::Data, + session: Session, + path: web::Path<(String, i64, i64)>, + body: web::Json, +) -> Result { + let (project, issue_number, comment_id) = path.into_inner(); + let resp = service + .issue_comment_update( + project, + issue_number, + comment_id, + body.into_inner(), + &session, + ) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + delete, + path = "/api/issue/{project}/issues/{number}/comments/{comment_id}", + params( + ("project" = String, Path), + ("number" = i64, Path), + ("comment_id" = i64, Path), + ), + responses( + (status = 200, description = "Delete issue comment"), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden"), + (status = 404, 
description = "Not found"), +), + tag = "Issues" +)] +pub async fn issue_comment_delete( + service: web::Data, + session: Session, + path: web::Path<(String, i64, i64)>, +) -> Result { + let (project, issue_number, comment_id) = path.into_inner(); + service + .issue_comment_delete(project, issue_number, comment_id, &session) + .await?; + Ok(ApiResponse::ok(serde_json::json!({ "success": true })).to_response()) +} diff --git a/libs/api/issue/comment_reaction.rs b/libs/api/issue/comment_reaction.rs new file mode 100644 index 0000000..90b09f3 --- /dev/null +++ b/libs/api/issue/comment_reaction.rs @@ -0,0 +1,96 @@ +use crate::{ApiResponse, error::ApiError}; +use actix_web::{HttpResponse, Result, web}; +use service::AppService; +use session::Session; + +#[utoipa::path( + get, + path = "/api/issue/{project}/issues/{number}/comments/{comment_id}/reactions", + params( + ("project" = String, Path), + ("number" = i64, Path), + ("comment_id" = i64, Path), + ), + responses( + (status = 200, description = "List comment reactions", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 404, description = "Not found"), +), + tag = "Issues" +)] +pub async fn issue_comment_reaction_list( + service: web::Data, + session: Session, + path: web::Path<(String, i64, i64)>, +) -> Result { + let (project, issue_number, comment_id) = path.into_inner(); + let resp = service + .issue_comment_reaction_list(project, issue_number, comment_id, &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + post, + path = "/api/issue/{project}/issues/{number}/comments/{comment_id}/reactions", + params( + ("project" = String, Path), + ("number" = i64, Path), + ("comment_id" = i64, Path), + ), + request_body = service::issue::ReactionAddRequest, + responses( + (status = 200, description = "Add reaction to comment", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden"), + (status = 404, 
description = "Not found"), +), + tag = "Issues" +)] +pub async fn issue_comment_reaction_add( + service: web::Data, + session: Session, + path: web::Path<(String, i64, i64)>, + body: web::Json, +) -> Result { + let (project, issue_number, comment_id) = path.into_inner(); + let resp = service + .issue_comment_reaction_add( + project, + issue_number, + comment_id, + body.into_inner(), + &session, + ) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + delete, + path = "/api/issue/{project}/issues/{number}/comments/{comment_id}/reactions/{reaction}", + params( + ("project" = String, Path), + ("number" = i64, Path), + ("comment_id" = i64, Path), + ("reaction" = String, Path), + ), + responses( + (status = 200, description = "Remove reaction from comment"), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden"), + (status = 404, description = "Not found"), +), + tag = "Issues" +)] +pub async fn issue_comment_reaction_remove( + service: web::Data, + session: Session, + path: web::Path<(String, i64, i64, String)>, +) -> Result { + let (project, issue_number, comment_id, reaction) = path.into_inner(); + service + .issue_comment_reaction_remove(project, issue_number, comment_id, reaction, &session) + .await?; + Ok(ApiResponse::ok(serde_json::json!({ "success": true })).to_response()) +} diff --git a/libs/api/issue/issue_label.rs b/libs/api/issue/issue_label.rs new file mode 100644 index 0000000..13958c3 --- /dev/null +++ b/libs/api/issue/issue_label.rs @@ -0,0 +1,87 @@ +use crate::{ApiResponse, error::ApiError}; +use actix_web::{HttpResponse, Result, web}; +use service::AppService; +use session::Session; + +#[utoipa::path( + get, + path = "/api/issue/{project}/issues/{number}/labels", + params( + ("project" = String, Path), + ("number" = i64, Path), + ), + responses( + (status = 200, description = "List issue labels", body = ApiResponse>), + (status = 401, description = "Unauthorized"), + (status = 404, 
description = "Not found"), +), + tag = "Issues" +)] +pub async fn issue_label_list( + service: web::Data, + session: Session, + path: web::Path<(String, i64)>, +) -> Result { + let (project, issue_number) = path.into_inner(); + let resp = service + .issue_label_list(project, issue_number, &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + post, + path = "/api/issue/{project}/issues/{number}/labels", + params( + ("project" = String, Path), + ("number" = i64, Path), + ), + request_body = service::issue::IssueAddLabelRequest, + responses( + (status = 200, description = "Add label to issue", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden"), + (status = 404, description = "Not found"), +), + tag = "Issues" +)] +pub async fn issue_label_add( + service: web::Data, + session: Session, + path: web::Path<(String, i64)>, + body: web::Json, +) -> Result { + let (project, issue_number) = path.into_inner(); + let resp = service + .issue_label_add(project, issue_number, body.into_inner(), &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + delete, + path = "/api/issue/{project}/issues/{number}/labels/{label_id}", + params( + ("project" = String, Path), + ("number" = i64, Path), + ("label_id" = i64, Path), + ), + responses( + (status = 200, description = "Remove label from issue"), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden"), + (status = 404, description = "Not found"), +), + tag = "Issues" +)] +pub async fn issue_label_remove( + service: web::Data, + session: Session, + path: web::Path<(String, i64, i64)>, +) -> Result { + let (project, issue_number, label_id) = path.into_inner(); + service + .issue_label_remove(project, issue_number, label_id, &session) + .await?; + Ok(ApiResponse::ok(serde_json::json!({ "success": true })).to_response()) +} diff --git a/libs/api/issue/label.rs 
b/libs/api/issue/label.rs new file mode 100644 index 0000000..d8edd14 --- /dev/null +++ b/libs/api/issue/label.rs @@ -0,0 +1,76 @@ +use crate::{ApiResponse, error::ApiError}; +use actix_web::{HttpResponse, Result, web}; +use service::AppService; +use session::Session; + +#[utoipa::path( + get, + path = "/api/issue/{project}/labels", + params(("project" = String, Path)), + responses( + (status = 200, description = "List labels", body = ApiResponse>), + (status = 401, description = "Unauthorized"), + (status = 404, description = "Not found"), + ), + tag = "Issues" +)] +pub async fn label_list( + service: web::Data, + session: Session, + path: web::Path, +) -> Result { + let project = path.into_inner(); + let resp = service.label_list(project, &session).await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + post, + path = "/api/issue/{project}/labels", + params(("project" = String, Path)), + request_body = service::issue::CreateLabelRequest, + responses( + (status = 200, description = "Create label", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden"), + (status = 404, description = "Not found"), +), + tag = "Issues" +)] +pub async fn label_create( + service: web::Data, + session: Session, + path: web::Path, + body: web::Json, +) -> Result { + let project = path.into_inner(); + let resp = service + .label_create(project, body.into_inner(), &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + delete, + path = "/api/issue/{project}/labels/{label_id}", + params( + ("project" = String, Path), + ("label_id" = i64, Path), + ), + responses( + (status = 200, description = "Delete label"), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden"), + (status = 404, description = "Not found"), +), + tag = "Issues" +)] +pub async fn label_delete( + service: web::Data, + session: Session, + path: web::Path<(String, i64)>, +) -> Result 
{ + let (project, label_id) = path.into_inner(); + service.label_delete(project, label_id, &session).await?; + Ok(ApiResponse::ok(serde_json::json!({ "success": true })).to_response()) +} diff --git a/libs/api/issue/mod.rs b/libs/api/issue/mod.rs new file mode 100644 index 0000000..910ad6c --- /dev/null +++ b/libs/api/issue/mod.rs @@ -0,0 +1,359 @@ +use crate::{ApiResponse, error::ApiError}; +use actix_web::{HttpResponse, Result, web}; +use service::AppService; +use session::Session; + +pub mod assignee; +pub mod comment; +pub mod comment_reaction; +pub mod issue_label; +pub mod label; +pub mod pull_request; +pub mod reaction; +pub mod repo; +pub mod subscriber; + +#[derive(serde::Deserialize, utoipa::IntoParams)] +pub struct ListQuery { + pub state: Option, + pub page: Option, + pub per_page: Option, +} + +#[derive(serde::Deserialize, utoipa::IntoParams)] +pub struct PagerQuery { + pub page: Option, + pub per_page: Option, +} + +#[utoipa::path( + get, + path = "/api/issue/{project}/issues", + params( + ("project" = String, Path), + ("state" = Option, Query), + ("page" = Option, Query), + ("per_page" = Option, Query), + ), + responses( + (status = 200, description = "List issues", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 404, description = "Not found"), +), + tag = "Issues" +)] +pub async fn issue_list( + service: web::Data, + session: Session, + path: web::Path, + query: web::Query, +) -> Result { + let project = path.into_inner(); + let resp = service + .issue_list( + project, + query.state.clone(), + Some(query.page.unwrap_or(1)), + Some(query.per_page.unwrap_or(20)), + &session, + ) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/issue/{project}/issues/{number}", + params( + ("project" = String, Path), + ("number" = i64, Path), + ), + responses( + (status = 200, description = "Get issue", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 404, 
description = "Not found"), +), + tag = "Issues" +)] +pub async fn issue_get( + service: web::Data, + session: Session, + path: web::Path<(String, i64)>, +) -> Result { + let (project, number) = path.into_inner(); + let resp = service.issue_get(project, number, &session).await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + post, + path = "/api/issue/{project}/issues", + params(("project" = String, Path)), + request_body = service::issue::IssueCreateRequest, + responses( + (status = 200, description = "Create issue", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden"), + (status = 404, description = "Not found"), +), + tag = "Issues" +)] +pub async fn issue_create( + service: web::Data, + session: Session, + path: web::Path, + body: web::Json, +) -> Result { + let project = path.into_inner(); + let resp = service + .issue_create(project, body.into_inner(), &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + patch, + path = "/api/issue/{project}/issues/{number}", + params( + ("project" = String, Path), + ("number" = i64, Path), + ), + request_body = service::issue::IssueUpdateRequest, + responses( + (status = 200, description = "Update issue", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden"), + (status = 404, description = "Not found"), +), + tag = "Issues" +)] +pub async fn issue_update( + service: web::Data, + session: Session, + path: web::Path<(String, i64)>, + body: web::Json, +) -> Result { + let (project, number) = path.into_inner(); + let resp = service + .issue_update(project, number, body.into_inner(), &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + post, + path = "/api/issue/{project}/issues/{number}/close", + params( + ("project" = String, Path), + ("number" = i64, Path), + ), + responses( + (status = 200, description = "Close issue", body 
= ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden"), + (status = 404, description = "Not found"), +), + tag = "Issues" +)] +pub async fn issue_close( + service: web::Data, + session: Session, + path: web::Path<(String, i64)>, +) -> Result { + let (project, number) = path.into_inner(); + let resp = service.issue_close(project, number, &session).await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + post, + path = "/api/issue/{project}/issues/{number}/reopen", + params( + ("project" = String, Path), + ("number" = i64, Path), + ), + responses( + (status = 200, description = "Reopen issue", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden"), + (status = 404, description = "Not found"), +), + tag = "Issues" +)] +pub async fn issue_reopen( + service: web::Data, + session: Session, + path: web::Path<(String, i64)>, +) -> Result { + let (project, number) = path.into_inner(); + let resp = service.issue_reopen(project, number, &session).await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + delete, + path = "/api/issue/{project}/issues/{number}", + params( + ("project" = String, Path), + ("number" = i64, Path), + ), + responses( + (status = 200, description = "Delete issue"), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden"), + (status = 404, description = "Not found"), +), + tag = "Issues" +)] +pub async fn issue_delete( + service: web::Data, + session: Session, + path: web::Path<(String, i64)>, +) -> Result { + let (project, number) = path.into_inner(); + service.issue_delete(project, number, &session).await?; + Ok(ApiResponse::ok(serde_json::json!({ "success": true })).to_response()) +} + +#[utoipa::path( + get, + path = "/api/issue/{project}/issues/summary", + params(("project" = String, Path)), + responses( + (status = 200, description = "Get issue summary", body = 
ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 404, description = "Not found"), +), + tag = "Issues" +)] +pub async fn issue_summary( + service: web::Data, + session: Session, + path: web::Path, +) -> Result { + let project = path.into_inner(); + let resp = service.issue_summary(project, &session).await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +pub fn init_issue_routes(cfg: &mut web::ServiceConfig) { + cfg.service( + web::scope("issue/{project}") + .route("/issues", web::get().to(issue_list)) + .route("/issues", web::post().to(issue_create)) + .route("/issues/summary", web::get().to(issue_summary)) + .route("/issues/{number}", web::get().to(issue_get)) + .route("/issues/{number}", web::patch().to(issue_update)) + .route("/issues/{number}", web::delete().to(issue_delete)) + .route("/issues/{number}/close", web::post().to(issue_close)) + .route("/issues/{number}/reopen", web::post().to(issue_reopen)) + .route( + "/issues/{number}/labels", + web::get().to(issue_label::issue_label_list), + ) + .route( + "/issues/{number}/labels", + web::post().to(issue_label::issue_label_add), + ) + .route( + "/issues/{number}/labels/{label_id}", + web::delete().to(issue_label::issue_label_remove), + ) + .route( + "/issues/{number}/comments", + web::get().to(comment::issue_comment_list), + ) + .route( + "/issues/{number}/comments/{comment_id}", + web::get().to(comment::issue_comment_get), + ) + .route( + "/issues/{number}/comments", + web::post().to(comment::issue_comment_create), + ) + .route( + "/issues/{number}/comments/{comment_id}", + web::patch().to(comment::issue_comment_update), + ) + .route( + "/issues/{number}/comments/{comment_id}", + web::delete().to(comment::issue_comment_delete), + ) + .route( + "/issues/{number}/comments/{comment_id}/reactions", + web::get().to(comment_reaction::issue_comment_reaction_list), + ) + .route( + "/issues/{number}/comments/{comment_id}/reactions", + 
web::post().to(comment_reaction::issue_comment_reaction_add), + ) + .route( + "/issues/{number}/comments/{comment_id}/reactions/{reaction}", + web::delete().to(comment_reaction::issue_comment_reaction_remove), + ) + .route( + "/issues/{number}/assignees", + web::get().to(assignee::issue_assignee_list), + ) + .route( + "/issues/{number}/assignees", + web::post().to(assignee::issue_assignee_add), + ) + .route( + "/issues/{number}/assignees/{assignee_id}", + web::delete().to(assignee::issue_assignee_remove), + ) + .route( + "/issues/{number}/subscribers", + web::get().to(subscriber::issue_subscriber_list), + ) + .route( + "/issues/{number}/subscribe", + web::post().to(subscriber::issue_subscribe), + ) + .route( + "/issues/{number}/subscribe", + web::delete().to(subscriber::issue_unsubscribe), + ) + .route( + "/issues/{number}/reactions", + web::get().to(reaction::issue_reaction_list), + ) + .route( + "/issues/{number}/reactions", + web::post().to(reaction::issue_reaction_add), + ) + .route( + "/issues/{number}/reactions/{reaction}", + web::delete().to(reaction::issue_reaction_remove), + ) + .route( + "/issues/{number}/repos", + web::get().to(repo::issue_repo_list), + ) + .route( + "/issues/{number}/repos", + web::post().to(repo::issue_repo_link), + ) + .route( + "/issues/{number}/repos/{repo_id}", + web::delete().to(repo::issue_repo_unlink), + ) + .route( + "/issues/{number}/pulls", + web::get().to(pull_request::issue_pull_request_list), + ) + .route( + "/issues/{number}/pulls", + web::post().to(pull_request::issue_pull_request_link), + ) + .route( + "/issues/{number}/pulls/{repo_id}/{pr_number}", + web::delete().to(pull_request::issue_pull_request_unlink), + ) + // labels (at project level) + .route("/labels", web::get().to(label::label_list)) + .route("/labels", web::post().to(label::label_create)) + .route("/labels/{label_id}", web::delete().to(label::label_delete)), + ); +} diff --git a/libs/api/issue/pull_request.rs b/libs/api/issue/pull_request.rs new file mode 
100644 index 0000000..a173283 --- /dev/null +++ b/libs/api/issue/pull_request.rs @@ -0,0 +1,90 @@ +use crate::{ApiResponse, error::ApiError}; +use actix_web::{HttpResponse, Result, web}; +use service::AppService; +use session::Session; + +#[utoipa::path( + get, + path = "/api/issue/{project}/issues/{number}/pulls", + params( + ("project" = String, Path), + ("number" = i64, Path), + ), + responses( + (status = 200, description = "List issue pull requests", body = ApiResponse>), + (status = 401, description = "Unauthorized"), + (status = 404, description = "Not found"), +), + tag = "Issues" +)] +pub async fn issue_pull_request_list( + service: web::Data, + session: Session, + path: web::Path<(String, i64)>, +) -> Result { + let (project, issue_number) = path.into_inner(); + let resp = service + .issue_pull_request_list(project, issue_number, &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + post, + path = "/api/issue/{project}/issues/{number}/pulls", + params( + ("project" = String, Path), + ("number" = i64, Path), + ), + request_body = service::issue::IssueLinkPullRequestRequest, + responses( + (status = 200, description = "Link pull request to issue", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden"), + (status = 404, description = "Not found"), +), + tag = "Issues" +)] +pub async fn issue_pull_request_link( + service: web::Data, + session: Session, + path: web::Path<(String, i64)>, + body: web::Json, +) -> Result { + let (project, issue_number) = path.into_inner(); + let resp = service + .issue_pull_request_link(project, issue_number, body.into_inner(), &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + delete, + path = "/api/issue/{project}/issues/{number}/pulls/{repo_id}/{pr_number}", + params( + ("project" = String, Path), + ("number" = i64, Path), + ("repo_id" = String, Path), + ("pr_number" = i64, Path), + ), + responses( + 
(status = 200, description = "Unlink pull request from issue"), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden"), + (status = 404, description = "Not found"), +), + tag = "Issues" +)] +pub async fn issue_pull_request_unlink( + service: web::Data, + session: Session, + path: web::Path<(String, i64, String, i64)>, +) -> Result { + let (project, issue_number, repo_id, pr_number) = path.into_inner(); + let repo_uuid = uuid::Uuid::parse_str(&repo_id) + .map_err(|_| service::error::AppError::BadRequest("Invalid UUID".to_string()))?; + service + .issue_pull_request_unlink(project, issue_number, repo_uuid, pr_number, &session) + .await?; + Ok(ApiResponse::ok(serde_json::json!({ "success": true })).to_response()) +} diff --git a/libs/api/issue/reaction.rs b/libs/api/issue/reaction.rs new file mode 100644 index 0000000..364aa25 --- /dev/null +++ b/libs/api/issue/reaction.rs @@ -0,0 +1,87 @@ +use crate::{ApiResponse, error::ApiError}; +use actix_web::{HttpResponse, Result, web}; +use service::AppService; +use session::Session; + +#[utoipa::path( + get, + path = "/api/issue/{project}/issues/{number}/reactions", + params( + ("project" = String, Path), + ("number" = i64, Path), + ), + responses( + (status = 200, description = "List issue reactions", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 404, description = "Not found"), +), + tag = "Issues" +)] +pub async fn issue_reaction_list( + service: web::Data, + session: Session, + path: web::Path<(String, i64)>, +) -> Result { + let (project, issue_number) = path.into_inner(); + let resp = service + .issue_reaction_list(project, issue_number, &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + post, + path = "/api/issue/{project}/issues/{number}/reactions", + params( + ("project" = String, Path), + ("number" = i64, Path), + ), + request_body = service::issue::ReactionAddRequest, + responses( + (status = 200, description 
= "Add reaction to issue", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden"), + (status = 404, description = "Not found"), +), + tag = "Issues" +)] +pub async fn issue_reaction_add( + service: web::Data, + session: Session, + path: web::Path<(String, i64)>, + body: web::Json, +) -> Result { + let (project, issue_number) = path.into_inner(); + let resp = service + .issue_reaction_add(project, issue_number, body.into_inner(), &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + delete, + path = "/api/issue/{project}/issues/{number}/reactions/{reaction}", + params( + ("project" = String, Path), + ("number" = i64, Path), + ("reaction" = String, Path), + ), + responses( + (status = 200, description = "Remove reaction from issue"), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden"), + (status = 404, description = "Not found"), +), + tag = "Issues" +)] +pub async fn issue_reaction_remove( + service: web::Data, + session: Session, + path: web::Path<(String, i64, String)>, +) -> Result { + let (project, issue_number, reaction) = path.into_inner(); + service + .issue_reaction_remove(project, issue_number, reaction, &session) + .await?; + Ok(ApiResponse::ok(serde_json::json!({ "success": true })).to_response()) +} diff --git a/libs/api/issue/repo.rs b/libs/api/issue/repo.rs new file mode 100644 index 0000000..eb321f6 --- /dev/null +++ b/libs/api/issue/repo.rs @@ -0,0 +1,89 @@ +use crate::{ApiResponse, error::ApiError}; +use actix_web::{HttpResponse, Result, web}; +use service::AppService; +use session::Session; + +#[utoipa::path( + get, + path = "/api/issue/{project}/issues/{number}/repos", + params( + ("project" = String, Path), + ("number" = i64, Path), + ), + responses( + (status = 200, description = "List issue repos", body = ApiResponse>), + (status = 401, description = "Unauthorized"), + (status = 404, description = "Not found"), +), 
+ tag = "Issues" +)] +pub async fn issue_repo_list( + service: web::Data, + session: Session, + path: web::Path<(String, i64)>, +) -> Result { + let (project, issue_number) = path.into_inner(); + let resp = service + .issue_repo_list(project, issue_number, &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + post, + path = "/api/issue/{project}/issues/{number}/repos", + params( + ("project" = String, Path), + ("number" = i64, Path), + ), + request_body = service::issue::IssueLinkRepoRequest, + responses( + (status = 200, description = "Link repo to issue", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden"), + (status = 404, description = "Not found"), +), + tag = "Issues" +)] +pub async fn issue_repo_link( + service: web::Data, + session: Session, + path: web::Path<(String, i64)>, + body: web::Json, +) -> Result { + let (project, issue_number) = path.into_inner(); + let resp = service + .issue_repo_link(project, issue_number, body.into_inner(), &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + delete, + path = "/api/issue/{project}/issues/{number}/repos/{repo_id}", + params( + ("project" = String, Path), + ("number" = i64, Path), + ("repo_id" = String, Path), + ), + responses( + (status = 200, description = "Unlink repo from issue"), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden"), + (status = 404, description = "Not found"), +), + tag = "Issues" +)] +pub async fn issue_repo_unlink( + service: web::Data, + session: Session, + path: web::Path<(String, i64, String)>, +) -> Result { + let (project, issue_number, repo_id) = path.into_inner(); + let repo_uuid = uuid::Uuid::parse_str(&repo_id) + .map_err(|_| service::error::AppError::BadRequest("Invalid UUID".to_string()))?; + service + .issue_repo_unlink(project, issue_number, repo_uuid, &session) + .await?; + 
Ok(ApiResponse::ok(serde_json::json!({ "success": true })).to_response()) +} diff --git a/libs/api/issue/subscriber.rs b/libs/api/issue/subscriber.rs new file mode 100644 index 0000000..4731c08 --- /dev/null +++ b/libs/api/issue/subscriber.rs @@ -0,0 +1,84 @@ +use crate::{ApiResponse, error::ApiError}; +use actix_web::{HttpResponse, Result, web}; +use service::AppService; +use session::Session; + +#[utoipa::path( + get, + path = "/api/issue/{project}/issues/{number}/subscribers", + params( + ("project" = String, Path), + ("number" = i64, Path), + ), + responses( + (status = 200, description = "List issue subscribers", body = ApiResponse>), + (status = 401, description = "Unauthorized"), + (status = 404, description = "Not found"), +), + tag = "Issues" +)] +pub async fn issue_subscriber_list( + service: web::Data, + session: Session, + path: web::Path<(String, i64)>, +) -> Result { + let (project, issue_number) = path.into_inner(); + let resp = service + .issue_subscriber_list(project, issue_number, &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + post, + path = "/api/issue/{project}/issues/{number}/subscribe", + params( + ("project" = String, Path), + ("number" = i64, Path), + ), + responses( + (status = 200, description = "Subscribe to issue", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden"), + (status = 404, description = "Not found"), +), + tag = "Issues" +)] +pub async fn issue_subscribe( + service: web::Data, + session: Session, + path: web::Path<(String, i64)>, +) -> Result { + let (project, issue_number) = path.into_inner(); + let resp = service + .issue_subscribe(project, issue_number, &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + delete, + path = "/api/issue/{project}/issues/{number}/subscribe", + params( + ("project" = String, Path), + ("number" = i64, Path), + ), + responses( + (status = 200, description = 
"Unsubscribe from issue"), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden"), + (status = 404, description = "Not found"), +), + tag = "Issues" +)] +pub async fn issue_unsubscribe( + service: web::Data, + session: Session, + path: web::Path<(String, i64)>, +) -> Result { + let (project, issue_number) = path.into_inner(); + service + .issue_unsubscribe(project, issue_number, &session) + .await?; + Ok(ApiResponse::ok(serde_json::json!({ "success": true })).to_response()) +} diff --git a/libs/api/lib.rs b/libs/api/lib.rs new file mode 100644 index 0000000..031eaac --- /dev/null +++ b/libs/api/lib.rs @@ -0,0 +1,16 @@ +pub mod agent; +pub mod auth; +pub mod error; +pub mod git; +pub mod issue; +pub mod openapi; +pub mod project; +pub mod pull_request; +pub mod room; +pub mod route; +pub mod search; +pub mod skill; +pub mod user; +pub mod workspace; + +pub use error::{api_success, ApiError, ApiResponse}; diff --git a/libs/api/openapi.rs b/libs/api/openapi.rs new file mode 100644 index 0000000..7eea55e --- /dev/null +++ b/libs/api/openapi.rs @@ -0,0 +1,714 @@ +#![allow(unused_imports, dead_code)] +//! OpenAPI 3.0 specification for the entire API surface. +//! +//! This module aggregates all `#[utoipa::path]` annotated handlers from every +//! API module and all `#[derive(utoipa::ToSchema)]` types used in request / +//! response bodies, so that `utoipa` can produce a single `openapi.json`. 
+ +use utoipa::OpenApi; + +// Pull request query type defined in api::pull_request + +// Room query types defined in api::room + +#[derive(OpenApi)] +#[openapi( + paths( + // Auth + crate::auth::login::api_auth_login, + crate::auth::register::api_auth_register, + crate::auth::logout::api_auth_logout, + crate::auth::captcha::api_auth_captcha, + crate::auth::me::api_auth_me, + crate::auth::password::api_user_change_password, + crate::auth::password::api_user_request_password_reset, + crate::auth::totp::api_2fa_enable, + crate::auth::totp::api_2fa_verify, + crate::auth::totp::api_2fa_disable, + crate::auth::totp::api_2fa_status, + crate::auth::email::api_email_get, + crate::auth::email::api_email_change, + crate::auth::email::api_email_verify, + // Agent + // Agent + crate::agent::code_review::trigger_code_review, + crate::agent::pr_summary::generate_pr_description, + crate::agent::provider::provider_list, + crate::agent::provider::provider_get, + crate::agent::provider::provider_create, + crate::agent::provider::provider_update, + crate::agent::provider::provider_delete, + crate::agent::model::model_list, + crate::agent::model::model_get, + crate::agent::model::model_create, + crate::agent::model::model_update, + crate::agent::model::model_delete, + crate::agent::model_version::model_version_list, + crate::agent::model_version::model_version_get, + crate::agent::model_version::model_version_create, + crate::agent::model_version::model_version_update, + crate::agent::model_version::model_version_delete, + crate::agent::model_pricing::model_pricing_list, + crate::agent::model_pricing::model_pricing_get, + crate::agent::model_pricing::model_pricing_create, + crate::agent::model_pricing::model_pricing_update, + crate::agent::model_pricing::model_pricing_delete, + crate::agent::model_capability::model_capability_list, + crate::agent::model_capability::model_capability_get, + crate::agent::model_capability::model_capability_create, + 
crate::agent::model_capability::model_capability_update, + crate::agent::model_capability::model_capability_delete, + crate::agent::model_parameter_profile::model_parameter_profile_list, + crate::agent::model_parameter_profile::model_parameter_profile_get, + crate::agent::model_parameter_profile::model_parameter_profile_create, + crate::agent::model_parameter_profile::model_parameter_profile_update, + crate::agent::model_parameter_profile::model_parameter_profile_delete, + // Git init (top-level) + crate::git::init::git_init_bare, + crate::git::init::git_open, + crate::git::init::git_open_workdir, + crate::git::init::git_is_repo, + // Git archive + crate::git::archive::git_archive, + crate::git::archive::git_archive_list, + crate::git::archive::git_archive_summary, + crate::git::archive::git_archive_cached, + crate::git::archive::git_archive_invalidate, + crate::git::archive::git_archive_invalidate_all, + // Git blame + crate::git::blame::git_blame_file, + // Git blob + crate::git::blob::git_readme, + crate::git::blob::git_blob_create, + crate::git::blob::git_blob_get, + crate::git::blob::git_blob_exists, + crate::git::blob::git_blob_is_binary, + crate::git::blob::git_blob_content, + crate::git::blob::git_blob_size, + // Git branch + crate::git::branch::git_branch_list, + crate::git::branch::git_branch_summary, + crate::git::branch::git_branch_create, + crate::git::branch::git_branch_get, + crate::git::branch::git_branch_delete, + crate::git::branch::git_branch_current, + crate::git::branch::git_branch_exists, + crate::git::branch::git_branch_is_head, + crate::git::branch::git_branch_upstream, + crate::git::branch::git_branch_tracking_difference, + crate::git::branch::git_branch_delete_remote, + crate::git::branch::git_branch_rename, + crate::git::branch::git_branch_move, + crate::git::branch::git_branch_set_upstream, + crate::git::branch::git_branch_diff, + crate::git::branch::git_branch_is_detached, + crate::git::branch::git_branch_is_merged, + 
crate::git::branch::git_branch_merge_base, + crate::git::branch::git_branch_is_ancestor, + crate::git::branch::git_branch_fast_forward, + crate::git::branch::git_branch_is_conflicted, + // Git commit + crate::git::commit::git_commit_log, + crate::git::commit::git_commit_count, + crate::git::commit::git_commit_create, + crate::git::commit::git_commit_graph, + crate::git::commit::git_commit_graph_react, + crate::git::commit::git_commit_walk, + crate::git::commit::git_commit_resolve_rev, + crate::git::commit::git_commit_get, + crate::git::commit::git_commit_amend, + crate::git::commit::git_commit_exists, + crate::git::commit::git_commit_is_commit, + crate::git::commit::git_commit_message, + crate::git::commit::git_commit_summary, + crate::git::commit::git_commit_short_id, + crate::git::commit::git_commit_author, + crate::git::commit::git_commit_tree_id, + crate::git::commit::git_commit_parent_count, + crate::git::commit::git_commit_parent_ids, + crate::git::commit::git_commit_parent, + crate::git::commit::git_commit_first_parent, + crate::git::commit::git_commit_is_merge, + crate::git::commit::git_commit_refs, + crate::git::commit::git_commit_branches, + crate::git::commit::git_commit_tags, + crate::git::commit::git_commit_is_tip, + crate::git::commit::git_commit_ref_count, + crate::git::commit::git_commit_reflog, + crate::git::commit::git_commit_ancestors, + crate::git::commit::git_commit_descendants, + crate::git::commit::git_commit_cherry_pick, + crate::git::commit::git_commit_cherry_pick_abort, + crate::git::commit::git_commit_revert, + crate::git::commit::git_commit_revert_abort, + // Git contributors + crate::git::contributors::git_contributors, + // Git diff + crate::git::diff::git_diff_tree_to_tree, + crate::git::diff::git_diff_commit_to_workdir, + crate::git::diff::git_diff_commit_to_index, + crate::git::diff::git_diff_workdir_to_index, + crate::git::diff::git_diff_index_to_tree, + crate::git::diff::git_diff_stats, + crate::git::diff::git_diff_patch_id, + 
crate::git::diff::git_diff_side_by_side, + // Git refs + crate::git::refs::git_ref_list, + crate::git::refs::git_ref_create, + crate::git::refs::git_ref_get, + crate::git::refs::git_ref_delete, + crate::git::refs::git_ref_rename, + crate::git::refs::git_ref_update, + crate::git::refs::git_ref_exists, + crate::git::refs::git_ref_target, + // Git repo + crate::git::repo::git_description_get, + crate::git::repo::git_description_set, + crate::git::repo::git_description_reset, + crate::git::repo::git_description_exists, + crate::git::repo::git_update_repo, + crate::git::repo::git_config_entries, + crate::git::repo::git_config_get, + crate::git::repo::git_config_set, + crate::git::repo::git_config_delete, + crate::git::repo::git_config_has, + crate::git::repo::git_merge_analysis, + crate::git::repo::git_merge_analysis_for_ref, + crate::git::repo::git_merge_base, + crate::git::repo::git_merge_commits, + crate::git::repo::git_merge_trees, + crate::git::repo::git_merge_abort, + crate::git::repo::git_merge_is_in_progress, + crate::git::repo::git_mergehead_list, + crate::git::repo::git_merge_is_conflicted, + // Git star + crate::git::star::git_star, + crate::git::star::git_unstar, + crate::git::star::git_is_starred, + crate::git::star::git_star_count, + crate::git::star::git_star_user_list, + // Git branch protection + crate::git::branch_protection::branch_protection_list, + crate::git::branch_protection::branch_protection_get, + crate::git::branch_protection::branch_protection_create, + crate::git::branch_protection::branch_protection_update, + crate::git::branch_protection::branch_protection_delete, + crate::git::branch_protection::branch_protection_check_approvals, + // Git tag + crate::git::tag::git_tag_list, + crate::git::tag::git_tag_list_names, + crate::git::tag::git_tag_summary, + crate::git::tag::git_tag_count, + crate::git::tag::git_tag_create, + crate::git::tag::git_tag_create_lightweight, + crate::git::tag::git_tag_rename, + 
crate::git::tag::git_tag_update_message, + crate::git::tag::git_tag_get, + crate::git::tag::git_tag_delete, + crate::git::tag::git_tag_exists, + crate::git::tag::git_tag_target, + crate::git::tag::git_tag_is_annotated, + crate::git::tag::git_tag_message, + crate::git::tag::git_tag_tagger, + // Git tree + crate::git::tree::git_tree_get, + crate::git::tree::git_tree_exists, + crate::git::tree::git_tree_list, + crate::git::tree::git_tree_entry, + crate::git::tree::git_tree_entry_by_path, + crate::git::tree::git_tree_entry_by_commit_path, + crate::git::tree::git_tree_entry_count, + crate::git::tree::git_tree_is_empty, + crate::git::tree::git_tree_diffstats, + // Git watch + crate::git::watch::git_watch, + crate::git::watch::git_unwatch, + crate::git::watch::git_is_watched, + crate::git::watch::git_watch_count, + crate::git::watch::git_watch_user_list, + // Git webhook + crate::git::webhook::git_webhook_list, + crate::git::webhook::git_webhook_create, + crate::git::webhook::git_webhook_get, + crate::git::webhook::git_webhook_update, + crate::git::webhook::git_webhook_delete, + // Issue + crate::issue::issue_list, + crate::issue::issue_get, + crate::issue::issue_create, + crate::issue::issue_update, + crate::issue::issue_close, + crate::issue::issue_reopen, + crate::issue::issue_delete, + crate::issue::issue_summary, + crate::issue::issue_label::issue_label_list, + crate::issue::issue_label::issue_label_add, + crate::issue::issue_label::issue_label_remove, + crate::issue::label::label_list, + crate::issue::label::label_create, + crate::issue::label::label_delete, + crate::issue::comment::issue_comment_list, + crate::issue::comment::issue_comment_get, + crate::issue::comment::issue_comment_create, + crate::issue::comment::issue_comment_update, + crate::issue::comment::issue_comment_delete, + crate::issue::comment_reaction::issue_comment_reaction_list, + crate::issue::comment_reaction::issue_comment_reaction_add, + 
crate::issue::comment_reaction::issue_comment_reaction_remove, + crate::issue::assignee::issue_assignee_list, + crate::issue::assignee::issue_assignee_add, + crate::issue::assignee::issue_assignee_remove, + crate::issue::subscriber::issue_subscriber_list, + crate::issue::subscriber::issue_subscribe, + crate::issue::subscriber::issue_unsubscribe, + crate::issue::reaction::issue_reaction_list, + crate::issue::reaction::issue_reaction_add, + crate::issue::reaction::issue_reaction_remove, + crate::issue::repo::issue_repo_list, + crate::issue::repo::issue_repo_link, + crate::issue::repo::issue_repo_unlink, + crate::issue::pull_request::issue_pull_request_list, + crate::issue::pull_request::issue_pull_request_link, + crate::issue::pull_request::issue_pull_request_unlink, + // Project + crate::project::init::project_create, + crate::project::info::project_info, + crate::project::repo::project_repos, + crate::project::repo::project_repo_create, + crate::project::members::project_members, + crate::project::members::project_update_member_role, + crate::project::members::project_remove_member, + crate::project::labels::project_labels, + crate::project::labels::project_create_label, + crate::project::labels::project_get_label, + crate::project::labels::project_update_label, + crate::project::labels::project_delete_label, + crate::project::like::project_like, + crate::project::like::project_unlike, + crate::project::like::project_is_like, + crate::project::like::project_likes_count, + crate::project::like::project_like_users, + crate::project::watch::project_watch, + crate::project::watch::project_unwatch, + crate::project::watch::project_is_watch, + crate::project::watch::project_watches_count, + crate::project::watch::project_watch_users, + // Boards + crate::project::board::board_list, + crate::project::board::board_get, + crate::project::board::board_create, + crate::project::board::board_update, + crate::project::board::board_delete, + crate::project::board::column_create, 
+ crate::project::board::column_update, + crate::project::board::column_delete, + crate::project::board::card_create, + crate::project::board::card_update, + crate::project::board::card_move, + crate::project::board::card_delete, + crate::project::settings::project_exchange_name, + crate::project::settings::project_exchange_visibility, + crate::project::settings::project_exchange_title, + crate::project::audit::project_audit_logs, + crate::project::audit::project_audit_log, + crate::project::audit::project_log_audit, + crate::project::activity::project_activities, + crate::project::activity::project_log_activity, + crate::project::billing::project_billing, + crate::project::billing::project_billing_history, + crate::project::invitation::project_my_invitations, + crate::project::invitation::project_invitations, + crate::project::invitation::project_invite_user, + crate::project::invitation::project_accept_invitation, + crate::project::invitation::project_reject_invitation, + crate::project::invitation::project_cancel_invitation, + crate::project::join_settings::project_join_settings, + crate::project::join_settings::project_update_join_settings, + crate::project::join_request::project_my_join_requests, + crate::project::join_request::project_join_requests, + crate::project::join_request::project_submit_join_request, + crate::project::join_request::project_process_join_request, + crate::project::join_request::project_cancel_join_request, + crate::project::join_answers::project_join_answers, + crate::project::join_answers::project_submit_join_answers, + crate::project::transfer_repo::project_transfer_repo, + // Pull request + crate::pull_request::pull_request::pull_request_list, + crate::pull_request::pull_request::pull_request_get, + crate::pull_request::pull_request::pull_request_create, + crate::pull_request::pull_request::pull_request_update, + crate::pull_request::pull_request::pull_request_delete, + crate::pull_request::pull_request::pull_request_close, + 
crate::pull_request::pull_request::pull_request_reopen, + crate::pull_request::pull_request::pull_request_summary, + crate::pull_request::pull_request::review_list, + crate::pull_request::pull_request::review_submit, + crate::pull_request::pull_request::review_update, + crate::pull_request::pull_request::review_delete, + crate::pull_request::pull_request::review_comment_list, + crate::pull_request::pull_request::review_comment_create, + crate::pull_request::pull_request::review_comment_update, + crate::pull_request::pull_request::review_comment_delete, + crate::pull_request::pull_request::pr_diff_side_by_side, + crate::pull_request::pull_request::pr_commits_list, + crate::pull_request::review_comment::review_comment_resolve, + crate::pull_request::review_comment::review_comment_unresolve, + crate::pull_request::review_comment::review_comment_reply, + crate::pull_request::review_request::review_request_list, + crate::pull_request::review_request::review_request_create, + crate::pull_request::review_request::review_request_delete, + crate::pull_request::review_request::review_request_dismiss, + crate::pull_request::merge::merge_analysis, + crate::pull_request::merge::merge_conflict_check, + crate::pull_request::merge::merge_execute, + crate::pull_request::merge::merge_abort, + crate::pull_request::merge::merge_is_in_progress, + // Room + crate::room::room::room_list, + crate::room::room::room_get, + crate::room::room::room_create, + crate::room::room::room_update, + crate::room::room::room_delete, + crate::room::category::category_list, + crate::room::category::category_create, + crate::room::category::category_update, + crate::room::category::category_delete, + crate::room::message::message_create, + crate::room::message::message_update, + crate::room::message::message_revoke, + crate::room::thread::thread_list, + crate::room::thread::thread_create, + crate::room::thread::thread_messages, + crate::room::member::member_list, + crate::room::member::member_add, + 
crate::room::member::member_remove, + crate::room::member::member_set_read_seq, + crate::room::member::member_update_role, + crate::room::pin::pin_list, + crate::room::pin::pin_add, + crate::room::pin::pin_remove, + crate::room::ai::ai_list, + crate::room::ai::ai_upsert, + crate::room::ai::ai_delete, + crate::room::notification::notification_list, + crate::room::notification::notification_mark_read, + crate::room::notification::notification_mark_all_read, + crate::room::notification::notification_archive, + crate::room::draft_and_history::message_edit_history, + crate::room::draft_and_history::mention_list, + crate::room::draft_and_history::mention_read_all, + // Search + crate::search::service::search, + crate::room::reaction::message_search, + // User + crate::user::profile::get_my_profile, + crate::user::profile::update_my_profile, + crate::user::profile::get_profile_by_username, + crate::user::preferences::get_preferences, + crate::user::preferences::update_preferences, + crate::user::ssh_key::add_ssh_key, + crate::user::ssh_key::list_ssh_keys, + crate::user::ssh_key::get_ssh_key, + crate::user::ssh_key::update_ssh_key, + crate::user::ssh_key::delete_ssh_key, + crate::user::access_key::create_access_key, + crate::user::access_key::list_access_keys, + crate::user::access_key::delete_access_key, + crate::user::notification::get_notification_preferences, + crate::user::notification::update_notification_preferences, + crate::user::chpc::get_my_contribution_heatmap, + crate::user::chpc::get_contribution_heatmap, + crate::user::projects::get_current_user_projects, + crate::user::projects::get_user_projects, + crate::user::repository::get_current_user_repos, + crate::user::repository::get_user_repos, + crate::user::subscribe::subscribe_target, + crate::user::subscribe::unsubscribe_target, + crate::user::subscribe::is_subscribed_to_target, + crate::user::subscribe::get_subscribers, + crate::user::subscribe::get_subscription_count, + 
crate::user::subscribe::get_subscriber_count, + crate::user::user_info::get_user_info, + // Skill + crate::skill::skill_list, + crate::skill::skill_get, + crate::skill::skill_create, + crate::skill::skill_update, + crate::skill::skill_delete, + crate::skill::skill_scan, + // Workspace + crate::workspace::init::workspace_create, + crate::workspace::info::workspace_list, + crate::workspace::info::workspace_info, + crate::workspace::projects::workspace_projects, + crate::workspace::stats::workspace_stats, + crate::workspace::billing::workspace_billing_current, + crate::workspace::billing::workspace_billing_history, + crate::workspace::billing::workspace_billing_add_credit, + crate::workspace::members::workspace_members, + crate::workspace::members::workspace_update_member_role, + crate::workspace::members::workspace_remove_member, + crate::workspace::members::workspace_invite_member, + crate::workspace::members::workspace_pending_invitations, + crate::workspace::members::workspace_cancel_invitation, + crate::workspace::members::workspace_accept_invitation, + crate::workspace::settings::workspace_update, + crate::workspace::settings::workspace_delete, + ), + components( + schemas( + // Core API types + crate::error::ApiError, + // Pager + service::Pager, + // Issue + service::issue::IssueCreateRequest, + service::issue::IssueUpdateRequest, + service::issue::IssueResponse, + service::issue::IssueListResponse, + service::issue::IssueSummaryResponse, + service::issue::IssueCommentCreateRequest, + service::issue::IssueCommentUpdateRequest, + service::issue::IssueCommentResponse, + service::issue::IssueCommentListResponse, + service::issue::IssueLabelResponse, + service::issue::IssueAddLabelRequest, + service::issue::LabelResponse, + service::issue::CreateLabelRequest, + service::issue::ReactionAddRequest, + service::issue::ReactionListResponse, + service::issue::ReactionResponse, + service::issue::IssueAssignUserRequest, + service::issue::IssueAssigneeResponse, + 
service::issue::IssueSubscriberResponse, + service::issue::IssueRepoResponse, + service::issue::IssueLinkRepoRequest, + service::issue::IssuePullRequestResponse, + service::issue::IssueLinkPullRequestRequest, + // Pull request + service::pull_request::PullRequestCreateRequest, + service::pull_request::PullRequestUpdateRequest, + service::pull_request::PullRequestResponse, + service::pull_request::PullRequestListResponse, + service::pull_request::PullRequestSummaryResponse, + service::pull_request::PrCommitsListResponse, + service::pull_request::PrCommitResponse, + service::pull_request::ReviewSubmitRequest, + service::pull_request::ReviewUpdateRequest, + service::pull_request::ReviewResponse, + service::pull_request::ReviewListResponse, + service::pull_request::ReviewCommentCreateRequest, + service::pull_request::ReviewCommentUpdateRequest, + service::pull_request::ReviewCommentResponse, + service::pull_request::ReviewCommentListResponse, + service::pull_request::ReviewCommentListQuery, + service::pull_request::ReviewCommentReplyRequest, + service::pull_request::ReviewRequestCreateRequest, + service::pull_request::ReviewRequestResponse, + service::pull_request::ReviewRequestListResponse, + service::git::diff::SideBySideDiffResponse, + service::git::diff::SideBySideDiffQuery, + service::pull_request::MergeAnalysisResponse, + service::pull_request::MergeConflictResponse, + service::pull_request::MergeRequest, + service::pull_request::MergeResponse, + // Git branch protection + service::git::branch_protection::BranchProtectionResponse, + service::git::branch_protection::BranchProtectionCreateRequest, + service::git::branch_protection::BranchProtectionUpdateRequest, + service::git::branch_protection::ApprovalCheckResult, + service::git::branch_protection::ReviewerInfo, + // Project + service::project::init::ProjectInitParams, + service::project::init::ProjectInitResponse, + service::project::info::ProjectInfoRelational, + 
service::project::repo::ProjectRepositoryPagination, + service::project::repo::ProjectRepositoryItem, + service::project::repo::ProjectRepoCreateParams, + service::project::repo::ProjectRepoCreateResponse, + service::project::members::MemberListResponse, + service::project::members::UpdateMemberRoleRequest, + service::project::labels::LabelListResponse, + service::project::labels::LabelResponse, + service::project::labels::CreateLabelParams, + service::project::labels::UpdateLabelParams, + service::project::like::LikeUserInfo, + service::project::watch::WatchUserInfo, + service::project::audit::AuditLogResponse, + service::project::audit::AuditLogParams, + service::project::activity::ActivityLogResponse, + service::project::activity::ActivityLogParams, + service::project::activity::ActivityLogListResponse, + // Skill + service::skill::info::SkillResponse, + service::skill::manage::CreateSkillRequest, + service::skill::manage::UpdateSkillRequest, + service::skill::manage::DeleteSkillResponse, + crate::skill::ScanResponse, + // Boards + service::project::board::BoardResponse, + service::project::board::BoardWithColumnsResponse, + service::project::board::ColumnResponse, + service::project::board::ColumnWithCardsResponse, + service::project::board::CardResponse, + service::project::board::CreateBoardParams, + service::project::board::UpdateBoardParams, + service::project::board::CreateColumnParams, + service::project::board::UpdateColumnParams, + service::project::board::CreateCardParams, + service::project::board::UpdateCardParams, + service::project::board::MoveCardParams, + service::project::billing::ProjectBillingCurrentResponse, + service::project::billing::ProjectBillingHistoryResponse, + service::project::billing::ProjectBillingHistoryQuery, + service::project::invitation::InvitationListResponse, + service::project::join_settings::JoinSettingsResponse, + service::project::join_settings::UpdateJoinSettingsRequest, + 
service::project::join_request::JoinRequestListResponse, + service::project::join_request::SubmitJoinRequest, + service::project::join_request::ProcessJoinRequest, + service::project::join_answers::JoinAnswersListResponse, + service::project::join_answers::AnswerRequest, + service::project::transfer_repo::TransferRepoParams, + service::project::transfer_repo::TransferRepoResponse, + // Agent + service::agent::code_review::TriggerCodeReviewRequest, + service::agent::code_review::TriggerCodeReviewResponse, + service::agent::code_review::CommentCreated, + service::agent::pr_summary::GeneratePrDescriptionRequest, + service::agent::pr_summary::GeneratePrDescriptionResponse, + service::agent::provider::ProviderResponse, + service::agent::provider::CreateProviderRequest, + service::agent::provider::UpdateProviderRequest, + service::agent::model::ModelResponse, + service::agent::model::CreateModelRequest, + service::agent::model::UpdateModelRequest, + service::agent::model_version::ModelVersionResponse, + service::agent::model_version::CreateModelVersionRequest, + service::agent::model_version::UpdateModelVersionRequest, + service::agent::model_pricing::ModelPricingResponse, + service::agent::model_pricing::CreateModelPricingRequest, + service::agent::model_pricing::UpdateModelPricingRequest, + service::agent::model_capability::ModelCapabilityResponse, + service::agent::model_capability::CreateModelCapabilityRequest, + service::agent::model_capability::UpdateModelCapabilityRequest, + service::agent::model_parameter_profile::ModelParameterProfileResponse, + service::agent::model_parameter_profile::CreateModelParameterProfileRequest, + service::agent::model_parameter_profile::UpdateModelParameterProfileRequest, + // User + service::user::profile::ProfileResponse, + service::user::profile::UpdateProfileParams, + service::user::preferences::PreferencesResponse, + service::user::preferences::PreferencesParams, + service::user::ssh_key::SshKeyResponse, + 
service::user::ssh_key::SshKeyListResponse, + service::user::ssh_key::AddSshKeyParams, + service::user::ssh_key::UpdateSshKeyParams, + service::user::access_key::AccessKeyResponse, + service::user::access_key::AccessKeyListResponse, + service::user::access_key::CreateAccessKeyParams, + service::user::notification::NotificationPreferencesResponse, + service::user::notification::NotificationPreferencesParams, + service::user::chpc::ContributionHeatmapResponse, + service::user::chpc::ContributionHeatmapQuery, + service::user::projects::UserProjectsResponse, + service::user::projects::UserProjectsQuery, + service::user::repository::UserReposResponse, + service::user::repository::UserReposQuery, + service::user::subscribe::SubscriptionInfo, + service::user::user_info::UserInfoExternal, + // Workspace + service::workspace::init::WorkspaceInitParams, + service::workspace::info::WorkspaceInfoResponse, + service::workspace::info::WorkspaceListItem, + service::workspace::info::WorkspaceListResponse, + service::workspace::info::WorkspaceProjectsQuery, + service::workspace::info::WorkspaceProjectsResponse, + service::workspace::info::WorkspaceProjectItem, + service::workspace::info::WorkspaceStatsResponse, + service::workspace::billing::WorkspaceBillingCurrentResponse, + service::workspace::billing::WorkspaceBillingHistoryResponse, + service::workspace::billing::WorkspaceBillingHistoryQuery, + service::workspace::billing::WorkspaceBillingAddCreditParams, + service::workspace::members::WorkspaceMemberInfo, + service::workspace::members::WorkspaceMembersResponse, + service::workspace::members::WorkspaceInviteParams, + service::workspace::members::WorkspaceInviteAcceptParams, + service::workspace::members::PendingInvitationInfo, + service::workspace::settings::WorkspaceUpdateParams, + // Room + room::RoomResponse, + room::RoomCreateRequest, + room::RoomUpdateRequest, + room::RoomCategoryResponse, + room::RoomCategoryCreateRequest, + room::RoomCategoryUpdateRequest, + 
room::RoomMemberResponse, + room::RoomMemberAddRequest, + room::RoomMemberRoleUpdateRequest, + room::RoomMemberReadSeqRequest, + room::RoomMessageResponse, + room::RoomMessageCreateRequest, + room::RoomMessageUpdateRequest, + room::RoomMessageListResponse, + room::RoomThreadResponse, + room::RoomThreadCreateRequest, + room::RoomPinResponse, + room::RoomAiResponse, + room::RoomAiUpsertRequest, + room::NotificationResponse, + room::NotificationListResponse, + room::NotificationType, + // Auth service types + service::auth::login::LoginParams, + service::auth::register::RegisterParams, + service::auth::password::ChangePasswordParams, + service::auth::password::ResetPasswordParams, + service::auth::captcha::CaptchaQuery, + service::auth::captcha::CaptchaResponse, + service::auth::me::ContextMe, + service::auth::totp::Enable2FAResponse, + service::auth::totp::Verify2FAParams, + service::auth::totp::Disable2FAParams, + service::auth::totp::Get2FAStatusResponse, + service::auth::email::EmailChangeRequest, + service::auth::email::EmailVerifyRequest, + service::auth::email::EmailResponse, + // Git init + service::git::init::GitInitRequest, + service::git::init::GitInitResponse, + // Git blob + service::git::blob::GitReadmeQuery, + service::git::blob::GitReadmeResponse, + // Git webhook + service::git::webhook::WebhookEvent, + service::git::webhook::CreateWebhookParams, + service::git::webhook::UpdateWebhookParams, + service::git::webhook::WebhookResponse, + service::git::webhook::WebhookListResponse, + // Search + service::search::SearchResponse, + service::search::SearchResultSet, + service::search::SearchResultSet, + service::search::SearchResultSet, + service::search::SearchResultSet, + service::search::ProjectSearchItem, + service::search::RepoSearchItem, + service::search::IssueSearchItem, + service::search::UserSearchItem, + ) + ), + tags( + (name = "Auth", description = "Authentication and user identity"), + (name = "Agent", description = "AI agent model 
management"), + (name = "Git", description = "Git repository operations"), + (name = "Issues", description = "Issue tracking"), + (name = "Project", description = "Project management"), + (name = "PullRequest", description = "Pull request management"), + (name = "Room", description = "Real-time chat rooms"), + (name = "Search", description = "Global and room message search"), + (name = "User", description = "User profiles and settings"), + (name = "Workspace", description = "Workspace management and collaboration"), + ) +)] +pub struct OpenApiDoc; diff --git a/libs/api/project/activity.rs b/libs/api/project/activity.rs new file mode 100644 index 0000000..6b2a473 --- /dev/null +++ b/libs/api/project/activity.rs @@ -0,0 +1,88 @@ +use crate::{ApiResponse, error::ApiError}; +use actix_web::{HttpResponse, Result, web}; +use service::AppService; +use service::error::AppError; +use service::project::activity::{ActivityLogListResponse, ActivityLogParams, ActivityLogResponse}; +use session::Session; + +#[derive(serde::Deserialize, utoipa::IntoParams)] +pub struct ActivityQuery { + pub page: Option, + pub per_page: Option, + pub event_type: Option, + pub start_date: Option, + pub end_date: Option, +} + +impl From for service::project::activity::ActivityParams { + fn from(q: ActivityQuery) -> Self { + service::project::activity::ActivityParams { + event_type: q.event_type, + start_date: q.start_date, + end_date: q.end_date, + } + } +} + +#[utoipa::path( + get, + path = "/api/projects/{project_name}/activities", + params( + ("project_name" = String, Path), + ("page" = Option, Query), + ("per_page" = Option, Query), + ("event_type" = Option, Query), + ("start_date" = Option, Query, description = "ISO 8601 datetime, e.g. 2025-01-01T00:00:00Z"), + ("end_date" = Option, Query, description = "ISO 8601 datetime, e.g. 
2025-12-31T23:59:59Z"), + ), + responses( + (status = 200, description = "List project activities", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden — no access to this project"), + (status = 404, description = "Project not found"), + ), + tag = "Project" +)] +pub async fn project_activities( + service: web::Data, + session: Session, + path: web::Path, + query: web::Query, +) -> Result { + let project_name = path.into_inner(); + let page = query.page; + let per_page = query.per_page; + let params = service::project::activity::ActivityParams::from(query.into_inner()); + let resp = service + .project_get_activities(project_name, page, per_page, Some(params), &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + post, + path = "/api/projects/{project_name}/activities", + params(("project_name" = String, Path)), + request_body = ActivityLogParams, + responses( + (status = 200, description = "Activity logged", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 404, description = "Project not found"), + ), + tag = "Project" +)] +pub async fn project_log_activity( + service: web::Data, + session: Session, + path: web::Path, + body: web::Json, +) -> Result { + let project_name = path.into_inner(); + let _project = service.utils_find_project_by_name(project_name).await?; + let user_uid = session.user().ok_or(AppError::Unauthorized)?; + + let resp = service + .project_log_activity(_project.id, body.repo_id, user_uid, body.into_inner()) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} diff --git a/libs/api/project/audit.rs b/libs/api/project/audit.rs new file mode 100644 index 0000000..9b6b1a5 --- /dev/null +++ b/libs/api/project/audit.rs @@ -0,0 +1,82 @@ +use super::PageQuery; +use crate::{ApiResponse, error::ApiError}; +use actix_web::{HttpResponse, Result, web}; +use service::AppService; +use session::Session; + +#[utoipa::path( + get, + path 
= "/api/projects/{project_name}/audit-logs", + params( + ("project_name" = String, Path), + ("page" = Option, Query), + ("per_page" = Option, Query), + ), + responses( + (status = 200, description = "List project audit logs", body = ApiResponse>), + (status = 401, description = "Unauthorized"), + (status = 404, description = "Not found"), +), + tag = "Project" +)] +pub async fn project_audit_logs( + service: web::Data, + session: Session, + path: web::Path, + query: web::Query, +) -> Result { + let project_name = path.into_inner(); + let resp = service + .project_get_audit_logs(project_name, query.page, query.per_page, &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/projects/{project_name}/audit-logs/{log_id}", + params( + ("project_name" = String, Path), + ("log_id" = i64, Path), + ), + responses( + (status = 200, description = "Get project audit log", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 404, description = "Not found"), +), + tag = "Project" +)] +pub async fn project_audit_log( + service: web::Data, + session: Session, + path: web::Path<(String, i64)>, +) -> Result { + let (_project_name, log_id) = path.into_inner(); + let resp = service.project_get_audit_log(log_id, &session).await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + post, + path = "/api/projects/{project_name}/audit-logs", + params(("project_name" = String, Path)), + request_body = service::project::audit::AuditLogParams, + responses( + (status = 200, description = "Log project audit event", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 404, description = "Not found"), +), + tag = "Project" +)] +pub async fn project_log_audit( + service: web::Data, + session: Session, + path: web::Path, + body: web::Json, +) -> Result { + let project_name = path.into_inner(); + let resp = service + .project_log_audit(project_name, body.into_inner(), &session) + 
.await?; + Ok(ApiResponse::ok(resp).to_response()) +} diff --git a/libs/api/project/billing.rs b/libs/api/project/billing.rs new file mode 100644 index 0000000..59c99ef --- /dev/null +++ b/libs/api/project/billing.rs @@ -0,0 +1,53 @@ +use crate::{ApiResponse, error::ApiError}; +use actix_web::{HttpResponse, Result, web}; +use service::AppService; +use session::Session; + +#[utoipa::path( + get, + path = "/api/projects/{project_name}/billing", + params(("project_name" = String, Path)), + responses( + (status = 200, description = "Get project billing", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden"), + (status = 404, description = "Not found"), +), + tag = "Project" +)] +pub async fn project_billing( + service: web::Data, + session: Session, + path: web::Path, +) -> Result { + let project_name = path.into_inner(); + let resp = service + .project_billing_current(&session, project_name) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/projects/{project_name}/billing/history", + params(("project_name" = String, Path)), + responses( + (status = 200, description = "Get project billing history", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden"), + (status = 404, description = "Not found"), +), + tag = "Project" +)] +pub async fn project_billing_history( + service: web::Data, + session: Session, + path: web::Path, + query: web::Query, +) -> Result { + let project_name = path.into_inner(); + let resp = service + .project_billing_history(&session, project_name, query.into_inner()) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} diff --git a/libs/api/project/board.rs b/libs/api/project/board.rs new file mode 100644 index 0000000..6e5f1bc --- /dev/null +++ b/libs/api/project/board.rs @@ -0,0 +1,324 @@ +use crate::{ApiResponse, error::ApiError}; +use actix_web::{HttpResponse, Result, web}; +use 
service::AppService;
use service::project::board::{
    BoardResponse, BoardWithColumnsResponse, CardResponse, ColumnResponse, CreateBoardParams,
    CreateCardParams, CreateColumnParams, MoveCardParams, UpdateBoardParams, UpdateCardParams,
    UpdateColumnParams,
};
use session::Session;
use uuid::Uuid;

// NOTE(review): stripped `<...>` generics restored only where unambiguous;
// bare `ApiResponse`/`ApiResponse>` in utoipa attrs need restoring from the
// original file.

#[utoipa::path(
    get,
    path = "/api/projects/{project_name}/boards",
    params(
        ("project_name" = String, Path, description = "Project name"),
    ),
    responses(
        (status = 401, description = "Unauthorized"),
        (status = 200, description = "List boards", body = ApiResponse>),
    ),
    tag = "Project"
)]
/// Lists all kanban boards of a project.
pub async fn board_list(
    service: web::Data<AppService>,
    session: Session,
    path: web::Path<String>,
) -> Result<HttpResponse> {
    let project_name = path.into_inner();
    let boards = service.board_list(project_name, &session).await?;
    Ok(ApiResponse::ok(boards).to_response())
}

#[utoipa::path(
    get,
    path = "/api/projects/{project_name}/boards/{board_id}",
    params(
        ("project_name" = String, Path),
        ("board_id" = Uuid, Path),
    ),
    responses(
        (status = 401, description = "Unauthorized"),
        (status = 200, description = "Get board with columns and cards", body = ApiResponse),
        (status = 404, description = "Not found"),
    ),
    tag = "Project"
)]
/// Fetches one board together with its columns and cards.
pub async fn board_get(
    service: web::Data<AppService>,
    session: Session,
    path: web::Path<(String, Uuid)>,
) -> Result<HttpResponse> {
    let (project_name, board_id) = path.into_inner();
    let board = service.board_get(project_name, board_id, &session).await?;
    Ok(ApiResponse::ok(board).to_response())
}

#[utoipa::path(
    post,
    path = "/api/projects/{project_name}/boards",
    params(
        ("project_name" = String, Path),
    ),
    request_body = CreateBoardParams,
    responses(
        (status = 401, description = "Unauthorized"),
        (status = 200, description = "Create board", body = ApiResponse),
    ),
    tag = "Project"
)]
/// Creates a new board in the project.
pub async fn board_create(
    service: web::Data<AppService>,
    session: Session,
    path: web::Path<String>,
    body: web::Json<CreateBoardParams>,
) -> Result<HttpResponse> {
    let project_name = path.into_inner();
    let board = service
        .board_create(project_name, body.into_inner(), &session)
        .await?;
    Ok(ApiResponse::ok(board).to_response())
}

#[utoipa::path(
    patch,
    path = "/api/projects/{project_name}/boards/{board_id}",
    params(
        ("project_name" = String, Path),
        ("board_id" = Uuid, Path),
    ),
    request_body = UpdateBoardParams,
    responses(
        (status = 401, description = "Unauthorized"),
        (status = 200, description = "Update board", body = ApiResponse),
        (status = 404, description = "Not found"),
    ),
    tag = "Project"
)]
/// Applies a partial update to a board.
pub async fn board_update(
    service: web::Data<AppService>,
    session: Session,
    path: web::Path<(String, Uuid)>,
    body: web::Json<UpdateBoardParams>,
) -> Result<HttpResponse> {
    let (project_name, board_id) = path.into_inner();
    let board = service
        .board_update(project_name, board_id, body.into_inner(), &session)
        .await?;
    Ok(ApiResponse::ok(board).to_response())
}

#[utoipa::path(
    delete,
    path = "/api/projects/{project_name}/boards/{board_id}",
    params(
        ("project_name" = String, Path),
        ("board_id" = Uuid, Path),
    ),
    responses(
        (status = 401, description = "Unauthorized"),
        (status = 200, description = "Delete board"),
        (status = 404, description = "Not found"),
    ),
    tag = "Project"
)]
/// Deletes a board.
pub async fn board_delete(
    service: web::Data<AppService>,
    session: Session,
    path: web::Path<(String, Uuid)>,
) -> Result<HttpResponse> {
    let (project_name, board_id) = path.into_inner();
    service
        .board_delete(project_name, board_id, &session)
        .await?;
    Ok(crate::api_success())
}

#[utoipa::path(
    post,
    path = "/api/projects/{project_name}/boards/{board_id}/columns",
    params(
        ("project_name" = String, Path),
        ("board_id" = Uuid, Path),
    ),
    request_body = CreateColumnParams,
    responses(
        (status = 401, description = "Unauthorized"),
        (status = 200, description = "Create column", body = ApiResponse),
        (status = 404, description = "Board not found"),
    ),
    tag = "Project"
)]
/// Adds a column to an existing board.
pub async fn column_create(
    service: web::Data<AppService>,
    session: Session,
    path: web::Path<(String, Uuid)>,
    body: web::Json<CreateColumnParams>,
) -> Result<HttpResponse> {
    let (project_name, board_id) = path.into_inner();
    let column = service
        .column_create(project_name, board_id, body.into_inner(), &session)
        .await?;
    Ok(ApiResponse::ok(column).to_response())
}

#[utoipa::path(
    patch,
    path = "/api/projects/{project_name}/columns/{column_id}",
    params(
        ("project_name" = String, Path),
        ("column_id" = Uuid, Path),
    ),
    request_body = UpdateColumnParams,
    responses(
        (status = 401, description = "Unauthorized"),
        (status = 200, description = "Update column", body = ApiResponse),
        (status = 404, description = "Not found"),
    ),
    tag = "Project"
)]
/// Applies a partial update to a column.
pub async fn column_update(
    service: web::Data<AppService>,
    session: Session,
    path: web::Path<(String, Uuid)>,
    body: web::Json<UpdateColumnParams>,
) -> Result<HttpResponse> {
    let (project_name, column_id) = path.into_inner();
    let column = service
        .column_update(project_name, column_id, body.into_inner(), &session)
        .await?;
    Ok(ApiResponse::ok(column).to_response())
}

#[utoipa::path(
    delete,
    path = "/api/projects/{project_name}/columns/{column_id}",
    params(
        ("project_name" = String, Path),
        ("column_id" = Uuid, Path),
    ),
    responses(
        (status = 401, description = "Unauthorized"),
        (status = 200, description = "Delete column"),
        (status = 404, description = "Not found"),
    ),
    tag = "Project"
)]
/// Deletes a column.
pub async fn column_delete(
    service: web::Data<AppService>,
    session: Session,
    path: web::Path<(String, Uuid)>,
) -> Result<HttpResponse> {
    let (project_name, column_id) = path.into_inner();
    service
        .column_delete(project_name, column_id, &session)
        .await?;
    Ok(crate::api_success())
}

#[utoipa::path(
    post,
    path = "/api/projects/{project_name}/cards",
    params(
        ("project_name" = String, Path),
    ),
    request_body = CreateCardParams,
    responses(
        (status = 401, description = "Unauthorized"),
        (status = 200, description = "Create card", body = ApiResponse),
    ),
    tag = "Project"
)]
/// Creates a card (column is carried in the request body).
pub async fn card_create(
    service: web::Data<AppService>,
    session: Session,
    path: web::Path<String>,
    body: web::Json<CreateCardParams>,
) -> Result<HttpResponse> {
    let project_name = path.into_inner();
    let card = service
        .card_create(project_name, body.into_inner(), &session)
        .await?;
    Ok(ApiResponse::ok(card).to_response())
}

#[utoipa::path(
    patch,
    path = "/api/projects/{project_name}/cards/{card_id}",
    params(
        ("project_name" = String, Path),
        ("card_id" = Uuid, Path),
    ),
    request_body = UpdateCardParams,
    responses(
        (status = 401, description = "Unauthorized"),
        (status = 200, description = "Update card", body = ApiResponse),
        (status = 404, description = "Not found"),
    ),
    tag = "Project"
)]
/// Applies a partial update to a card.
pub async fn card_update(
    service: web::Data<AppService>,
    session: Session,
    path: web::Path<(String, Uuid)>,
    body: web::Json<UpdateCardParams>,
) -> Result<HttpResponse> {
    let (project_name, card_id) = path.into_inner();
    let card = service
        .card_update(project_name, card_id, body.into_inner(), &session)
        .await?;
    Ok(ApiResponse::ok(card).to_response())
}

#[utoipa::path(
    post,
    path = "/api/projects/{project_name}/cards/{card_id}/move",
    params(
        ("project_name" = String, Path),
        ("card_id" = Uuid, Path),
    ),
    request_body = MoveCardParams,
    responses(
        (status = 401, description = "Unauthorized"),
        (status = 200, description = "Move card", body = ApiResponse),
        (status = 404, description = "Not found"),
    ),
    tag = "Project"
)]
/// Moves a card between columns/positions.
pub async fn card_move(
    service: web::Data<AppService>,
    session: Session,
    path: web::Path<(String, Uuid)>,
    body: web::Json<MoveCardParams>,
) -> Result<HttpResponse> {
    let (project_name, card_id) = path.into_inner();
    let card = service
        .card_move(project_name, card_id, body.into_inner(), &session)
        .await?;
    Ok(ApiResponse::ok(card).to_response())
}

#[utoipa::path(
    delete,
    path = "/api/projects/{project_name}/cards/{card_id}",
    params(
        ("project_name" = String, Path),
        ("card_id" = Uuid, Path),
    ),
    responses(
        (status = 401, description = "Unauthorized"),
        (status = 200,
description = "Delete card"), + (status = 404, description = "Not found"), + ), + tag = "Project" +)] +pub async fn card_delete( + service: web::Data, + session: Session, + path: web::Path<(String, Uuid)>, +) -> Result { + let (project_name, card_id) = path.into_inner(); + service.card_delete(project_name, card_id, &session).await?; + Ok(crate::api_success()) +} diff --git a/libs/api/project/info.rs b/libs/api/project/info.rs new file mode 100644 index 0000000..efce56e --- /dev/null +++ b/libs/api/project/info.rs @@ -0,0 +1,25 @@ +use crate::{ApiResponse, error::ApiError}; +use actix_web::{HttpResponse, Result, web}; +use service::AppService; +use session::Session; + +#[utoipa::path( + get, + path = "/api/projects/{project_name}", + params(("project_name" = String, Path)), + responses( + (status = 200, description = "Get project info", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 404, description = "Not found"), +), + tag = "Project" +)] +pub async fn project_info( + service: web::Data, + session: Session, + path: web::Path, +) -> Result { + let project_name = path.into_inner(); + let resp = service.project_info(&session, project_name).await?; + Ok(ApiResponse::ok(resp).to_response()) +} diff --git a/libs/api/project/init.rs b/libs/api/project/init.rs new file mode 100644 index 0000000..9cd93b4 --- /dev/null +++ b/libs/api/project/init.rs @@ -0,0 +1,24 @@ +use crate::{ApiResponse, error::ApiError}; +use actix_web::{HttpResponse, Result, web}; +use service::AppService; +use session::Session; + +#[utoipa::path( + post, + path = "/api/projects", + request_body = service::project::init::ProjectInitParams, + responses( + (status = 200, description = "Create project", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 404, description = "Not found"), +), + tag = "Project" +)] +pub async fn project_create( + service: web::Data, + session: Session, + body: web::Json, +) -> Result { + let resp = 
service.project_init(&session, body.into_inner()).await?; + Ok(ApiResponse::ok(resp).to_response()) +} diff --git a/libs/api/project/invitation.rs b/libs/api/project/invitation.rs new file mode 100644 index 0000000..7f343cc --- /dev/null +++ b/libs/api/project/invitation.rs @@ -0,0 +1,168 @@ +use super::PageQuery; +use crate::{ApiResponse, error::ApiError}; +use actix_web::{HttpResponse, Result, web}; +use models::projects::MemberRole; +use service::AppService; +use session::Session; + +#[derive(serde::Deserialize, utoipa::ToSchema)] +pub struct InviteUserRequest { + pub email: String, + pub scope: MemberRole, +} + +#[utoipa::path( + get, + path = "/api/projects/{project_name}/invitations", + params( + ("project_name" = String, Path), + ("page" = Option, Query), + ("per_page" = Option, Query), + ), + responses( + (status = 200, description = "List project invitations", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden"), + (status = 404, description = "Not found"), +), + tag = "Project" +)] +pub async fn project_invitations( + service: web::Data, + session: Session, + path: web::Path, + query: web::Query, +) -> Result { + let project_name = path.into_inner(); + let resp = service + .project_get_invitations(project_name, query.page, query.per_page, &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/projects/me/invitations", + responses( + (status = 200, description = "List my invitations", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 404, description = "Not found"), +), + tag = "Project" +)] +pub async fn project_my_invitations( + service: web::Data, + session: Session, + query: web::Query, +) -> Result { + let resp = service + .project_get_my_invitations(query.page, query.per_page, &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + post, + path = 
"/api/projects/{project_name}/invitations",
    params(("project_name" = String, Path)),
    request_body = InviteUserRequest,
    responses(
        (status = 200, description = "Invite user to project"),
        (status = 401, description = "Unauthorized"),
        (status = 403, description = "Forbidden"),
        (status = 404, description = "Not found"),
    ),
    tag = "Project"
)]
/// Invites a user (by email) into the project with the requested role.
pub async fn project_invite_user(
    service: web::Data<AppService>,
    session: Session,
    path: web::Path<String>,
    body: web::Json<InviteUserRequest>,
) -> Result<HttpResponse> {
    let project_name = path.into_inner();
    service
        .project_invite_user(
            project_name,
            body.email.clone(),
            body.scope.clone(),
            &session,
        )
        .await?;
    Ok(ApiResponse::ok(serde_json::json!({ "success": true })).to_response())
}

#[utoipa::path(
    post,
    path = "/api/projects/{project_name}/invitations/accept",
    params(("project_name" = String, Path)),
    responses(
        (status = 200, description = "Accept project invitation"),
        (status = 401, description = "Unauthorized"),
        (status = 404, description = "Not found"),
    ),
    tag = "Project"
)]
/// Accepts the session user's pending invitation to the project.
pub async fn project_accept_invitation(
    service: web::Data<AppService>,
    session: Session,
    path: web::Path<String>,
) -> Result<HttpResponse> {
    let project_name = path.into_inner();
    service
        .project_accept_invitation(project_name, &session)
        .await?;
    Ok(ApiResponse::ok(serde_json::json!({ "success": true })).to_response())
}

#[utoipa::path(
    post,
    path = "/api/projects/{project_name}/invitations/reject",
    params(("project_name" = String, Path)),
    responses(
        (status = 200, description = "Reject project invitation"),
        (status = 401, description = "Unauthorized"),
        (status = 404, description = "Not found"),
    ),
    tag = "Project"
)]
/// Rejects the session user's pending invitation to the project.
pub async fn project_reject_invitation(
    service: web::Data<AppService>,
    session: Session,
    path: web::Path<String>,
) -> Result<HttpResponse> {
    let project_name = path.into_inner();
    service
        .project_reject_invitation(project_name, &session)
        .await?;
    Ok(ApiResponse::ok(serde_json::json!({ "success": true })).to_response())
}

#[utoipa::path(
    delete,
    path = "/api/projects/{project_name}/invitations/{user_id}",
    params(
        ("project_name" = String, Path),
        ("user_id" = String, Path),
    ),
    responses(
        (status = 200, description = "Cancel project invitation"),
        (status = 401, description = "Unauthorized"),
        (status = 403, description = "Forbidden"),
        (status = 404, description = "Not found"),
    ),
    tag = "Project"
)]
/// Cancels a pending invitation for the given user id (UUID in the path).
pub async fn project_cancel_invitation(
    service: web::Data<AppService>,
    session: Session,
    path: web::Path<(String, String)>,
) -> Result<HttpResponse> {
    let (project_name, user_id) = path.into_inner();
    // Reject malformed UUIDs with a 400 instead of a service-layer lookup miss.
    let user_uuid = uuid::Uuid::parse_str(&user_id)
        .map_err(|_| service::error::AppError::BadRequest("Invalid UUID".to_string()))?;
    service
        .project_cancel_invitation(project_name, user_uuid, &session)
        .await?;
    Ok(ApiResponse::ok(serde_json::json!({ "success": true })).to_response())
}

// ---- libs/api/project/join_answers.rs ----
// NOTE(review): stripped `<...>` generics restored only where unambiguous.
use crate::{ApiResponse, error::ApiError};
use actix_web::{HttpResponse, Result, web};
use service::AppService;
use session::Session;

#[utoipa::path(
    get,
    path = "/api/projects/{project_name}/join-requests/{request_id}/answers",
    params(
        ("project_name" = String, Path),
        ("request_id" = i64, Path),
    ),
    responses(
        (status = 200, description = "Get join request answers", body = ApiResponse),
        (status = 401, description = "Unauthorized"),
        (status = 403, description = "Forbidden"),
        (status = 404, description = "Not found"),
    ),
    tag = "Project"
)]
/// Returns the answers submitted with a join request.
pub async fn project_join_answers(
    service: web::Data<AppService>,
    session: Session,
    path: web::Path<(String, i64)>,
) -> Result<HttpResponse> {
    let (project_name, request_id) = path.into_inner();
    let resp = service
        .project_get_join_answers(project_name, request_id, &session)
        .await?;
    Ok(ApiResponse::ok(resp).to_response())
}
#[utoipa::path(
    post,
    path = "/api/projects/{project_name}/join-requests/{request_id}/answers",
    params(
        ("project_name" = String, Path),
        ("request_id" = i64, Path),
    ),
    request_body = Vec, // NOTE(review): element type stripped by paste — restore
    responses(
        (status = 200, description = "Submit join request answers"),
        (status = 401, description = "Unauthorized"),
        (status = 404, description = "Not found"),
    ),
    tag = "Project"
)]
/// Stores the answer list for a join request.
pub async fn project_submit_join_answers(
    service: web::Data<AppService>,
    session: Session,
    path: web::Path<(String, i64)>,
    body: web::Json>, // NOTE(review): `Vec<...>` parameter stripped by paste — restore
) -> Result<HttpResponse> {
    let (project_name, request_id) = path.into_inner();
    service
        .project_submit_join_answers(project_name, request_id, body.into_inner(), &session)
        .await?;
    Ok(ApiResponse::ok(serde_json::json!({ "success": true })).to_response())
}

// ---- libs/api/project/join_request.rs ----
// NOTE(review): stripped `<...>` generics restored only where unambiguous.
use super::{JoinRequestQuery, PageQuery};
use crate::{ApiResponse, error::ApiError};
use actix_web::{HttpResponse, Result, web};
use service::AppService;
use session::Session;

#[utoipa::path(
    get,
    path = "/api/projects/{project_name}/join-requests",
    params(
        ("project_name" = String, Path),
        ("status" = Option, Query),
        ("page" = Option, Query),
        ("per_page" = Option, Query),
    ),
    responses(
        (status = 200, description = "List join requests", body = ApiResponse),
        (status = 401, description = "Unauthorized"),
        (status = 403, description = "Forbidden"),
        (status = 404, description = "Not found"),
    ),
    tag = "Project"
)]
/// Lists the project's join requests, optionally filtered by status.
pub async fn project_join_requests(
    service: web::Data<AppService>,
    session: Session,
    path: web::Path<String>,
    query: web::Query<JoinRequestQuery>,
) -> Result<HttpResponse> {
    let project_name = path.into_inner();
    let resp = service
        .project_get_join_requests(
            project_name,
            query.status.clone(),
            query.page,
            query.per_page,
            &session,
        )
        .await?;
    Ok(ApiResponse::ok(resp).to_response())
}

#[utoipa::path(
    post,
    path = "/api/projects/{project_name}/join-requests",
    params(("project_name" = String, Path)),
    request_body = service::project::join_request::SubmitJoinRequest,
    responses(
        (status = 200, description = "Submit join request"),
        (status = 401, description = "Unauthorized"),
        (status = 404, description = "Not found"),
    ),
    tag = "Project"
)]
/// Submits a join request; responds with the new request id.
pub async fn project_submit_join_request(
    service: web::Data<AppService>,
    session: Session,
    path: web::Path<String>,
    body: web::Json<service::project::join_request::SubmitJoinRequest>,
) -> Result<HttpResponse> {
    let project_name = path.into_inner();
    let resp = service
        .project_submit_join_request(project_name, body.into_inner(), &session)
        .await?;
    Ok(ApiResponse::ok(serde_json::json!({ "request_id": resp })).to_response())
}

#[utoipa::path(
    patch,
    path = "/api/projects/{project_name}/join-requests/{request_id}",
    params(
        ("project_name" = String, Path),
        ("request_id" = i64, Path),
    ),
    request_body = service::project::join_request::ProcessJoinRequest,
    responses(
        (status = 200, description = "Process join request"),
        (status = 401, description = "Unauthorized"),
        (status = 403, description = "Forbidden"),
        (status = 404, description = "Not found"),
    ),
    tag = "Project"
)]
/// Approves or rejects a pending join request.
pub async fn project_process_join_request(
    service: web::Data<AppService>,
    session: Session,
    path: web::Path<(String, i64)>,
    body: web::Json<service::project::join_request::ProcessJoinRequest>,
) -> Result<HttpResponse> {
    let (project_name, request_id) = path.into_inner();
    service
        .project_process_join_request(project_name, request_id, body.into_inner(), &session)
        .await?;
    Ok(ApiResponse::ok(serde_json::json!({ "success": true })).to_response())
}

#[utoipa::path(
    delete,
    path = "/api/projects/{project_name}/join-requests/{request_id}",
    params(
        ("project_name" = String, Path),
        ("request_id" = i64, Path),
    ),
    responses(
        (status = 200, description = "Cancel join request"),
        (status = 401, description = "Unauthorized"),
        (status = 404, description = "Not found"),
    ),
    tag = "Project"
)]
/// Cancels the session user's own join request.
pub async fn project_cancel_join_request(
    service: web::Data<AppService>,
    session: Session,
    path: web::Path<(String, i64)>,
) -> Result<HttpResponse> {
    let (project_name, request_id) = path.into_inner();
    service
        .project_cancel_join_request(project_name, request_id, &session)
        .await?;
    Ok(ApiResponse::ok(serde_json::json!({ "success": true })).to_response())
}

#[utoipa::path(
    get,
    path = "/api/projects/me/join-requests",
    responses(
        (status = 200, description = "List my join requests", body = ApiResponse),
        (status = 401, description = "Unauthorized"),
        (status = 404, description = "Not found"),
    ),
    tag = "Project"
)]
/// Lists all join requests submitted by the session user.
pub async fn project_my_join_requests(
    service: web::Data<AppService>,
    session: Session,
    query: web::Query<PageQuery>,
) -> Result<HttpResponse> {
    let resp = service
        .project_get_my_join_requests(query.page, query.per_page, &session)
        .await?;
    Ok(ApiResponse::ok(resp).to_response())
}

// ---- libs/api/project/join_settings.rs ----
use crate::{ApiResponse, error::ApiError};
use actix_web::{HttpResponse, Result, web};
use service::AppService;
use session::Session;

#[utoipa::path(
    get,
    path = "/api/projects/{project_name}/join-settings",
    params(("project_name" = String, Path)),
    responses(
        (status = 200, description = "Get join settings", body = ApiResponse),
        (status = 401, description = "Unauthorized"),
        (status = 404, description = "Not found"),
    ),
    tag = "Project"
)]
/// Returns the project's join settings.
pub async fn project_join_settings(
    service: web::Data<AppService>,
    session: Session,
    path: web::Path<String>,
) -> Result<HttpResponse> {
    let project_name = path.into_inner();
    let resp = service
        .project_get_join_settings(project_name, &session)
        .await?;
    Ok(ApiResponse::ok(resp).to_response())
}

#[utoipa::path(
    patch,
    path = "/api/projects/{project_name}/join-settings",
    params(("project_name" = String, Path)),
request_body = service::project::join_settings::UpdateJoinSettingsRequest, + responses( + (status = 200, description = "Update join settings", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden"), + (status = 404, description = "Not found"), +), + tag = "Project" +)] +pub async fn project_update_join_settings( + service: web::Data, + session: Session, + path: web::Path, + body: web::Json, +) -> Result { + let project_name = path.into_inner(); + let resp = service + .project_update_join_settings(project_name, body.into_inner(), &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} diff --git a/libs/api/project/labels.rs b/libs/api/project/labels.rs new file mode 100644 index 0000000..cebc106 --- /dev/null +++ b/libs/api/project/labels.rs @@ -0,0 +1,129 @@ +use crate::{ApiResponse, error::ApiError}; +use actix_web::{HttpResponse, Result, web}; +use service::AppService; +use session::Session; + +#[utoipa::path( + get, + path = "/api/projects/{project_name}/labels", + params(("project_name" = String, Path)), + responses( + (status = 200, description = "List project labels", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 404, description = "Not found"), +), + tag = "Project" +)] +pub async fn project_labels( + service: web::Data, + _session: Session, + path: web::Path, +) -> Result { + let project_name = path.into_inner(); + let resp = service.project_get_labels(project_name).await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + post, + path = "/api/projects/{project_name}/labels", + params(("project_name" = String, Path)), + request_body = service::project::labels::CreateLabelParams, + responses( + (status = 200, description = "Create project label", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden"), + (status = 404, description = "Not found"), +), + tag = "Project" +)] +pub async fn 
project_create_label( + service: web::Data, + session: Session, + path: web::Path, + body: web::Json, +) -> Result { + let project_name = path.into_inner(); + let resp = service + .project_create_label(project_name, body.into_inner(), &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/projects/{project_name}/labels/{label_id}", + params( + ("project_name" = String, Path), + ("label_id" = i64, Path), + ), + responses( + (status = 200, description = "Get project label", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 404, description = "Not found"), +), + tag = "Project" +)] +pub async fn project_get_label( + service: web::Data, + _session: Session, + path: web::Path<(String, i64)>, +) -> Result { + let (_project_name, label_id) = path.into_inner(); + let resp = service.project_get_label(label_id).await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + patch, + path = "/api/projects/{project_name}/labels/{label_id}", + params( + ("project_name" = String, Path), + ("label_id" = i64, Path), + ), + request_body = service::project::labels::UpdateLabelParams, + responses( + (status = 200, description = "Update project label", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden"), + (status = 404, description = "Not found"), +), + tag = "Project" +)] +pub async fn project_update_label( + service: web::Data, + session: Session, + path: web::Path<(String, i64)>, + body: web::Json, +) -> Result { + let (_project_name, label_id) = path.into_inner(); + let resp = service + .project_update_label(label_id, body.into_inner(), &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + delete, + path = "/api/projects/{project_name}/labels/{label_id}", + params( + ("project_name" = String, Path), + ("label_id" = i64, Path), + ), + responses( + (status = 200, description = "Delete project 
label"), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden"), + (status = 404, description = "Not found"), +), + tag = "Project" +)] +pub async fn project_delete_label( + service: web::Data, + session: Session, + path: web::Path<(String, i64)>, +) -> Result { + let (_project_name, label_id) = path.into_inner(); + service.project_delete_label(label_id, &session).await?; + Ok(ApiResponse::ok(serde_json::json!({ "success": true })).to_response()) +} diff --git a/libs/api/project/like.rs b/libs/api/project/like.rs new file mode 100644 index 0000000..93e016a --- /dev/null +++ b/libs/api/project/like.rs @@ -0,0 +1,116 @@ +use super::UserPagerQuery; +use crate::{ApiResponse, error::ApiError}; +use actix_web::{HttpResponse, Result, web}; +use service::AppService; +use session::Session; + +#[derive(serde::Serialize, utoipa::ToSchema)] +pub struct IsLikeResponse { + pub is_like: bool, +} + +#[utoipa::path( + post, + path = "/api/projects/{project_name}/like", + params(("project_name" = String, Path)), + responses( + (status = 200, description = "Like project"), + (status = 401, description = "Unauthorized"), + (status = 404, description = "Not found"), +), + tag = "Project" +)] +pub async fn project_like( + service: web::Data, + session: Session, + path: web::Path, +) -> Result { + let project_name = path.into_inner(); + service.project_like(&session, project_name).await?; + Ok(ApiResponse::ok(serde_json::json!({ "success": true })).to_response()) +} + +#[utoipa::path( + delete, + path = "/api/projects/{project_name}/like", + params(("project_name" = String, Path)), + responses( + (status = 200, description = "Unlike project"), + (status = 401, description = "Unauthorized"), + (status = 404, description = "Not found"), +), + tag = "Project" +)] +pub async fn project_unlike( + service: web::Data, + session: Session, + path: web::Path, +) -> Result { + let project_name = path.into_inner(); + service.project_unlike(&session, 
project_name).await?; + Ok(ApiResponse::ok(serde_json::json!({ "success": true })).to_response()) +} + +#[utoipa::path( + get, + path = "/api/projects/{project_name}/like", + params(("project_name" = String, Path)), + responses( + (status = 200, description = "Check if user likes project", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 404, description = "Not found"), +), + tag = "Project" +)] +pub async fn project_is_like( + service: web::Data, + session: Session, + path: web::Path, +) -> Result { + let project_name = path.into_inner(); + let resp = service.project_is_like(&session, project_name).await?; + Ok(ApiResponse::ok(IsLikeResponse { is_like: resp }).to_response()) +} + +#[utoipa::path( + get, + path = "/api/projects/{project_name}/likes/count", + params(("project_name" = String, Path)), + responses( + (status = 200, description = "Get like count"), + (status = 401, description = "Unauthorized"), + (status = 404, description = "Not found"), +), + tag = "Project" +)] +pub async fn project_likes_count( + service: web::Data, + path: web::Path, +) -> Result { + let project_name = path.into_inner(); + let resp = service.project_likes(project_name).await?; + Ok(ApiResponse::ok(serde_json::json!({ "count": resp })).to_response()) +} + +#[utoipa::path( + get, + path = "/api/projects/{project_name}/likes/users", + params(("project_name" = String, Path)), + responses( + (status = 200, description = "List users who liked project", body = ApiResponse>), + (status = 401, description = "Unauthorized"), + (status = 404, description = "Not found"), +), + tag = "Project" +)] +pub async fn project_like_users( + service: web::Data, + path: web::Path, + query: web::Query, +) -> Result { + let project_name = path.into_inner(); + let resp = service + .project_like_user_list(project_name, query.into_inner().into()) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} diff --git a/libs/api/project/members.rs b/libs/api/project/members.rs new 
file mode 100644 index 0000000..4079b0c --- /dev/null +++ b/libs/api/project/members.rs @@ -0,0 +1,88 @@ +use super::PageQuery; +use crate::{ApiResponse, error::ApiError}; +use actix_web::{HttpResponse, Result, web}; +use service::AppService; +use session::Session; + +#[utoipa::path( + get, + path = "/api/projects/{project_name}/members", + params( + ("project_name" = String, Path), + ("page" = Option, Query), + ("per_page" = Option, Query), + ), + responses( + (status = 200, description = "List project members", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 404, description = "Not found"), +), + tag = "Project" +)] +pub async fn project_members( + service: web::Data, + session: Session, + path: web::Path, + query: web::Query, +) -> Result { + let project_name = path.into_inner(); + let resp = service + .project_get_members(project_name, query.page, query.per_page, &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + patch, + path = "/api/projects/{project_name}/members/role", + params(("project_name" = String, Path)), + request_body = service::project::members::UpdateMemberRoleRequest, + responses( + (status = 200, description = "Update member role"), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden"), + (status = 404, description = "Not found"), +), + tag = "Project" +)] +pub async fn project_update_member_role( + service: web::Data, + session: Session, + path: web::Path, + body: web::Json, +) -> Result { + let project_name = path.into_inner(); + service + .project_update_member_role(project_name, body.into_inner(), &session) + .await?; + Ok(ApiResponse::ok(serde_json::json!({ "success": true })).to_response()) +} + +#[utoipa::path( + delete, + path = "/api/projects/{project_name}/members/{user_id}", + params( + ("project_name" = String, Path), + ("user_id" = String, Path), + ), + responses( + (status = 200, description = "Remove member from project"), + 
(status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden"), + (status = 404, description = "Not found"), +), + tag = "Project" +)] +pub async fn project_remove_member( + service: web::Data, + session: Session, + path: web::Path<(String, String)>, +) -> Result { + let (project_name, user_id) = path.into_inner(); + let user_uuid = uuid::Uuid::parse_str(&user_id) + .map_err(|_| service::error::AppError::BadRequest("Invalid UUID".to_string()))?; + service + .project_remove_member(project_name, user_uuid, &session) + .await?; + Ok(ApiResponse::ok(serde_json::json!({ "success": true })).to_response()) +} diff --git a/libs/api/project/mod.rs b/libs/api/project/mod.rs new file mode 100644 index 0000000..94fd257 --- /dev/null +++ b/libs/api/project/mod.rs @@ -0,0 +1,281 @@ +pub mod activity; +pub mod audit; +pub mod billing; +pub mod board; +pub mod info; +pub mod init; +pub mod invitation; +pub mod join_answers; +pub mod join_request; +pub mod join_settings; +pub mod labels; +pub mod like; +pub mod members; +pub mod repo; +pub mod settings; +pub mod transfer_repo; +pub mod watch; + +use actix_web::web; + +#[derive(serde::Deserialize, utoipa::IntoParams)] +pub struct PageQuery { + pub page: Option, + pub per_page: Option, +} + +#[derive(serde::Deserialize, utoipa::IntoParams)] +pub struct RepoPagerQuery { + pub limit: Option, + pub cursor: Option, +} + +#[derive(serde::Deserialize, utoipa::IntoParams)] +pub struct UserPagerQuery { + pub page: Option, + pub par_page: Option, +} + +impl From for service::Pager { + fn from(q: UserPagerQuery) -> Self { + service::Pager { + page: q.page.unwrap_or(1), + par_page: q.par_page.unwrap_or(20), + } + } +} + +#[derive(serde::Deserialize, utoipa::IntoParams)] +pub struct JoinRequestQuery { + pub status: Option, + pub page: Option, + pub per_page: Option, +} + +pub fn init_project_routes(cfg: &mut web::ServiceConfig) { + cfg.service( + web::scope("/projects") + .route("", 
web::post().to(init::project_create)) + .route( + "/me/invitations", + web::get().to(invitation::project_my_invitations), + ) + .route( + "/me/join-requests", + web::get().to(join_request::project_my_join_requests), + ) + .route("/{project_name}", web::get().to(info::project_info)) + .route("/{project_name}/repos", web::get().to(repo::project_repos)) + .route( + "/{project_name}/repos", + web::post().to(repo::project_repo_create), + ) + .route( + "/{project_name}/members", + web::get().to(members::project_members), + ) + .route( + "/{project_name}/members/role", + web::patch().to(members::project_update_member_role), + ) + .route( + "/{project_name}/members/{user_id}", + web::delete().to(members::project_remove_member), + ) + .route( + "/{project_name}/labels", + web::get().to(labels::project_labels), + ) + .route( + "/{project_name}/labels", + web::post().to(labels::project_create_label), + ) + .route( + "/{project_name}/labels/{label_id}", + web::get().to(labels::project_get_label), + ) + .route( + "/{project_name}/labels/{label_id}", + web::patch().to(labels::project_update_label), + ) + .route( + "/{project_name}/labels/{label_id}", + web::delete().to(labels::project_delete_label), + ) + .route("/{project_name}/like", web::post().to(like::project_like)) + .route( + "/{project_name}/like", + web::delete().to(like::project_unlike), + ) + .route("/{project_name}/like", web::get().to(like::project_is_like)) + .route( + "/{project_name}/likes/count", + web::get().to(like::project_likes_count), + ) + .route( + "/{project_name}/likes/users", + web::get().to(like::project_like_users), + ) + .route( + "/{project_name}/watch", + web::post().to(watch::project_watch), + ) + .route( + "/{project_name}/watch", + web::delete().to(watch::project_unwatch), + ) + .route( + "/{project_name}/watch", + web::get().to(watch::project_is_watch), + ) + .route( + "/{project_name}/watches/count", + web::get().to(watch::project_watches_count), + ) + .route( + 
"/{project_name}/watches/users", + web::get().to(watch::project_watch_users), + ) + .route( + "/{project_name}/settings/name", + web::patch().to(settings::project_exchange_name), + ) + .route( + "/{project_name}/settings/visibility", + web::patch().to(settings::project_exchange_visibility), + ) + .route( + "/{project_name}/settings/title", + web::patch().to(settings::project_exchange_title), + ) + .route( + "/{project_name}/audit-logs", + web::get().to(audit::project_audit_logs), + ) + .route( + "/{project_name}/audit-logs/{log_id}", + web::get().to(audit::project_audit_log), + ) + .route( + "/{project_name}/audit-logs", + web::post().to(audit::project_log_audit), + ) + .route( + "/{project_name}/activities", + web::get().to(activity::project_activities), + ) + .route( + "/{project_name}/activities", + web::post().to(activity::project_log_activity), + ) + .route( + "/{project_name}/billing", + web::get().to(billing::project_billing), + ) + .route( + "/{project_name}/billing/history", + web::get().to(billing::project_billing_history), + ) + .route( + "/{project_name}/invitations", + web::get().to(invitation::project_invitations), + ) + .route( + "/{project_name}/invitations", + web::post().to(invitation::project_invite_user), + ) + .route( + "/{project_name}/invitations/accept", + web::post().to(invitation::project_accept_invitation), + ) + .route( + "/{project_name}/invitations/reject", + web::post().to(invitation::project_reject_invitation), + ) + .route( + "/{project_name}/invitations/{user_id}", + web::delete().to(invitation::project_cancel_invitation), + ) + .route( + "/{project_name}/join-settings", + web::get().to(join_settings::project_join_settings), + ) + .route( + "/{project_name}/join-settings", + web::patch().to(join_settings::project_update_join_settings), + ) + .route( + "/{project_name}/join-requests", + web::get().to(join_request::project_join_requests), + ) + .route( + "/{project_name}/join-requests", + 
web::post().to(join_request::project_submit_join_request), + ) + .route( + "/{project_name}/join-requests/{request_id}", + web::patch().to(join_request::project_process_join_request), + ) + .route( + "/{project_name}/join-requests/{request_id}", + web::delete().to(join_request::project_cancel_join_request), + ) + .route( + "/{project_name}/join-requests/{request_id}/answers", + web::get().to(join_answers::project_join_answers), + ) + .route( + "/{project_name}/join-requests/{request_id}/answers", + web::post().to(join_answers::project_submit_join_answers), + ) + .route( + "/{source_project}/repos/{repo_name}/transfer", + web::post().to(transfer_repo::project_transfer_repo), + ) + // Boards + .route("/{project_name}/boards", web::get().to(board::board_list)) + .route( + "/{project_name}/boards", + web::post().to(board::board_create), + ) + .route( + "/{project_name}/boards/{board_id}", + web::get().to(board::board_get), + ) + .route( + "/{project_name}/boards/{board_id}", + web::patch().to(board::board_update), + ) + .route( + "/{project_name}/boards/{board_id}", + web::delete().to(board::board_delete), + ) + // Columns + .route( + "/{project_name}/boards/{board_id}/columns", + web::post().to(board::column_create), + ) + .route( + "/{project_name}/columns/{column_id}", + web::patch().to(board::column_update), + ) + .route( + "/{project_name}/columns/{column_id}", + web::delete().to(board::column_delete), + ) + // Cards + .route("/{project_name}/cards", web::post().to(board::card_create)) + .route( + "/{project_name}/cards/{card_id}", + web::patch().to(board::card_update), + ) + .route( + "/{project_name}/cards/{card_id}/move", + web::post().to(board::card_move), + ) + .route( + "/{project_name}/cards/{card_id}", + web::delete().to(board::card_delete), + ), + ); +} diff --git a/libs/api/project/repo.rs b/libs/api/project/repo.rs new file mode 100644 index 0000000..ee20c25 --- /dev/null +++ b/libs/api/project/repo.rs @@ -0,0 +1,57 @@ +use crate::{ApiResponse, 
error::ApiError}; +use actix_web::{HttpResponse, Result, web}; +use service::AppService; +use service::project::repo::{ + ProjectRepoCreateParams, ProjectRepoCreateResponse, ProjectRepositoryQuery, +}; +use session::Session; + +#[utoipa::path( + get, + path = "/api/projects/{project_name}/repos", + params(("project_name" = String, Path)), + responses( + (status = 200, description = "Get project repositories", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 404, description = "Project not found"), + ), + tag = "Project" +)] +pub async fn project_repos( + service: web::Data, + session: Session, + path: web::Path, + query: web::Query, +) -> Result { + let project_name = path.into_inner(); + let resp = service + .project_repo(&session, project_name, query.into_inner()) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + post, + path = "/api/projects/{project_name}/repos", + params(("project_name" = String, Path)), + request_body = ProjectRepoCreateParams, + responses( + (status = 200, description = "Create a repository", body = ApiResponse), + (status = 400, description = "Bad request"), + (status = 401, description = "Unauthorized"), + (status = 409, description = "Repository name already exists"), + ), + tag = "Project" +)] +pub async fn project_repo_create( + service: web::Data, + session: Session, + path: web::Path, + body: web::Json, +) -> Result { + let project_name = path.into_inner(); + let resp = service + .project_repo_create(&session, project_name, body.into_inner()) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} diff --git a/libs/api/project/settings.rs b/libs/api/project/settings.rs new file mode 100644 index 0000000..3ff391c --- /dev/null +++ b/libs/api/project/settings.rs @@ -0,0 +1,82 @@ +use crate::{ApiResponse, error::ApiError}; +use actix_web::{HttpResponse, Result, web}; +use service::AppService; +use session::Session; + +#[utoipa::path( + patch, + path = 
"/api/projects/{project_name}/settings/name", + params(("project_name" = String, Path)), + request_body = service::project::settings::ExchangeProjectName, + responses( + (status = 200, description = "Update project name"), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden"), + (status = 404, description = "Not found"), +), + tag = "Project" +)] +pub async fn project_exchange_name( + service: web::Data, + session: Session, + path: web::Path, + body: web::Json, +) -> Result { + let project_name = path.into_inner(); + service + .project_exchange_name(&session, project_name, body.into_inner()) + .await?; + Ok(ApiResponse::ok(serde_json::json!({ "success": true })).to_response()) +} + +#[utoipa::path( + patch, + path = "/api/projects/{project_name}/settings/visibility", + params(("project_name" = String, Path)), + request_body = service::project::settings::ExchangeProjectVisibility, + responses( + (status = 200, description = "Update project visibility"), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden"), + (status = 404, description = "Not found"), +), + tag = "Project" +)] +pub async fn project_exchange_visibility( + service: web::Data, + session: Session, + path: web::Path, + body: web::Json, +) -> Result { + let project_name = path.into_inner(); + service + .project_exchange_visibility(&session, project_name, body.into_inner()) + .await?; + Ok(ApiResponse::ok(serde_json::json!({ "success": true })).to_response()) +} + +#[utoipa::path( + patch, + path = "/api/projects/{project_name}/settings/title", + params(("project_name" = String, Path)), + request_body = service::project::settings::ExchangeProjectTitle, + responses( + (status = 200, description = "Update project title"), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden"), + (status = 404, description = "Not found"), +), + tag = "Project" +)] +pub async fn project_exchange_title( + service: 
web::Data, + session: Session, + path: web::Path, + body: web::Json, +) -> Result { + let project_name = path.into_inner(); + service + .project_exchange_title(&session, project_name, body.into_inner()) + .await?; + Ok(ApiResponse::ok(serde_json::json!({ "success": true })).to_response()) +} diff --git a/libs/api/project/transfer_repo.rs b/libs/api/project/transfer_repo.rs new file mode 100644 index 0000000..9ad2358 --- /dev/null +++ b/libs/api/project/transfer_repo.rs @@ -0,0 +1,33 @@ +use crate::{ApiResponse, error::ApiError}; +use actix_web::{HttpResponse, Result, web}; +use service::AppService; +use session::Session; + +#[utoipa::path( + post, + path = "/api/projects/{source_project}/repos/{repo_name}/transfer", + params( + ("source_project" = String, Path), + ("repo_name" = String, Path), + ), + request_body = service::project::transfer_repo::TransferRepoParams, + responses( + (status = 200, description = "Transfer repo to another project", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden"), + (status = 404, description = "Not found"), +), + tag = "Project" +)] +pub async fn project_transfer_repo( + service: web::Data, + session: Session, + path: web::Path<(String, String)>, + body: web::Json, +) -> Result { + let (source_project_name, repo_name) = path.into_inner(); + let resp = service + .transfer_repo(&session, source_project_name, repo_name, body.into_inner()) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} diff --git a/libs/api/project/watch.rs b/libs/api/project/watch.rs new file mode 100644 index 0000000..a91c489 --- /dev/null +++ b/libs/api/project/watch.rs @@ -0,0 +1,116 @@ +use super::UserPagerQuery; +use crate::{ApiResponse, error::ApiError}; +use actix_web::{HttpResponse, Result, web}; +use service::AppService; +use session::Session; + +#[derive(serde::Serialize, utoipa::ToSchema)] +pub struct IsWatchResponse { + pub is_watching: bool, +} + +#[utoipa::path( + post, + path = 
"/api/projects/{project_name}/watch", + params(("project_name" = String, Path)), + responses( + (status = 200, description = "Watch project"), + (status = 401, description = "Unauthorized"), + (status = 404, description = "Not found"), +), + tag = "Project" +)] +pub async fn project_watch( + service: web::Data, + session: Session, + path: web::Path, +) -> Result { + let project_name = path.into_inner(); + service.project_watch(&session, project_name).await?; + Ok(ApiResponse::ok(serde_json::json!({ "success": true })).to_response()) +} + +#[utoipa::path( + delete, + path = "/api/projects/{project_name}/watch", + params(("project_name" = String, Path)), + responses( + (status = 200, description = "Unwatch project"), + (status = 401, description = "Unauthorized"), + (status = 404, description = "Not found"), +), + tag = "Project" +)] +pub async fn project_unwatch( + service: web::Data, + session: Session, + path: web::Path, +) -> Result { + let project_name = path.into_inner(); + service.project_unwatch(&session, project_name).await?; + Ok(ApiResponse::ok(serde_json::json!({ "success": true })).to_response()) +} + +#[utoipa::path( + get, + path = "/api/projects/{project_name}/watch", + params(("project_name" = String, Path)), + responses( + (status = 200, description = "Check if user watches project", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 404, description = "Not found"), +), + tag = "Project" +)] +pub async fn project_is_watch( + service: web::Data, + session: Session, + path: web::Path, +) -> Result { + let project_name = path.into_inner(); + let resp = service.project_is_watch(&session, project_name).await?; + Ok(ApiResponse::ok(IsWatchResponse { is_watching: resp }).to_response()) +} + +#[utoipa::path( + get, + path = "/api/projects/{project_name}/watches/count", + params(("project_name" = String, Path)), + responses( + (status = 200, description = "Get watch count"), + (status = 401, description = "Unauthorized"), + 
(status = 404, description = "Not found"), +), + tag = "Project" +)] +pub async fn project_watches_count( + service: web::Data, + path: web::Path, +) -> Result { + let project_name = path.into_inner(); + let resp = service.project_watches(project_name).await?; + Ok(ApiResponse::ok(serde_json::json!({ "count": resp })).to_response()) +} + +#[utoipa::path( + get, + path = "/api/projects/{project_name}/watches/users", + params(("project_name" = String, Path)), + responses( + (status = 200, description = "List users watching project", body = ApiResponse>), + (status = 401, description = "Unauthorized"), + (status = 404, description = "Not found"), +), + tag = "Project" +)] +pub async fn project_watch_users( + service: web::Data, + path: web::Path, + query: web::Query, +) -> Result { + let project_name = path.into_inner(); + let resp = service + .project_watch_user_list(project_name, query.into_inner().into()) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} diff --git a/libs/api/pull_request/merge.rs b/libs/api/pull_request/merge.rs new file mode 100644 index 0000000..6db090b --- /dev/null +++ b/libs/api/pull_request/merge.rs @@ -0,0 +1,144 @@ +use crate::{ApiResponse, error::ApiError}; +use actix_web::{HttpResponse, Result, web}; +use service::AppService; +use session::Session; + +#[utoipa::path( + get, + path = "/api/repo_pr/{namespace}/{repo}/pulls/{pr_number}/merge", + params( + ("namespace" = String, Path), + ("repo" = String, Path), + ("pr_number" = i64, Path), + ), + responses( + (status = 200, description = "Get merge analysis", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 404, description = "Not found"), +), + tag = "PullRequest" +)] +pub async fn merge_analysis( + service: web::Data, + session: Session, + path: web::Path<(String, String, i64)>, +) -> Result { + let (namespace, repo, pr_number) = path.into_inner(); + let resp = service + .merge_analysis(namespace, repo, pr_number, &session) + .await?; + 
Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/repo_pr/{namespace}/{repo}/pulls/{pr_number}/conflicts", + params( + ("namespace" = String, Path), + ("repo" = String, Path), + ("pr_number" = i64, Path), + ), + responses( + (status = 200, description = "Check merge conflicts", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 404, description = "Not found"), +), + tag = "PullRequest" +)] +pub async fn merge_conflict_check( + service: web::Data, + session: Session, + path: web::Path<(String, String, i64)>, +) -> Result { + let (namespace, repo, pr_number) = path.into_inner(); + let resp = service + .merge_conflict_check(namespace, repo, pr_number, &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + post, + path = "/api/repo_pr/{namespace}/{repo}/pulls/{pr_number}/merge", + params( + ("namespace" = String, Path), + ("repo" = String, Path), + ("pr_number" = i64, Path), + ), + request_body = service::pull_request::MergeRequest, + responses( + (status = 200, description = "Execute merge", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden"), + (status = 404, description = "Not found"), + (status = 409, description = "Conflict"), +), + tag = "PullRequest" +)] +pub async fn merge_execute( + service: web::Data, + session: Session, + path: web::Path<(String, String, i64)>, + body: web::Json, +) -> Result { + let (namespace, repo, pr_number) = path.into_inner(); + let resp = service + .merge_execute(namespace, repo, pr_number, body.into_inner(), &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + post, + path = "/api/repo_pr/{namespace}/{repo}/pulls/{pr_number}/merge/abort", + params( + ("namespace" = String, Path), + ("repo" = String, Path), + ("pr_number" = i64, Path), + ), + responses( + (status = 200, description = "Abort merge"), + (status = 401, description = "Unauthorized"), 
+ (status = 403, description = "Forbidden"), + (status = 404, description = "Not found"), +), + tag = "PullRequest" +)] +pub async fn merge_abort( + service: web::Data, + session: Session, + path: web::Path<(String, String, i64)>, +) -> Result { + let (namespace, repo, pr_number) = path.into_inner(); + service + .merge_abort(namespace, repo, pr_number, &session) + .await?; + Ok(ApiResponse::ok(serde_json::json!({ "success": true })).to_response()) +} + +#[utoipa::path( + get, + path = "/api/repo_pr/{namespace}/{repo}/pulls/{pr_number}/merge/in_progress", + params( + ("namespace" = String, Path), + ("repo" = String, Path), + ("pr_number" = i64, Path), + ), + responses( + (status = 200, description = "Check if merge is in progress"), + (status = 401, description = "Unauthorized"), + (status = 404, description = "Not found"), +), + tag = "PullRequest" +)] +pub async fn merge_is_in_progress( + service: web::Data, + session: Session, + path: web::Path<(String, String, i64)>, +) -> Result { + let (namespace, repo, pr_number) = path.into_inner(); + let resp = service + .merge_is_in_progress(namespace, repo, pr_number, &session) + .await?; + Ok(ApiResponse::ok(serde_json::json!({ "in_progress": resp })).to_response()) +} diff --git a/libs/api/pull_request/mod.rs b/libs/api/pull_request/mod.rs new file mode 100644 index 0000000..daee343 --- /dev/null +++ b/libs/api/pull_request/mod.rs @@ -0,0 +1,136 @@ +pub mod merge; +pub mod pull_request; +pub mod review; +pub mod review_comment; +pub mod review_request; + +use actix_web::web; + +#[derive(serde::Deserialize, utoipa::IntoParams)] +pub struct ListQuery { + pub status: Option, + pub page: Option, + pub per_page: Option, +} + +pub fn init_pull_request_routes(cfg: &mut web::ServiceConfig) { + cfg.service( + web::scope("/repo_pr/{namespace}/{repo}/pulls") + .route("", web::get().to(pull_request::pull_request_list)) + .route("", web::post().to(pull_request::pull_request_create)) + .route( + "/summary", + 
web::get().to(pull_request::pull_request_summary), + ) + .route("/{number}", web::get().to(pull_request::pull_request_get)) + .route( + "/{number}", + web::patch().to(pull_request::pull_request_update), + ) + .route( + "/{number}", + web::delete().to(pull_request::pull_request_delete), + ) + .route( + "/{number}/close", + web::post().to(pull_request::pull_request_close), + ) + .route( + "/{number}/reopen", + web::post().to(pull_request::pull_request_reopen), + ) + // reviews (from pull_request module) + .route( + "/{pr_number}/reviews", + web::get().to(pull_request::review_list), + ) + .route( + "/{pr_number}/reviews", + web::post().to(pull_request::review_submit), + ) + .route( + "/{pr_number}/reviews", + web::patch().to(pull_request::review_update), + ) + .route( + "/{pr_number}/reviews/{reviewer_id}", + web::delete().to(pull_request::review_delete), + ) + // review comments (from pull_request module) + .route( + "/{pr_number}/comments", + web::get().to(pull_request::review_comment_list), + ) + .route( + "/{pr_number}/comments", + web::post().to(pull_request::review_comment_create), + ) + .route( + "/{pr_number}/comments/{comment_id}", + web::patch().to(pull_request::review_comment_update), + ) + .route( + "/{pr_number}/comments/{comment_id}", + web::delete().to(pull_request::review_comment_delete), + ) + .route( + "/{pr_number}/comments/{comment_id}/resolve", + web::put().to(review_comment::review_comment_resolve), + ) + .route( + "/{pr_number}/comments/{comment_id}/resolve", + web::delete().to(review_comment::review_comment_unresolve), + ) + .route( + "/{pr_number}/comments/{comment_id}/replies", + web::post().to(review_comment::review_comment_reply), + ) + .route( + "/{pr_number}/commits", + web::get().to(pull_request::pr_commits_list), + ) + // merge + .route( + "/{pr_number}/merge", + web::get().to(pull_request::merge_analysis), + ) + .route( + "/{pr_number}/conflicts", + web::get().to(pull_request::merge_conflict_check), + ) + .route( + 
"/{pr_number}/merge", + web::post().to(pull_request::merge_execute), + ) + .route( + "/{pr_number}/merge/abort", + web::post().to(pull_request::merge_abort), + ) + .route( + "/{pr_number}/merge/in_progress", + web::get().to(pull_request::merge_is_in_progress), + ) + // side-by-side diff + .route( + "/{pr_number}/diff/side-by-side", + web::get().to(pull_request::pr_diff_side_by_side), + ) + // review requests + .route( + "/{pr_number}/review-requests", + web::get().to(review_request::review_request_list), + ) + .route( + "/{pr_number}/review-requests", + web::post().to(review_request::review_request_create), + ) + .route( + "/{pr_number}/review-requests/{reviewer}", + web::delete().to(review_request::review_request_delete), + ) + .route( + "/{pr_number}/review-requests/{reviewer}/dismiss", + web::post().to(review_request::review_request_dismiss), + ), + ); +} diff --git a/libs/api/pull_request/pull_request.rs b/libs/api/pull_request/pull_request.rs new file mode 100644 index 0000000..70d7dbf --- /dev/null +++ b/libs/api/pull_request/pull_request.rs @@ -0,0 +1,682 @@ +use crate::{ApiResponse, error::ApiError}; +use actix_web::{HttpResponse, Result, web}; +use service::AppService; +use service::git::diff::SideBySideDiffQuery; +use service::pull_request::ReviewCommentListQuery; +use session::Session; + +use super::ListQuery; + +#[utoipa::path( + get, + path = "/api/repo_pr/{namespace}/{repo}/pulls", + params( + ("namespace" = String, Path, description = "Project namespace"), + ("repo" = String, Path, description = "Repository name"), + ("status" = Option, Query), + ("page" = Option, Query), + ("per_page" = Option, Query), + ), + responses( + (status = 200, description = "List pull requests", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 404, description = "Not found"), +), + tag = "PullRequest" +)] +pub async fn pull_request_list( + service: web::Data, + session: Session, + path: web::Path<(String, String)>, + query: web::Query, +) -> 
Result { + let (namespace, repo) = path.into_inner(); + let resp = service + .pull_request_list( + namespace, + repo, + query.status.clone(), + Some(query.page.unwrap_or(1)), + Some(query.per_page.unwrap_or(20)), + &session, + ) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/repo_pr/{namespace}/{repo}/pulls/{number}", + params( + ("namespace" = String, Path), + ("repo" = String, Path), + ("number" = i64, Path), + ), + responses( + (status = 200, description = "Get pull request", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 404, description = "Not found"), +), + tag = "PullRequest" +)] +pub async fn pull_request_get( + service: web::Data, + session: Session, + path: web::Path<(String, String, i64)>, +) -> Result { + let (namespace, repo, number) = path.into_inner(); + let resp = service + .pull_request_get(namespace, repo, number, &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + post, + path = "/api/repo_pr/{namespace}/{repo}/pulls", + params( + ("namespace" = String, Path), + ("repo" = String, Path), + ), + request_body = service::pull_request::PullRequestCreateRequest, + responses( + (status = 200, description = "Create pull request", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden"), + (status = 404, description = "Not found"), +), + tag = "PullRequest" +)] +pub async fn pull_request_create( + service: web::Data, + session: Session, + path: web::Path<(String, String)>, + body: web::Json, +) -> Result { + let (namespace, repo) = path.into_inner(); + let resp = service + .pull_request_create(namespace, repo, body.into_inner(), &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + patch, + path = "/api/repo_pr/{namespace}/{repo}/pulls/{number}", + params( + ("namespace" = String, Path), + ("repo" = String, Path), + ("number" = i64, Path), + ), + 
request_body = service::pull_request::PullRequestUpdateRequest, + responses( + (status = 200, description = "Update pull request", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden"), + (status = 404, description = "Not found"), +), + tag = "PullRequest" +)] +pub async fn pull_request_update( + service: web::Data, + session: Session, + path: web::Path<(String, String, i64)>, + body: web::Json, +) -> Result { + let (namespace, repo, number) = path.into_inner(); + let resp = service + .pull_request_update(namespace, repo, number, body.into_inner(), &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + post, + path = "/api/repo_pr/{namespace}/{repo}/pulls/{number}/close", + params( + ("namespace" = String, Path), + ("repo" = String, Path), + ("number" = i64, Path), + ), + responses( + (status = 200, description = "Close pull request", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden"), + (status = 404, description = "Not found"), +), + tag = "PullRequest" +)] +pub async fn pull_request_close( + service: web::Data, + session: Session, + path: web::Path<(String, String, i64)>, +) -> Result { + let (namespace, repo, number) = path.into_inner(); + let resp = service + .pull_request_close(namespace, repo, number, &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + post, + path = "/api/repo_pr/{namespace}/{repo}/pulls/{number}/reopen", + params( + ("namespace" = String, Path), + ("repo" = String, Path), + ("number" = i64, Path), + ), + responses( + (status = 200, description = "Reopen pull request", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden"), + (status = 404, description = "Not found"), +), + tag = "PullRequest" +)] +pub async fn pull_request_reopen( + service: web::Data, + session: Session, + path: web::Path<(String, 
String, i64)>, +) -> Result { + let (namespace, repo, number) = path.into_inner(); + let resp = service + .pull_request_reopen(namespace, repo, number, &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + delete, + path = "/api/repo_pr/{namespace}/{repo}/pulls/{number}", + params( + ("namespace" = String, Path), + ("repo" = String, Path), + ("number" = i64, Path), + ), + responses( + (status = 200, description = "Delete pull request"), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden"), + (status = 404, description = "Not found"), +), + tag = "PullRequest" +)] +pub async fn pull_request_delete( + service: web::Data, + session: Session, + path: web::Path<(String, String, i64)>, +) -> Result { + let (namespace, repo, number) = path.into_inner(); + service + .pull_request_delete(namespace, repo, number, &session) + .await?; + Ok(ApiResponse::ok(serde_json::json!({ "success": true })).to_response()) +} + +#[utoipa::path( + get, + path = "/api/repo_pr/{namespace}/{repo}/pulls/summary", + params( + ("namespace" = String, Path), + ("repo" = String, Path), + ), + responses( + (status = 200, description = "Get pull request summary", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 404, description = "Not found"), +), + tag = "PullRequest" +)] +pub async fn pull_request_summary( + service: web::Data, + session: Session, + path: web::Path<(String, String)>, +) -> Result { + let (namespace, repo) = path.into_inner(); + let resp = service + .pull_request_summary(namespace, repo, &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/repo_pr/{namespace}/{repo}/pulls/{pr_number}/reviews", + params( + ("namespace" = String, Path), + ("repo" = String, Path), + ("pr_number" = i64, Path), + ), + responses( + (status = 200, description = "List pull request reviews", body = ApiResponse), + (status = 401, description = 
"Unauthorized"), + (status = 404, description = "Not found"), +), + tag = "PullRequest" +)] +pub async fn review_list( + service: web::Data, + session: Session, + path: web::Path<(String, String, i64)>, +) -> Result { + let (namespace, repo, pr_number) = path.into_inner(); + let resp = service + .review_list(namespace, repo, pr_number, &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + post, + path = "/api/repo_pr/{namespace}/{repo}/pulls/{pr_number}/reviews", + params( + ("namespace" = String, Path), + ("repo" = String, Path), + ("pr_number" = i64, Path), + ), + request_body = service::pull_request::ReviewSubmitRequest, + responses( + (status = 200, description = "Submit pull request review", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden"), + (status = 404, description = "Not found"), +), + tag = "PullRequest" +)] +pub async fn review_submit( + service: web::Data, + session: Session, + path: web::Path<(String, String, i64)>, + body: web::Json, +) -> Result { + let (namespace, repo, pr_number) = path.into_inner(); + let resp = service + .review_submit(namespace, repo, pr_number, body.into_inner(), &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + patch, + path = "/api/repo_pr/{namespace}/{repo}/pulls/{pr_number}/reviews", + params( + ("namespace" = String, Path), + ("repo" = String, Path), + ("pr_number" = i64, Path), + ), + request_body = service::pull_request::ReviewUpdateRequest, + responses( + (status = 200, description = "Update pull request review", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden"), + (status = 404, description = "Not found"), +), + tag = "PullRequest" +)] +pub async fn review_update( + service: web::Data, + session: Session, + path: web::Path<(String, String, i64)>, + body: web::Json, +) -> Result { + let (namespace, repo, pr_number) = 
path.into_inner(); + let resp = service + .review_update(namespace, repo, pr_number, body.into_inner(), &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + delete, + path = "/api/repo_pr/{namespace}/{repo}/pulls/{pr_number}/reviews/{reviewer_id}", + params( + ("namespace" = String, Path), + ("repo" = String, Path), + ("pr_number" = i64, Path), + ("reviewer_id" = String, Path, description = "Reviewer UUID"), + ), + responses( + (status = 200, description = "Delete pull request review"), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden"), + (status = 404, description = "Not found"), +), + tag = "PullRequest" +)] +pub async fn review_delete( + service: web::Data, + session: Session, + path: web::Path<(String, String, i64, String)>, +) -> Result { + let (namespace, repo, pr_number, reviewer_id) = path.into_inner(); + let reviewer_uuid = uuid::Uuid::parse_str(&reviewer_id) + .map_err(|_| service::error::AppError::BadRequest("Invalid reviewer ID".to_string()))?; + service + .review_delete(namespace, repo, pr_number, reviewer_uuid, &session) + .await?; + Ok(ApiResponse::ok(serde_json::json!({ "success": true })).to_response()) +} + +#[utoipa::path( + get, + path = "/api/repo_pr/{namespace}/{repo}/pulls/{pr_number}/comments", + params( + ("namespace" = String, Path), + ("repo" = String, Path), + ("pr_number" = i64, Path), + ("path" = Option, Query, description = "Filter by file path"), + ("resolved" = Option, Query, description = "Filter by resolved status"), + ("file_only" = Option, Query, description = "Only inline comments (true) or only general comments (false)"), + ), + responses( + (status = 200, description = "List pull request review comments", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 404, description = "Not found"), +), + tag = "PullRequest" +)] +pub async fn review_comment_list( + service: web::Data, + session: Session, + path: web::Path<(String, 
String, i64)>, + query: web::Query, +) -> Result { + let (namespace, repo, pr_number) = path.into_inner(); + let resp = service + .review_comment_list(namespace, repo, pr_number, query.into_inner(), &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + post, + path = "/api/repo_pr/{namespace}/{repo}/pulls/{pr_number}/comments", + params( + ("namespace" = String, Path), + ("repo" = String, Path), + ("pr_number" = i64, Path), + ), + request_body = service::pull_request::ReviewCommentCreateRequest, + responses( + (status = 200, description = "Create pull request review comment", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden"), + (status = 404, description = "Not found"), +), + tag = "PullRequest" +)] +pub async fn review_comment_create( + service: web::Data, + session: Session, + path: web::Path<(String, String, i64)>, + body: web::Json, +) -> Result { + let (namespace, repo, pr_number) = path.into_inner(); + let resp = service + .review_comment_create(namespace, repo, pr_number, body.into_inner(), &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + patch, + path = "/api/repo_pr/{namespace}/{repo}/pulls/{pr_number}/comments/{comment_id}", + params( + ("namespace" = String, Path), + ("repo" = String, Path), + ("pr_number" = i64, Path), + ("comment_id" = i64, Path), + ), + request_body = service::pull_request::ReviewCommentUpdateRequest, + responses( + (status = 200, description = "Update pull request review comment", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden"), + (status = 404, description = "Not found"), +), + tag = "PullRequest" +)] +pub async fn review_comment_update( + service: web::Data, + session: Session, + path: web::Path<(String, String, i64, i64)>, + body: web::Json, +) -> Result { + let (namespace, repo, pr_number, comment_id) = path.into_inner(); + let resp = 
service + .review_comment_update( + namespace, + repo, + pr_number, + comment_id, + body.into_inner(), + &session, + ) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + delete, + path = "/api/repo_pr/{namespace}/{repo}/pulls/{pr_number}/comments/{comment_id}", + params( + ("namespace" = String, Path), + ("repo" = String, Path), + ("pr_number" = i64, Path), + ("comment_id" = i64, Path), + ), + responses( + (status = 200, description = "Delete pull request review comment"), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden"), + (status = 404, description = "Not found"), +), + tag = "PullRequest" +)] +pub async fn review_comment_delete( + service: web::Data, + session: Session, + path: web::Path<(String, String, i64, i64)>, +) -> Result { + let (namespace, repo, pr_number, comment_id) = path.into_inner(); + service + .review_comment_delete(namespace, repo, pr_number, comment_id, &session) + .await?; + Ok(ApiResponse::ok(serde_json::json!({ "success": true })).to_response()) +} + +#[utoipa::path( + get, + path = "/api/repo_pr/{namespace}/{repo}/pulls/{pr_number}/merge", + params( + ("namespace" = String, Path), + ("repo" = String, Path), + ("pr_number" = i64, Path), + ), + responses( + (status = 200, description = "Get merge analysis", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 404, description = "Not found"), +), + tag = "PullRequest" +)] +pub async fn merge_analysis( + service: web::Data, + session: Session, + path: web::Path<(String, String, i64)>, +) -> Result { + let (namespace, repo, pr_number) = path.into_inner(); + let resp = service + .merge_analysis(namespace, repo, pr_number, &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/repo_pr/{namespace}/{repo}/pulls/{pr_number}/conflicts", + params( + ("namespace" = String, Path), + ("repo" = String, Path), + ("pr_number" = i64, Path), + ), + responses( + 
(status = 200, description = "Check merge conflicts", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 404, description = "Not found"), +), + tag = "PullRequest" +)] +pub async fn merge_conflict_check( + service: web::Data, + session: Session, + path: web::Path<(String, String, i64)>, +) -> Result { + let (namespace, repo, pr_number) = path.into_inner(); + let resp = service + .merge_conflict_check(namespace, repo, pr_number, &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + post, + path = "/api/repo_pr/{namespace}/{repo}/pulls/{pr_number}/merge", + params( + ("namespace" = String, Path), + ("repo" = String, Path), + ("pr_number" = i64, Path), + ), + request_body = service::pull_request::MergeRequest, + responses( + (status = 200, description = "Execute merge", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden"), + (status = 404, description = "Not found"), + (status = 409, description = "Conflict"), +), + tag = "PullRequest" +)] +pub async fn merge_execute( + service: web::Data, + session: Session, + path: web::Path<(String, String, i64)>, + body: web::Json, +) -> Result { + let (namespace, repo, pr_number) = path.into_inner(); + let resp = service + .merge_execute(namespace, repo, pr_number, body.into_inner(), &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + post, + path = "/api/repo_pr/{namespace}/{repo}/pulls/{pr_number}/merge/abort", + params( + ("namespace" = String, Path), + ("repo" = String, Path), + ("pr_number" = i64, Path), + ), + responses( + (status = 200, description = "Abort merge"), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden"), + (status = 404, description = "Not found"), +), + tag = "PullRequest" +)] +pub async fn merge_abort( + service: web::Data, + session: Session, + path: web::Path<(String, String, i64)>, +) -> Result { + let 
(namespace, repo, pr_number) = path.into_inner(); + service + .merge_abort(namespace, repo, pr_number, &session) + .await?; + Ok(ApiResponse::ok(serde_json::json!({ "success": true })).to_response()) +} + +#[utoipa::path( + get, + path = "/api/repo_pr/{namespace}/{repo}/pulls/{pr_number}/merge/in_progress", + params( + ("namespace" = String, Path), + ("repo" = String, Path), + ("pr_number" = i64, Path), + ), + responses( + (status = 200, description = "Check if merge is in progress"), + (status = 401, description = "Unauthorized"), + (status = 404, description = "Not found"), +), + tag = "PullRequest" +)] +pub async fn merge_is_in_progress( + service: web::Data, + session: Session, + path: web::Path<(String, String, i64)>, +) -> Result { + let (namespace, repo, pr_number) = path.into_inner(); + let resp = service + .merge_is_in_progress(namespace, repo, pr_number, &session) + .await?; + Ok(ApiResponse::ok(serde_json::json!({ "in_progress": resp })).to_response()) +} + +#[utoipa::path( + get, + path = "/api/repo_pr/{namespace}/{repo}/pulls/{pr_number}/diff/side-by-side", + params( + ("namespace" = String, Path, description = "Project namespace"), + ("repo" = String, Path, description = "Repository name"), + ("pr_number" = i64, Path, description = "Pull request number"), + ), + responses( + (status = 200, description = "Side-by-side diff for a pull request", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 404, description = "Not found"), + ), + tag = "PullRequest" +)] +pub async fn pr_diff_side_by_side( + service: web::Data, + session: Session, + path: web::Path<(String, String, i64)>, + query: web::Query, +) -> Result { + let (namespace, repo, pr_number) = path.into_inner(); + let resp = service + .pr_diff_side_by_side(namespace, repo, pr_number, query.into_inner(), &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/repo_pr/{namespace}/{repo}/pulls/{pr_number}/commits", + 
params( + ("namespace" = String, Path, description = "Project namespace"), + ("repo" = String, Path, description = "Repository name"), + ("pr_number" = i64, Path, description = "Pull request number"), + ), + responses( + (status = 200, description = "List commits in a pull request", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 404, description = "Not found"), + ), + tag = "PullRequest" +)] +pub async fn pr_commits_list( + service: web::Data, + session: Session, + path: web::Path<(String, String, i64)>, +) -> Result { + let (namespace, repo, pr_number) = path.into_inner(); + let resp = service + .pr_commits_list(namespace, repo, pr_number, &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} diff --git a/libs/api/pull_request/review.rs b/libs/api/pull_request/review.rs new file mode 100644 index 0000000..748d283 --- /dev/null +++ b/libs/api/pull_request/review.rs @@ -0,0 +1,122 @@ +use crate::{ApiResponse, error::ApiError}; +use actix_web::{HttpResponse, Result, web}; +use service::AppService; +use session::Session; + +#[utoipa::path( + get, + path = "/api/repo_pr/{namespace}/{repo}/pulls/{pr_number}/reviews", + params( + ("namespace" = String, Path), + ("repo" = String, Path), + ("pr_number" = i64, Path), + ), + responses( + (status = 200, description = "List pull request reviews", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 404, description = "Not found"), +), + tag = "PullRequest" +)] +pub async fn review_list( + service: web::Data, + session: Session, + path: web::Path<(String, String, i64)>, +) -> Result { + let (namespace, repo, pr_number) = path.into_inner(); + let resp = service + .review_list(namespace, repo, pr_number, &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + post, + path = "/api/repo_pr/{namespace}/{repo}/pulls/{pr_number}/reviews", + params( + ("namespace" = String, Path), + ("repo" = String, Path), + ("pr_number" = i64, 
Path), + ), + request_body = service::pull_request::ReviewSubmitRequest, + responses( + (status = 200, description = "Submit pull request review", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden"), + (status = 404, description = "Not found"), +), + tag = "PullRequest" +)] +pub async fn review_submit( + service: web::Data, + session: Session, + path: web::Path<(String, String, i64)>, + body: web::Json, +) -> Result { + let (namespace, repo, pr_number) = path.into_inner(); + let resp = service + .review_submit(namespace, repo, pr_number, body.into_inner(), &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + patch, + path = "/api/repo_pr/{namespace}/{repo}/pulls/{pr_number}/reviews", + params( + ("namespace" = String, Path), + ("repo" = String, Path), + ("pr_number" = i64, Path), + ), + request_body = service::pull_request::ReviewUpdateRequest, + responses( + (status = 200, description = "Update pull request review", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden"), + (status = 404, description = "Not found"), +), + tag = "PullRequest" +)] +pub async fn review_update( + service: web::Data, + session: Session, + path: web::Path<(String, String, i64)>, + body: web::Json, +) -> Result { + let (namespace, repo, pr_number) = path.into_inner(); + let resp = service + .review_update(namespace, repo, pr_number, body.into_inner(), &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + delete, + path = "/api/repo_pr/{namespace}/{repo}/pulls/{pr_number}/reviews/{reviewer_id}", + params( + ("namespace" = String, Path), + ("repo" = String, Path), + ("pr_number" = i64, Path), + ("reviewer_id" = String, Path, description = "Reviewer UUID"), + ), + responses( + (status = 200, description = "Delete pull request review"), + (status = 401, description = "Unauthorized"), + (status = 403, 
description = "Forbidden"), + (status = 404, description = "Not found"), +), + tag = "PullRequest" +)] +pub async fn review_delete( + service: web::Data, + session: Session, + path: web::Path<(String, String, i64, String)>, +) -> Result { + let (namespace, repo, pr_number, reviewer_id) = path.into_inner(); + let reviewer_uuid = uuid::Uuid::parse_str(&reviewer_id) + .map_err(|_| service::error::AppError::BadRequest("Invalid reviewer ID".to_string()))?; + service + .review_delete(namespace, repo, pr_number, reviewer_uuid, &session) + .await?; + Ok(ApiResponse::ok(serde_json::json!({ "success": true })).to_response()) +} diff --git a/libs/api/pull_request/review_comment.rs b/libs/api/pull_request/review_comment.rs new file mode 100644 index 0000000..f885bc1 --- /dev/null +++ b/libs/api/pull_request/review_comment.rs @@ -0,0 +1,229 @@ +use crate::{ApiResponse, error::ApiError}; +use actix_web::{HttpResponse, Result, web}; +use service::AppService; +use service::pull_request::review_comment::{ReviewCommentListQuery, ReviewCommentReplyRequest}; +use session::Session; + +#[utoipa::path( + get, + path = "/api/repo_pr/{namespace}/{repo}/pulls/{pr_number}/comments", + params( + ("namespace" = String, Path), + ("repo" = String, Path), + ("pr_number" = i64, Path), + ("path" = Option, Query, description = "Filter by file path"), + ("resolved" = Option, Query, description = "Filter by resolved status"), + ("file_only" = Option, Query, description = "Only inline comments (true) or only general comments (false)"), + ), + responses( + (status = 200, description = "List pull request review comments", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 404, description = "Not found"), +), + tag = "PullRequest" +)] +pub async fn review_comment_list( + service: web::Data, + session: Session, + path: web::Path<(String, String, i64)>, + query: web::Query, +) -> Result { + let (namespace, repo, pr_number) = path.into_inner(); + let resp = service + 
.review_comment_list(namespace, repo, pr_number, query.into_inner(), &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + post, + path = "/api/repo_pr/{namespace}/{repo}/pulls/{pr_number}/comments", + params( + ("namespace" = String, Path), + ("repo" = String, Path), + ("pr_number" = i64, Path), + ), + request_body = service::pull_request::ReviewCommentCreateRequest, + responses( + (status = 200, description = "Create pull request review comment", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden"), + (status = 404, description = "Not found"), +), + tag = "PullRequest" +)] +pub async fn review_comment_create( + service: web::Data, + session: Session, + path: web::Path<(String, String, i64)>, + body: web::Json, +) -> Result { + let (namespace, repo, pr_number) = path.into_inner(); + let resp = service + .review_comment_create(namespace, repo, pr_number, body.into_inner(), &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + patch, + path = "/api/repo_pr/{namespace}/{repo}/pulls/{pr_number}/comments/{comment_id}", + params( + ("namespace" = String, Path), + ("repo" = String, Path), + ("pr_number" = i64, Path), + ("comment_id" = i64, Path), + ), + request_body = service::pull_request::ReviewCommentUpdateRequest, + responses( + (status = 200, description = "Update pull request review comment", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden"), + (status = 404, description = "Not found"), +), + tag = "PullRequest" +)] +pub async fn review_comment_update( + service: web::Data, + session: Session, + path: web::Path<(String, String, i64, i64)>, + body: web::Json, +) -> Result { + let (namespace, repo, pr_number, comment_id) = path.into_inner(); + let resp = service + .review_comment_update( + namespace, + repo, + pr_number, + comment_id, + body.into_inner(), + &session, + ) + .await?; 
+ Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + delete, + path = "/api/repo_pr/{namespace}/{repo}/pulls/{pr_number}/comments/{comment_id}", + params( + ("namespace" = String, Path), + ("repo" = String, Path), + ("pr_number" = i64, Path), + ("comment_id" = i64, Path), + ), + responses( + (status = 200, description = "Delete pull request review comment"), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden"), + (status = 404, description = "Not found"), +), + tag = "PullRequest" +)] +pub async fn review_comment_delete( + service: web::Data, + session: Session, + path: web::Path<(String, String, i64, i64)>, +) -> Result { + let (namespace, repo, pr_number, comment_id) = path.into_inner(); + service + .review_comment_delete(namespace, repo, pr_number, comment_id, &session) + .await?; + Ok(ApiResponse::ok(serde_json::json!({ "success": true })).to_response()) +} + +#[utoipa::path( + put, + path = "/api/repo_pr/{namespace}/{repo}/pulls/{pr_number}/comments/{comment_id}/resolve", + params( + ("namespace" = String, Path), + ("repo" = String, Path), + ("pr_number" = i64, Path), + ("comment_id" = i64, Path), + ), + responses( + (status = 200, description = "Mark comment as resolved", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden"), + (status = 404, description = "Not found"), + ), + tag = "PullRequest" +)] +pub async fn review_comment_resolve( + service: web::Data, + session: Session, + path: web::Path<(String, String, i64, i64)>, +) -> Result { + let (namespace, repo, pr_number, comment_id) = path.into_inner(); + let resp = service + .review_comment_resolve(namespace, repo, pr_number, comment_id, &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + delete, + path = "/api/repo_pr/{namespace}/{repo}/pulls/{pr_number}/comments/{comment_id}/resolve", + params( + ("namespace" = String, Path), + ("repo" = String, Path), + 
("pr_number" = i64, Path), + ("comment_id" = i64, Path), + ), + responses( + (status = 200, description = "Mark comment as unresolved", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden"), + (status = 404, description = "Not found"), + ), + tag = "PullRequest" +)] +pub async fn review_comment_unresolve( + service: web::Data, + session: Session, + path: web::Path<(String, String, i64, i64)>, +) -> Result { + let (namespace, repo, pr_number, comment_id) = path.into_inner(); + let resp = service + .review_comment_unresolve(namespace, repo, pr_number, comment_id, &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + post, + path = "/api/repo_pr/{namespace}/{repo}/pulls/{pr_number}/comments/{comment_id}/replies", + params( + ("namespace" = String, Path), + ("repo" = String, Path), + ("pr_number" = i64, Path), + ("comment_id" = i64, Path), + ), + request_body = service::pull_request::review_comment::ReviewCommentReplyRequest, + responses( + (status = 200, description = "Reply to a comment", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden"), + (status = 404, description = "Not found"), + ), + tag = "PullRequest" +)] +pub async fn review_comment_reply( + service: web::Data, + session: Session, + path: web::Path<(String, String, i64, i64)>, + body: web::Json, +) -> Result { + let (namespace, repo, pr_number, comment_id) = path.into_inner(); + let resp = service + .review_comment_reply( + namespace, + repo, + pr_number, + comment_id, + body.into_inner(), + &session, + ) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} diff --git a/libs/api/pull_request/review_request.rs b/libs/api/pull_request/review_request.rs new file mode 100644 index 0000000..f4200c7 --- /dev/null +++ b/libs/api/pull_request/review_request.rs @@ -0,0 +1,121 @@ +use crate::{ApiResponse, error::ApiError}; +use actix_web::{HttpResponse, Result, 
web}; +use service::AppService; +use service::pull_request::review_request::ReviewRequestCreateRequest; +use session::Session; +use uuid::Uuid; + +#[utoipa::path( + get, + path = "/api/repo_pr/{namespace}/{repo}/pulls/{pr_number}/review-requests", + params( + ("namespace" = String, Path), + ("repo" = String, Path), + ("pr_number" = i64, Path), + ), + responses( + (status = 200, description = "List review requests for a pull request", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 404, description = "Not found"), + ), + tag = "PullRequest" +)] +pub async fn review_request_list( + service: web::Data, + session: Session, + path: web::Path<(String, String, i64)>, +) -> Result { + let (namespace, repo, pr_number) = path.into_inner(); + let resp = service + .review_request_list(namespace, repo, pr_number, &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + post, + path = "/api/repo_pr/{namespace}/{repo}/pulls/{pr_number}/review-requests", + params( + ("namespace" = String, Path), + ("repo" = String, Path), + ("pr_number" = i64, Path), + ), + request_body = ReviewRequestCreateRequest, + responses( + (status = 200, description = "Create or update a review request", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden"), + (status = 404, description = "Not found"), + ), + tag = "PullRequest" +)] +pub async fn review_request_create( + service: web::Data, + session: Session, + path: web::Path<(String, String, i64)>, + body: web::Json, +) -> Result { + let (namespace, repo, pr_number) = path.into_inner(); + let resp = service + .review_request_create(namespace, repo, pr_number, body.into_inner(), &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + delete, + path = "/api/repo_pr/{namespace}/{repo}/pulls/{pr_number}/review-requests/{reviewer}", + params( + ("namespace" = String, Path), + ("repo" = String, Path), + 
("pr_number" = i64, Path), + ("reviewer" = Uuid, Path), + ), + responses( + (status = 200, description = "Delete (cancel) a review request"), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden"), + (status = 404, description = "Not found"), + ), + tag = "PullRequest" +)] +pub async fn review_request_delete( + service: web::Data, + session: Session, + path: web::Path<(String, String, i64, Uuid)>, +) -> Result { + let (namespace, repo, pr_number, reviewer) = path.into_inner(); + service + .review_request_delete(namespace, repo, pr_number, reviewer, &session) + .await?; + Ok(ApiResponse::ok(serde_json::json!({ "success": true })).to_response()) +} + +#[utoipa::path( + post, + path = "/api/repo_pr/{namespace}/{repo}/pulls/{pr_number}/review-requests/{reviewer}/dismiss", + params( + ("namespace" = String, Path), + ("repo" = String, Path), + ("pr_number" = i64, Path), + ("reviewer" = Uuid, Path), + ), + responses( + (status = 200, description = "Dismiss a review request", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden"), + (status = 404, description = "Not found"), + ), + tag = "PullRequest" +)] +pub async fn review_request_dismiss( + service: web::Data, + session: Session, + path: web::Path<(String, String, i64, Uuid)>, +) -> Result { + let (namespace, repo, pr_number, reviewer) = path.into_inner(); + let resp = service + .review_request_dismiss(namespace, repo, pr_number, reviewer, &session) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} diff --git a/libs/api/room/ai.rs b/libs/api/room/ai.rs new file mode 100644 index 0000000..6d0557f --- /dev/null +++ b/libs/api/room/ai.rs @@ -0,0 +1,104 @@ +use crate::{ApiResponse, error::ApiError}; +use actix_web::{HttpResponse, Result, web}; +use room::ws_context::WsUserContext; +use service::AppService; +use session::Session; +use uuid::Uuid; + +#[utoipa::path( + get, + path = "/api/rooms/{room_id}/ai", + params( + 
("room_id" = Uuid, Path), + ), + responses( + (status = 200, description = "List room AI configurations", body = ApiResponse>), + (status = 401, description = "Unauthorized"), + (status = 404, description = "Not found"), + ), + tag = "Room" +)] +pub async fn ai_list( + service: web::Data, + session: Session, + path: web::Path, +) -> Result { + let room_id = path.into_inner(); + let user_id = session + .user() + .ok_or_else(|| ApiError::from(service::error::AppError::Unauthorized))?; + let ctx = WsUserContext::new(user_id); + let resp = service + .room + .room_ai_list(room_id, &ctx) + .await + .map_err(ApiError::from)?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + put, + path = "/api/rooms/{room_id}/ai", + params( + ("room_id" = Uuid, Path), + ), + request_body = room::RoomAiUpsertRequest, + responses( + (status = 200, description = "Upsert room AI configuration", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden"), + (status = 404, description = "Not found"), + ), + tag = "Room" +)] +pub async fn ai_upsert( + service: web::Data, + session: Session, + path: web::Path, + body: web::Json, +) -> Result { + let room_id = path.into_inner(); + let user_id = session + .user() + .ok_or_else(|| ApiError::from(service::error::AppError::Unauthorized))?; + let ctx = WsUserContext::new(user_id); + let resp = service + .room + .room_ai_upsert(room_id, body.into_inner(), &ctx) + .await + .map_err(ApiError::from)?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + delete, + path = "/api/rooms/{room_id}/ai/{model_id}", + params( + ("room_id" = Uuid, Path), + ("model_id" = Uuid, Path), + ), + responses( + (status = 200, description = "Delete room AI configuration"), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden"), + (status = 404, description = "Not found"), + ), + tag = "Room" +)] +pub async fn ai_delete( + service: web::Data, + session: 
Session, + path: web::Path<(Uuid, Uuid)>, +) -> Result { + let (room_id, model_id) = path.into_inner(); + let user_id = session + .user() + .ok_or_else(|| ApiError::from(service::error::AppError::Unauthorized))?; + let ctx = WsUserContext::new(user_id); + service + .room + .room_ai_delete(room_id, model_id, &ctx) + .await + .map_err(ApiError::from)?; + Ok(ApiResponse::ok(true).to_response()) +} diff --git a/libs/api/room/category.rs b/libs/api/room/category.rs new file mode 100644 index 0000000..4970878 --- /dev/null +++ b/libs/api/room/category.rs @@ -0,0 +1,137 @@ +use crate::{ApiResponse, error::ApiError}; +use actix_web::{HttpResponse, Result, web}; +use room::ws_context::WsUserContext; +use service::AppService; +use session::Session; +use uuid::Uuid; + +#[utoipa::path( + get, + path = "/api/project_room/{project_name}/room-categories", + params( + ("project_name" = String, Path), + ), + responses( + (status = 200, description = "List room categories", body = ApiResponse>), + (status = 401, description = "Unauthorized"), + (status = 404, description = "Not found"), + ), + tag = "Room" +)] +pub async fn category_list( + service: web::Data, + session: Session, + path: web::Path, +) -> Result { + let project_name = path.into_inner(); + let user_id = session + .user() + .ok_or_else(|| ApiError::from(service::error::AppError::Unauthorized))?; + let ctx = WsUserContext::new(user_id); + let resp = service + .room + .room_category_list(project_name, &ctx) + .await + .map_err(ApiError::from)?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + post, + path = "/api/project_room/{project_name}/room-categories", + params( + ("project_name" = String, Path), + ), + request_body = room::RoomCategoryCreateRequest, + responses( + (status = 200, description = "Create room category", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden"), + (status = 404, description = "Not found"), + ), + tag = "Room" +)] 
+pub async fn category_create( + service: web::Data, + session: Session, + path: web::Path, + body: web::Json, +) -> Result { + let project_name = path.into_inner(); + let user_id = session + .user() + .ok_or_else(|| ApiError::from(service::error::AppError::Unauthorized))?; + let ctx = WsUserContext::new(user_id); + let resp = service + .room + .room_category_create(project_name, body.into_inner(), &ctx) + .await + .map_err(ApiError::from)?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + patch, + path = "/api/room-categories/{category_id}", + params( + ("category_id" = Uuid, Path), + ), + request_body = room::RoomCategoryUpdateRequest, + responses( + (status = 200, description = "Update room category", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden"), + (status = 404, description = "Not found"), + ), + tag = "Room" +)] +pub async fn category_update( + service: web::Data, + session: Session, + path: web::Path, + body: web::Json, +) -> Result { + let category_id = path.into_inner(); + let user_id = session + .user() + .ok_or_else(|| ApiError::from(service::error::AppError::Unauthorized))?; + let ctx = WsUserContext::new(user_id); + let resp = service + .room + .room_category_update(category_id, body.into_inner(), &ctx) + .await + .map_err(ApiError::from)?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + delete, + path = "/api/room-categories/{category_id}", + params( + ("category_id" = Uuid, Path), + ), + responses( + (status = 200, description = "Delete room category"), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden"), + (status = 404, description = "Not found"), + ), + tag = "Room" +)] +pub async fn category_delete( + service: web::Data, + session: Session, + path: web::Path, +) -> Result { + let category_id = path.into_inner(); + let user_id = session + .user() + .ok_or_else(|| 
ApiError::from(service::error::AppError::Unauthorized))?; + let ctx = WsUserContext::new(user_id); + service + .room + .room_category_delete(category_id, &ctx) + .await + .map_err(ApiError::from)?; + Ok(ApiResponse::ok(true).to_response()) +} diff --git a/libs/api/room/draft_and_history.rs b/libs/api/room/draft_and_history.rs new file mode 100644 index 0000000..5280743 --- /dev/null +++ b/libs/api/room/draft_and_history.rs @@ -0,0 +1,97 @@ +use crate::{ApiResponse, error::ApiError}; +use actix_web::{HttpResponse, Result, web}; +use room::ws_context::WsUserContext; +use service::AppService; +use session::Session; +use uuid::Uuid; + +#[utoipa::path( + get, + path = "/api/rooms/{room_id}/messages/{message_id}/edit-history", + params( + ("room_id" = Uuid, Path), + ("message_id" = Uuid, Path), + ), + responses( + (status = 200, description = "Get message edit history", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 404, description = "Not found"), + ), + tag = "Room" +)] +pub async fn message_edit_history( + service: web::Data, + session: Session, + path: web::Path<(Uuid, Uuid)>, +) -> Result { + let (_room_id, message_id) = path.into_inner(); + let user_id = session + .user() + .ok_or_else(|| ApiError::from(service::error::AppError::Unauthorized))?; + let ctx = WsUserContext::new(user_id); + let resp = service + .room + .get_message_edit_history(message_id, &ctx) + .await + .map_err(ApiError::from)?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[derive(Debug, serde::Deserialize)] +pub struct MentionQuery { + pub limit: Option, +} + +#[utoipa::path( + get, + path = "/api/me/mentions", + params( + ("limit" = Option, Query), + ), + responses( + (status = 200, description = "List mentions", body = ApiResponse>), + (status = 401, description = "Unauthorized"), + ), + tag = "Room" +)] +pub async fn mention_list( + service: web::Data, + session: Session, + query: web::Query, +) -> Result { + let user_id = session + .user() + 
.ok_or_else(|| ApiError::from(service::error::AppError::Unauthorized))?; + let ctx = WsUserContext::new(user_id); + let resp = service + .room + .get_mention_notifications(query.limit, &ctx) + .await + .map_err(ApiError::from)?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + post, + path = "/api/me/mentions/read-all", + responses( + (status = 200, description = "Mark all mentions as read"), + (status = 401, description = "Unauthorized"), + ), + tag = "Room" +)] +pub async fn mention_read_all( + service: web::Data, + session: Session, +) -> Result { + let user_id = session + .user() + .ok_or_else(|| ApiError::from(service::error::AppError::Unauthorized))?; + let ctx = WsUserContext::new(user_id); + service + .room + .mark_mention_notifications_read(&ctx) + .await + .map_err(ApiError::from)?; + Ok(ApiResponse::ok(true).to_response()) +} diff --git a/libs/api/room/member.rs b/libs/api/room/member.rs new file mode 100644 index 0000000..5079796 --- /dev/null +++ b/libs/api/room/member.rs @@ -0,0 +1,208 @@ +use crate::{ApiResponse, error::ApiError}; +use actix_web::{HttpResponse, Result, web}; +use room::ws_context::WsUserContext; +use service::AppService; +use session::Session; +use uuid::Uuid; + +#[utoipa::path( + get, + path = "/api/rooms/{room_id}/members", + params( + ("room_id" = Uuid, Path), + ), + responses( + (status = 200, description = "List room members", body = ApiResponse>), + (status = 401, description = "Unauthorized"), + (status = 404, description = "Not found"), + ), + tag = "Room" +)] +pub async fn member_list( + service: web::Data, + session: Session, + path: web::Path, +) -> Result { + let room_id = path.into_inner(); + let user_id = session + .user() + .ok_or_else(|| ApiError::from(service::error::AppError::Unauthorized))?; + let ctx = WsUserContext::new(user_id); + let resp = service + .room + .room_member_list(room_id, &ctx) + .await + .map_err(ApiError::from)?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + 
post, + path = "/api/rooms/{room_id}/members", + params( + ("room_id" = Uuid, Path), + ), + request_body = room::RoomMemberAddRequest, + responses( + (status = 200, description = "Add room member", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden"), + (status = 404, description = "Not found"), + ), + tag = "Room" +)] +pub async fn member_add( + service: web::Data, + session: Session, + path: web::Path, + body: web::Json, +) -> Result { + let room_id = path.into_inner(); + let user_id = session + .user() + .ok_or_else(|| ApiError::from(service::error::AppError::Unauthorized))?; + let ctx = WsUserContext::new(user_id); + let resp = service + .room + .room_member_add(room_id, body.into_inner(), &ctx) + .await + .map_err(ApiError::from)?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + patch, + path = "/api/rooms/{room_id}/members/{user_id}/role", + params( + ("room_id" = Uuid, Path), + ("user_id" = Uuid, Path), + ), + request_body = room::RoomMemberRoleUpdateRequest, + responses( + (status = 200, description = "Update member role", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden"), + (status = 404, description = "Not found"), + ), + tag = "Room" +)] +pub async fn member_update_role( + service: web::Data, + session: Session, + path: web::Path<(Uuid, Uuid)>, + body: web::Json, +) -> Result { + let (room_id, user_id) = path.into_inner(); + let req = room::RoomMemberRoleUpdateRequest { + user_id, + role: body.into_inner().role, + }; + let actor_id = session + .user() + .ok_or_else(|| ApiError::from(service::error::AppError::Unauthorized))?; + let ctx = WsUserContext::new(actor_id); + let resp = service + .room + .room_member_update_role(room_id, req, &ctx) + .await + .map_err(ApiError::from)?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + delete, + path = "/api/rooms/{room_id}/members/{user_id}", + params( + 
("room_id" = Uuid, Path), + ("user_id" = Uuid, Path), + ), + responses( + (status = 200, description = "Remove room member"), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden"), + (status = 404, description = "Not found"), + ), + tag = "Room" +)] +pub async fn member_remove( + service: web::Data, + session: Session, + path: web::Path<(Uuid, Uuid)>, +) -> Result { + let (room_id, user_id) = path.into_inner(); + let actor_id = session + .user() + .ok_or_else(|| ApiError::from(service::error::AppError::Unauthorized))?; + let ctx = WsUserContext::new(actor_id); + service + .room + .room_member_remove(room_id, user_id, &ctx) + .await + .map_err(ApiError::from)?; + Ok(ApiResponse::ok(true).to_response()) +} + +#[utoipa::path( + patch, + path = "/api/rooms/{room_id}/members/me/read-seq", + params( + ("room_id" = Uuid, Path), + ), + request_body = room::RoomMemberReadSeqRequest, + responses( + (status = 200, description = "Set member read sequence", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 404, description = "Not found"), + ), + tag = "Room" +)] +pub async fn member_set_read_seq( + service: web::Data, + session: Session, + path: web::Path, + body: web::Json, +) -> Result { + let room_id = path.into_inner(); + let user_id = session + .user() + .ok_or_else(|| ApiError::from(service::error::AppError::Unauthorized))?; + let ctx = WsUserContext::new(user_id); + let resp = service + .room + .room_member_set_read_seq(room_id, body.into_inner(), &ctx) + .await + .map_err(ApiError::from)?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + delete, + path = "/api/rooms/{room_id}/members/me", + params( + ("room_id" = Uuid, Path), + ), + responses( + (status = 200, description = "Leave room"), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden"), + (status = 404, description = "Not found"), + ), + tag = "Room" +)] +pub async fn member_leave( + service: 
web::Data, + session: Session, + path: web::Path, +) -> Result { + let room_id = path.into_inner(); + let user_id = session + .user() + .ok_or_else(|| ApiError::from(service::error::AppError::Unauthorized))?; + let ctx = WsUserContext::new(user_id); + service + .room + .room_member_leave(room_id, &ctx) + .await + .map_err(ApiError::from)?; + Ok(ApiResponse::ok(true).to_response()) +} diff --git a/libs/api/room/message.rs b/libs/api/room/message.rs new file mode 100644 index 0000000..e1e4e4c --- /dev/null +++ b/libs/api/room/message.rs @@ -0,0 +1,189 @@ +use crate::{ApiResponse, error::ApiError}; +use actix_web::{HttpResponse, Result, web}; +use room::ws_context::WsUserContext; +use service::AppService; +use session::Session; +use utoipa::IntoParams; +use uuid::Uuid; + +#[derive(Debug, serde::Deserialize, IntoParams)] +pub struct MessageListQuery { + pub before_seq: Option, + pub after_seq: Option, + pub limit: Option, +} + +#[utoipa::path( + get, + path = "/api/rooms/{room_id}/messages", + params( + ("room_id" = Uuid, Path), + ("before_seq" = Option, Query), + ("after_seq" = Option, Query), + ("limit" = Option, Query), + ), + responses( + (status = 200, description = "List room messages", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 404, description = "Not found"), + ), + tag = "Room" +)] +pub async fn message_list( + service: web::Data, + session: Session, + path: web::Path, + query: web::Query, +) -> Result { + let room_id = path.into_inner(); + let user_id = session + .user() + .ok_or_else(|| ApiError::from(service::error::AppError::Unauthorized))?; + let ctx = WsUserContext::new(user_id); + let resp = service + .room + .room_message_list( + room_id, + query.before_seq, + query.after_seq, + query.limit, + &ctx, + ) + .await + .map_err(ApiError::from)?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + post, + path = "/api/rooms/{room_id}/messages", + params( + ("room_id" = Uuid, Path), + ), + request_body = 
room::RoomMessageCreateRequest, + responses( + (status = 200, description = "Create room message", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden"), + (status = 404, description = "Not found"), + ), + tag = "Room" +)] +pub async fn message_create( + service: web::Data, + session: Session, + path: web::Path, + body: web::Json, +) -> Result { + let room_id = path.into_inner(); + let user_id = session + .user() + .ok_or_else(|| ApiError::from(service::error::AppError::Unauthorized))?; + let ctx = WsUserContext::new(user_id); + let resp = service + .room + .room_message_create(room_id, body.into_inner(), &ctx) + .await + .map_err(ApiError::from)?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + patch, + path = "/api/rooms/{room_id}/messages/{message_id}", + params( + ("room_id" = Uuid, Path), + ("message_id" = Uuid, Path), + ), + request_body = room::RoomMessageUpdateRequest, + responses( + (status = 200, description = "Update room message", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden"), + (status = 404, description = "Not found"), + ), + tag = "Room" +)] +pub async fn message_update( + service: web::Data, + session: Session, + path: web::Path<(Uuid, Uuid)>, + body: web::Json, +) -> Result { + let (_room_id, message_id) = path.into_inner(); + let user_id = session + .user() + .ok_or_else(|| ApiError::from(service::error::AppError::Unauthorized))?; + let ctx = WsUserContext::new(user_id); + let resp = service + .room + .room_message_update(message_id, body.into_inner(), &ctx) + .await + .map_err(ApiError::from)?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + post, + path = "/api/rooms/{room_id}/messages/{message_id}/revoke", + params( + ("room_id" = Uuid, Path), + ("message_id" = Uuid, Path), + ), + responses( + (status = 200, description = "Revoke room message", body = ApiResponse), + (status = 401, 
description = "Unauthorized"), + (status = 403, description = "Forbidden"), + (status = 404, description = "Not found"), + ), + tag = "Room" +)] +pub async fn message_revoke( + service: web::Data, + session: Session, + path: web::Path<(Uuid, Uuid)>, +) -> Result { + let (_room_id, message_id) = path.into_inner(); + let user_id = session + .user() + .ok_or_else(|| ApiError::from(service::error::AppError::Unauthorized))?; + let ctx = WsUserContext::new(user_id); + let resp = service + .room + .room_message_revoke(message_id, &ctx) + .await + .map_err(ApiError::from)?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/rooms/{room_id}/messages/{message_id}", + params( + ("room_id" = Uuid, Path), + ("message_id" = Uuid, Path), + ), + responses( + (status = 200, description = "Get room message", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 404, description = "Not found"), + ), + tag = "Room" +)] +pub async fn message_get( + service: web::Data, + session: Session, + path: web::Path<(Uuid, Uuid)>, +) -> Result { + let (_room_id, message_id) = path.into_inner(); + let user_id = session + .user() + .ok_or_else(|| ApiError::from(service::error::AppError::Unauthorized))?; + let ctx = WsUserContext::new(user_id); + let resp = service + .room + .room_message_get(message_id, &ctx) + .await + .map_err(ApiError::from)?; + Ok(ApiResponse::ok(resp).to_response()) +} diff --git a/libs/api/room/mod.rs b/libs/api/room/mod.rs new file mode 100644 index 0000000..92a311e --- /dev/null +++ b/libs/api/room/mod.rs @@ -0,0 +1,173 @@ +pub mod ai; +pub mod category; +pub mod draft_and_history; +pub mod member; +pub mod message; +pub mod notification; +pub mod pin; +pub mod reaction; +pub mod room; +pub mod thread; +pub mod ws; +pub mod ws_handler; +pub mod ws_types; +pub mod ws_universal; + +use actix_web::web; + +pub fn init_room_routes(cfg: &mut web::ServiceConfig) { + cfg.service( + web::scope("") + .route( + 
"/project_room/{project_name}/rooms", + web::get().to(room::room_list), + ) + .route( + "/project_room/{project_name}/rooms", + web::post().to(room::room_create), + ) + .route( + "/project_room/{project_name}/room-categories", + web::get().to(category::category_list), + ) + .route( + "/project_room/{project_name}/room-categories", + web::post().to(category::category_create), + ) + .route("/rooms/{room_id}", web::get().to(room::room_get)) + .route("/rooms/{room_id}", web::patch().to(room::room_update)) + .route("/rooms/{room_id}", web::delete().to(room::room_delete)) + .route( + "/rooms/{room_id}/messages", + web::get().to(message::message_list), + ) + .route( + "/rooms/{room_id}/messages", + web::post().to(message::message_create), + ) + .route( + "/rooms/{room_id}/messages/{message_id}", + web::patch().to(message::message_update), + ) + .route( + "/rooms/{room_id}/messages/{message_id}", + web::get().to(message::message_get), + ) + .route( + "/rooms/{room_id}/messages/{message_id}/revoke", + web::post().to(message::message_revoke), + ) + // room pins + .route("/rooms/{room_id}/pins", web::get().to(pin::pin_list)) + .route( + "/rooms/{room_id}/messages/{message_id}/pin", + web::post().to(pin::pin_add), + ) + .route( + "/rooms/{room_id}/messages/{message_id}/pin", + web::delete().to(pin::pin_remove), + ) + // room threads + .route( + "/rooms/{room_id}/threads", + web::get().to(thread::thread_list), + ) + .route( + "/rooms/{room_id}/threads", + web::post().to(thread::thread_create), + ) + .route( + "/rooms/{room_id}/threads/{thread_id}/messages", + web::get().to(thread::thread_messages), + ) + // room members + .route( + "/rooms/{room_id}/members", + web::get().to(member::member_list), + ) + .route( + "/rooms/{room_id}/members", + web::post().to(member::member_add), + ) + .route( + "/rooms/{room_id}/members/{user_id}", + web::delete().to(member::member_remove), + ) + .route( + "/rooms/{room_id}/members/me", + web::delete().to(member::member_leave), + ) + .route( + 
"/rooms/{room_id}/members/me/read-seq", + web::patch().to(member::member_set_read_seq), + ) + .route( + "/rooms/{room_id}/members/{user_id}/role", + web::patch().to(member::member_update_role), + ) + // room reactions + .route( + "/rooms/{room_id}/messages/{message_id}/reactions", + web::post().to(reaction::reaction_add), + ) + .route( + "/rooms/{room_id}/messages/{message_id}/reactions/{emoji}", + web::delete().to(reaction::reaction_remove), + ) + .route( + "/rooms/{room_id}/messages/{message_id}/reactions", + web::get().to(reaction::reaction_get), + ) + // message search + .route( + "/rooms/{room_id}/messages/search", + web::get().to(reaction::message_search), + ) + // message edit history + .route( + "/rooms/{room_id}/messages/{message_id}/edit-history", + web::get().to(draft_and_history::message_edit_history), + ) + // mention notifications + .route( + "/me/mentions", + web::get().to(draft_and_history::mention_list), + ) + .route( + "/me/mentions/read-all", + web::post().to(draft_and_history::mention_read_all), + ) + // room AI + .route("/rooms/{room_id}/ai", web::get().to(ai::ai_list)) + .route("/rooms/{room_id}/ai", web::put().to(ai::ai_upsert)) + .route( + "/rooms/{room_id}/ai/{model_id}", + web::delete().to(ai::ai_delete), + ) + // room category management + .route( + "/room-categories/{category_id}", + web::patch().to(category::category_update), + ) + .route( + "/room-categories/{category_id}", + web::delete().to(category::category_delete), + ) + .route( + "/me/notifications", + web::get().to(notification::notification_list), + ) + .route( + "/me/notifications/{notification_id}/read", + web::post().to(notification::notification_mark_read), + ) + .route( + "/me/notifications/read-all", + web::post().to(notification::notification_mark_all_read), + ) + .route( + "/me/notifications/{notification_id}/archive", + web::post().to(notification::notification_archive), + ), + ); +} diff --git a/libs/api/room/notification.rs b/libs/api/room/notification.rs new file 
mode 100644 index 0000000..dc597c6 --- /dev/null +++ b/libs/api/room/notification.rs @@ -0,0 +1,132 @@ +use crate::{ApiResponse, error::ApiError}; +use actix_web::{HttpResponse, Result, web}; +use room::ws_context::WsUserContext; +use service::AppService; +use session::Session; +use utoipa::IntoParams; +use uuid::Uuid; + +#[derive(Debug, serde::Deserialize, IntoParams)] +pub struct NotificationListQuery { + pub only_unread: Option, + pub archived: Option, + pub limit: Option, +} + +#[utoipa::path( + get, + path = "/api/me/notifications", + params( + ("only_unread" = Option, Query), + ("archived" = Option, Query), + ("limit" = Option, Query), + ), + responses( + (status = 200, description = "List notifications", body = ApiResponse), + (status = 401, description = "Unauthorized"), + ), + tag = "Room" +)] +pub async fn notification_list( + service: web::Data, + session: Session, + query: web::Query, +) -> Result { + let user_id = session + .user() + .ok_or_else(|| ApiError::from(service::error::AppError::Unauthorized))?; + let ctx = WsUserContext::new(user_id); + let resp = service + .room + .notification_list(query.only_unread, query.archived, query.limit, &ctx) + .await + .map_err(ApiError::from)?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + post, + path = "/api/me/notifications/{notification_id}/read", + params( + ("notification_id" = Uuid, Path), + ), + responses( + (status = 200, description = "Mark notification as read"), + (status = 401, description = "Unauthorized"), + (status = 404, description = "Not found"), + ), + tag = "Room" +)] +pub async fn notification_mark_read( + service: web::Data, + session: Session, + path: web::Path, +) -> Result { + let notification_id = path.into_inner(); + let user_id = session + .user() + .ok_or_else(|| ApiError::from(service::error::AppError::Unauthorized))?; + let ctx = WsUserContext::new(user_id); + service + .room + .notification_mark_read(notification_id, &ctx) + .await + .map_err(ApiError::from)?; 
+ Ok(ApiResponse::ok(true).to_response()) +} + +#[utoipa::path( + post, + path = "/api/me/notifications/read-all", + responses( + (status = 200, description = "Mark all notifications as read"), + (status = 401, description = "Unauthorized"), + ), + tag = "Room" +)] +pub async fn notification_mark_all_read( + service: web::Data, + session: Session, +) -> Result { + let user_id = session + .user() + .ok_or_else(|| ApiError::from(service::error::AppError::Unauthorized))?; + let ctx = WsUserContext::new(user_id); + let count = service + .room + .notification_mark_all_read(&ctx) + .await + .map_err(ApiError::from)?; + Ok(ApiResponse::ok(count).to_response()) +} + +#[utoipa::path( + post, + path = "/api/me/notifications/{notification_id}/archive", + params( + ("notification_id" = Uuid, Path), + ), + responses( + (status = 200, description = "Archive notification"), + (status = 401, description = "Unauthorized"), + (status = 404, description = "Not found"), + ), + tag = "Room" +)] +pub async fn notification_archive( + service: web::Data, + session: Session, + path: web::Path, +) -> Result { + let notification_id = path.into_inner(); + let user_id = session + .user() + .ok_or_else(|| ApiError::from(service::error::AppError::Unauthorized))?; + let ctx = WsUserContext::new(user_id); + service + .room + .notification_archive(notification_id, &ctx) + .await + .map_err(ApiError::from)?; + Ok(ApiResponse::ok(true).to_response()) +} diff --git a/libs/api/room/pin.rs b/libs/api/room/pin.rs new file mode 100644 index 0000000..fda1964 --- /dev/null +++ b/libs/api/room/pin.rs @@ -0,0 +1,103 @@ +use crate::{ApiResponse, error::ApiError}; +use actix_web::{HttpResponse, Result, web}; +use room::ws_context::WsUserContext; +use service::AppService; +use session::Session; +use uuid::Uuid; + +#[utoipa::path( + get, + path = "/api/rooms/{room_id}/pins", + params( + ("room_id" = Uuid, Path), + ), + responses( + (status = 200, description = "List room pins", body = ApiResponse>), + (status = 
401, description = "Unauthorized"), + (status = 404, description = "Not found"), + ), + tag = "Room" +)] +pub async fn pin_list( + service: web::Data, + session: Session, + path: web::Path, +) -> Result { + let room_id = path.into_inner(); + let user_id = session + .user() + .ok_or_else(|| ApiError::from(service::error::AppError::Unauthorized))?; + let ctx = WsUserContext::new(user_id); + let resp = service + .room + .room_pin_list(room_id, &ctx) + .await + .map_err(ApiError::from)?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + post, + path = "/api/rooms/{room_id}/messages/{message_id}/pin", + params( + ("room_id" = Uuid, Path), + ("message_id" = Uuid, Path), + ), + responses( + (status = 200, description = "Add room pin", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden"), + (status = 404, description = "Not found"), + ), + tag = "Room" +)] +pub async fn pin_add( + service: web::Data, + session: Session, + path: web::Path<(Uuid, Uuid)>, +) -> Result { + let (_room_id, message_id) = path.into_inner(); + let user_id = session + .user() + .ok_or_else(|| ApiError::from(service::error::AppError::Unauthorized))?; + let ctx = WsUserContext::new(user_id); + let resp = service + .room + .room_pin_add(message_id, &ctx) + .await + .map_err(ApiError::from)?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + delete, + path = "/api/rooms/{room_id}/messages/{message_id}/pin", + params( + ("room_id" = Uuid, Path), + ("message_id" = Uuid, Path), + ), + responses( + (status = 200, description = "Remove room pin"), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden"), + (status = 404, description = "Not found"), + ), + tag = "Room" +)] +pub async fn pin_remove( + service: web::Data, + session: Session, + path: web::Path<(Uuid, Uuid)>, +) -> Result { + let (_room_id, message_id) = path.into_inner(); + let user_id = session + .user() + 
.ok_or_else(|| ApiError::from(service::error::AppError::Unauthorized))?; + let ctx = WsUserContext::new(user_id); + service + .room + .room_pin_remove(message_id, &ctx) + .await + .map_err(ApiError::from)?; + Ok(ApiResponse::ok(true).to_response()) +} diff --git a/libs/api/room/reaction.rs b/libs/api/room/reaction.rs new file mode 100644 index 0000000..2e70bdd --- /dev/null +++ b/libs/api/room/reaction.rs @@ -0,0 +1,155 @@ +use crate::{ApiResponse, error::ApiError}; +use actix_web::{HttpResponse, Result, web}; +use room::ws_context::WsUserContext; +use service::AppService; +use session::Session; +use utoipa::IntoParams; +use uuid::Uuid; + +#[derive(Debug, serde::Deserialize, IntoParams, utoipa::ToSchema)] +pub struct ReactionRequest { + pub emoji: String, +} + +#[derive(Debug, serde::Deserialize, IntoParams)] +pub struct MessageSearchQuery { + pub q: String, + pub limit: Option, + pub offset: Option, +} + +#[utoipa::path( + post, + path = "/api/rooms/{room_id}/messages/{message_id}/reactions", + params( + ("room_id" = Uuid, Path), + ("message_id" = Uuid, Path), + ), + request_body = ReactionRequest, + responses( + (status = 200, description = "Add reaction", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden"), + (status = 404, description = "Not found"), + ), + tag = "Room" +)] +pub async fn reaction_add( + service: web::Data, + session: Session, + path: web::Path<(Uuid, Uuid)>, + body: web::Json, +) -> Result { + let (_room_id, message_id) = path.into_inner(); + let user_id = session + .user() + .ok_or_else(|| ApiError::from(service::error::AppError::Unauthorized))?; + let ctx = WsUserContext::new(user_id); + let resp = service + .room + .message_reaction_add(message_id, body.into_inner().emoji, &ctx) + .await + .map_err(ApiError::from)?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + delete, + path = "/api/rooms/{room_id}/messages/{message_id}/reactions/{emoji}", + params( + 
("room_id" = Uuid, Path), + ("message_id" = Uuid, Path), + ("emoji" = String, Path), + ), + responses( + (status = 200, description = "Remove reaction", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden"), + (status = 404, description = "Not found"), + ), + tag = "Room" +)] +pub async fn reaction_remove( + service: web::Data, + session: Session, + path: web::Path<(Uuid, Uuid, String)>, +) -> Result { + let (_room_id, message_id, emoji) = path.into_inner(); + let user_id = session + .user() + .ok_or_else(|| ApiError::from(service::error::AppError::Unauthorized))?; + let ctx = WsUserContext::new(user_id); + let resp = service + .room + .message_reaction_remove(message_id, emoji, &ctx) + .await + .map_err(ApiError::from)?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/rooms/{room_id}/messages/{message_id}/reactions", + params( + ("room_id" = Uuid, Path), + ("message_id" = Uuid, Path), + ), + responses( + (status = 200, description = "Get reactions", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 404, description = "Not found"), + ), + tag = "Room" +)] +pub async fn reaction_get( + service: web::Data, + session: Session, + path: web::Path<(Uuid, Uuid)>, +) -> Result { + let (_room_id, message_id) = path.into_inner(); + let user_id = session + .user() + .ok_or_else(|| ApiError::from(service::error::AppError::Unauthorized))?; + let ctx = WsUserContext::new(user_id); + let resp = service + .room + .message_reactions_get(message_id, &ctx) + .await + .map_err(ApiError::from)?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/rooms/{room_id}/messages/search", + params( + ("room_id" = Uuid, Path), + ("q" = String, Query), + ("limit" = Option, Query), + ("offset" = Option, Query), + ), + responses( + (status = 200, description = "Search messages", body = ApiResponse), + (status = 401, description = 
"Unauthorized"), + (status = 404, description = "Not found"), + ), + tag = "Room" +)] +pub async fn message_search( + service: web::Data, + session: Session, + path: web::Path, + query: web::Query, +) -> Result { + let room_id = path.into_inner(); + let user_id = session + .user() + .ok_or_else(|| ApiError::from(service::error::AppError::Unauthorized))?; + let ctx = WsUserContext::new(user_id); + let resp = service + .room + .message_search(room_id, &query.q, query.limit, query.offset, &ctx) + .await + .map_err(ApiError::from)?; + Ok(ApiResponse::ok(resp).to_response()) +} diff --git a/libs/api/room/room.rs b/libs/api/room/room.rs new file mode 100644 index 0000000..9235f20 --- /dev/null +++ b/libs/api/room/room.rs @@ -0,0 +1,176 @@ +use crate::{ApiResponse, error::ApiError}; +use actix_web::{HttpResponse, Result, web}; +use room::ws_context::WsUserContext; +use service::AppService; +use session::Session; +use utoipa::IntoParams; +use uuid::Uuid; + +#[derive(Debug, serde::Deserialize, IntoParams)] +pub struct RoomListQuery { + pub only_public: Option, +} + +#[utoipa::path( + get, + path = "/api/project_room/{project_name}/rooms", + params( + ("project_name" = String, Path), + ("only_public" = Option, Query), + ), + responses( + (status = 200, description = "List rooms", body = ApiResponse>), + (status = 401, description = "Unauthorized"), + (status = 404, description = "Not found"), +), + tag = "Room" +)] +pub async fn room_list( + service: web::Data, + session: Session, + path: web::Path, + query: web::Query, +) -> Result { + let project_name = path.into_inner(); + let user_id = session + .user() + .ok_or_else(|| ApiError::from(service::error::AppError::Unauthorized))?; + let ctx = WsUserContext::new(user_id); + let resp = service + .room + .room_list(project_name, query.only_public, &ctx) + .await + .map_err(ApiError::from)?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/rooms/{room_id}", + params( + ("room_id" = Uuid, 
Path), + ), + responses( + (status = 200, description = "Get room", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 404, description = "Not found"), +), + tag = "Room" +)] +pub async fn room_get( + service: web::Data, + session: Session, + path: web::Path, +) -> Result { + let room_id = path.into_inner(); + let user_id = session + .user() + .ok_or_else(|| ApiError::from(service::error::AppError::Unauthorized))?; + let ctx = WsUserContext::new(user_id); + let resp = service + .room + .room_get(room_id, &ctx) + .await + .map_err(ApiError::from)?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + post, + path = "/api/project_room/{project_name}/rooms", + params( + ("project_name" = String, Path), + ), + request_body = room::RoomCreateRequest, + responses( + (status = 200, description = "Create room", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden"), + (status = 404, description = "Not found"), +), + tag = "Room" +)] +pub async fn room_create( + service: web::Data, + session: Session, + path: web::Path, + body: web::Json, +) -> Result { + let project_name = path.into_inner(); + let user_id = session + .user() + .ok_or_else(|| ApiError::from(service::error::AppError::Unauthorized))?; + let ctx = WsUserContext::new(user_id); + let resp = service + .room + .room_create(project_name, body.into_inner(), &ctx) + .await + .map_err(ApiError::from)?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + patch, + path = "/api/rooms/{room_id}", + params( + ("room_id" = Uuid, Path), + ), + request_body = room::RoomUpdateRequest, + responses( + (status = 200, description = "Update room", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden"), + (status = 404, description = "Not found"), +), + tag = "Room" +)] +pub async fn room_update( + service: web::Data, + session: Session, + path: web::Path, + 
body: web::Json, +) -> Result { + let room_id = path.into_inner(); + let user_id = session + .user() + .ok_or_else(|| ApiError::from(service::error::AppError::Unauthorized))?; + let ctx = WsUserContext::new(user_id); + let resp = service + .room + .room_update(room_id, body.into_inner(), &ctx) + .await + .map_err(ApiError::from)?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + delete, + path = "/api/rooms/{room_id}", + params( + ("room_id" = Uuid, Path), + ), + responses( + (status = 200, description = "Delete room"), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden"), + (status = 404, description = "Not found"), +), + tag = "Room" +)] +pub async fn room_delete( + service: web::Data, + session: Session, + path: web::Path, +) -> Result { + let room_id = path.into_inner(); + let user_id = session + .user() + .ok_or_else(|| ApiError::from(service::error::AppError::Unauthorized))?; + let ctx = WsUserContext::new(user_id); + service + .room + .room_delete(room_id, &ctx) + .await + .map_err(ApiError::from)?; + Ok(ApiResponse::ok(true).to_response()) +} diff --git a/libs/api/room/thread.rs b/libs/api/room/thread.rs new file mode 100644 index 0000000..fd76cf0 --- /dev/null +++ b/libs/api/room/thread.rs @@ -0,0 +1,121 @@ +use crate::{ApiResponse, error::ApiError}; +use actix_web::{HttpResponse, Result, web}; +use room::ws_context::WsUserContext; +use service::AppService; +use session::Session; +use utoipa::IntoParams; +use uuid::Uuid; + +#[derive(Debug, serde::Deserialize, IntoParams)] +pub struct ThreadMessagesQuery { + pub before_seq: Option, + pub after_seq: Option, + pub limit: Option, +} + +#[utoipa::path( + get, + path = "/api/rooms/{room_id}/threads", + params( + ("room_id" = Uuid, Path), + ), + responses( + (status = 200, description = "List room threads", body = ApiResponse>), + (status = 401, description = "Unauthorized"), + (status = 404, description = "Not found"), + ), + tag = "Room" +)] +pub async fn 
thread_list(
    service: web::Data<AppService>,
    session: Session,
    path: web::Path<Uuid>,
) -> Result<HttpResponse> {
    let room_id = path.into_inner();
    // NOTE(review): generic arguments reconstructed — the patch collapse
    // stripped everything between angle brackets.
    let user_id = session
        .user()
        .ok_or_else(|| ApiError::from(service::error::AppError::Unauthorized))?;
    let ctx = WsUserContext::new(user_id);
    let resp = service
        .room
        .room_thread_list(room_id, &ctx)
        .await
        .map_err(ApiError::from)?;
    Ok(ApiResponse::ok(resp).to_response())
}

#[utoipa::path(
    post,
    path = "/api/rooms/{room_id}/threads",
    params(
        ("room_id" = Uuid, Path),
    ),
    request_body = room::RoomThreadCreateRequest,
    responses(
        (status = 200, description = "Create room thread", body = ApiResponse<room::RoomThread>),
        (status = 401, description = "Unauthorized"),
        (status = 403, description = "Forbidden"),
        (status = 404, description = "Not found"),
    ),
    tag = "Room"
)]
/// Create a thread in a room, anchored at `parent_seq` in the request body.
pub async fn thread_create(
    service: web::Data<AppService>,
    session: Session,
    path: web::Path<Uuid>,
    body: web::Json<room::RoomThreadCreateRequest>,
) -> Result<HttpResponse> {
    let room_id = path.into_inner();
    let user_id = session
        .user()
        .ok_or_else(|| ApiError::from(service::error::AppError::Unauthorized))?;
    let ctx = WsUserContext::new(user_id);
    let resp = service
        .room
        .room_thread_create(room_id, body.into_inner(), &ctx)
        .await
        .map_err(ApiError::from)?;
    Ok(ApiResponse::ok(resp).to_response())
}

#[utoipa::path(
    get,
    path = "/api/rooms/{room_id}/threads/{thread_id}/messages",
    params(
        ("room_id" = Uuid, Path),
        ("thread_id" = Uuid, Path),
        ("before_seq" = Option<i64>, Query),
        ("after_seq" = Option<i64>, Query),
        ("limit" = Option<i64>, Query),
    ),
    responses(
        (status = 200, description = "List thread messages", body = ApiResponse<Vec<room::RoomMessage>>),
        (status = 401, description = "Unauthorized"),
        (status = 404, description = "Not found"),
    ),
    tag = "Room"
)]
/// Page through a thread's messages. `room_id` is accepted for URL symmetry
/// but the lookup below is by `thread_id` alone.
pub async fn thread_messages(
    service: web::Data<AppService>,
    session: Session,
    path: web::Path<(Uuid, Uuid)>,
    query: web::Query<ThreadMessagesQuery>,
) -> Result<HttpResponse> {
    let (_room_id, thread_id) = path.into_inner();
    let user_id = session
        .user()
        .ok_or_else(|| ApiError::from(service::error::AppError::Unauthorized))?;
    let ctx = WsUserContext::new(user_id);
    let resp = service
        .room
        .room_thread_messages(
            thread_id,
            query.before_seq,
            query.after_seq,
            query.limit,
            &ctx,
        )
        .await
        .map_err(ApiError::from)?;
    Ok(ApiResponse::ok(resp).to_response())
}
diff --git a/libs/api/room/ws.rs b/libs/api/room/ws.rs
new file mode 100644
index 0000000..8d79481
--- /dev/null
+++ b/libs/api/room/ws.rs
@@ -0,0 +1,705 @@
//! WebSocket endpoints: per-room and per-project push streams. Sockets are
//! push-only; clients that send text/binary frames are disconnected.
use std::sync::{Arc, LazyLock};
use std::time::{Duration, Instant};

use actix_web::{HttpMessage, HttpRequest, HttpResponse, web};
use actix_ws::Message as WsMessage;
use serde::Serialize;
use uuid::Uuid;

use queue::{ProjectRoomEvent, RoomMessageEvent, RoomMessageStreamChunkEvent};
use service::AppService;
use session::Session;

/// Hard cap on inbound text frames (the socket is push-only anyway).
const MAX_TEXT_MESSAGE_LEN: usize = 64 * 1024;
/// Inbound frame budget per `RATE_LIMIT_WINDOW`.
const MAX_MESSAGES_PER_SECOND: u32 = 10;

const HEARTBEAT_INTERVAL: Duration = Duration::from_secs(30);
const HEARTBEAT_TIMEOUT: Duration = Duration::from_secs(60);
const MAX_IDLE_TIMEOUT: Duration = Duration::from_secs(300);
const RATE_LIMIT_WINDOW: Duration = Duration::from_secs(1);

/// Authenticate WebSocket request: try query parameter token first, then fall back to session.
+async fn authenticate_ws_request( + service: &AppService, + req: &HttpRequest, +) -> Result { + // Try query parameter token first (one-time use via Redis) + if let Some(token) = req.uri().query().and_then(|q| { + q.split('&') + .find(|p| p.starts_with("token=")) + .and_then(|p| p.split('=').nth(1)) + }) { + match service.ws_token.validate_token(token).await { + Ok(uid) => { + slog::debug!(service.logs, "WS: token auth successful for uid={}", uid); + return Ok(uid); + } + Err(_) => { + slog::warn!(service.logs, "WS: token auth failed"); + service + .room + .room_manager + .metrics + .ws_auth_failures + .increment(1); + return Err(crate::error::ApiError(service::error::AppError::Unauthorized).into()); + } + } + } + + // Fall back to session-based auth + let session = Session::get_session(&mut req.extensions_mut()); + match session.user() { + Some(uid) => Ok(uid), + None => { + service + .room + .room_manager + .metrics + .ws_auth_failures + .increment(1); + Err(crate::error::ApiError(service::error::AppError::Unauthorized).into()) + } + } +} + +async fn check_ws_rate_limit( + log: &slog::Logger, + manager: &Arc, + message_count: &mut u32, + rate_window_start: &mut Instant, +) -> bool { + if rate_window_start.elapsed() > RATE_LIMIT_WINDOW { + *message_count = 0; + *rate_window_start = Instant::now(); + } + *message_count += 1; + if *message_count > MAX_MESSAGES_PER_SECOND { + slog::warn!(log, "WS rate limit exceeded"); + manager.metrics.ws_rate_limit_hits.increment(1); + true + } else { + false + } +} + +#[derive(Clone, Serialize)] +#[serde(tag = "type", rename_all = "snake_case")] +pub enum WsEventPayload { + RoomMessage(RoomMessagePayload), + ProjectEvent(ProjectEventPayload), + AiStreamChunk(AiStreamChunkPayload), +} + +#[derive(Clone, Serialize)] +pub struct AiStreamChunkPayload { + pub message_id: Uuid, + pub room_id: Uuid, + pub content: String, + pub done: bool, + pub error: Option, +} + +impl From for AiStreamChunkPayload { + fn from(e: 
RoomMessageStreamChunkEvent) -> Self { + Self { + message_id: e.message_id, + room_id: e.room_id, + content: e.content, + done: e.done, + error: e.error, + } + } +} + +impl From> for AiStreamChunkPayload { + fn from(e: Arc) -> Self { + AiStreamChunkPayload::from((&*e).clone()) + } +} + +#[derive(Clone, Serialize)] +pub struct RoomMessagePayload { + pub id: Uuid, + pub room_id: Uuid, + pub sender_type: String, + pub sender_id: Option, + pub thread_id: Option, + pub content: String, + pub content_type: String, + pub send_at: chrono::DateTime, + pub seq: i64, + pub display_name: Option, +} + +impl From for RoomMessagePayload { + fn from(e: RoomMessageEvent) -> Self { + Self { + id: e.id, + room_id: e.room_id, + sender_type: e.sender_type, + sender_id: e.sender_id, + thread_id: e.thread_id, + content: e.content, + content_type: e.content_type, + send_at: e.send_at, + seq: e.seq, + display_name: e.display_name, + } + } +} + +impl From> for RoomMessagePayload { + fn from(e: Arc) -> Self { + RoomMessagePayload::from((&*e).clone()) + } +} + +impl From<&RoomMessageEvent> for RoomMessagePayload { + fn from(e: &RoomMessageEvent) -> Self { + Self { + id: e.id, + room_id: e.room_id, + sender_type: e.sender_type.clone(), + sender_id: e.sender_id, + thread_id: e.thread_id, + content: e.content.clone(), + content_type: e.content_type.clone(), + send_at: e.send_at, + seq: e.seq, + display_name: e.display_name.clone(), + } + } +} + +#[derive(Clone, Serialize)] +pub struct ProjectEventPayload { + pub event_type: String, + pub project_id: Uuid, + pub room_id: Option, + pub category_id: Option, + pub message_id: Option, + pub seq: Option, + pub timestamp: chrono::DateTime, +} + +impl From for ProjectEventPayload { + fn from(e: ProjectRoomEvent) -> Self { + Self { + event_type: e.event_type, + project_id: e.project_id, + room_id: e.room_id, + category_id: e.category_id, + message_id: e.message_id, + seq: e.seq, + timestamp: e.timestamp, + } + } +} + +impl From> for ProjectEventPayload { 
+ fn from(e: Arc) -> Self { + ProjectEventPayload::from((&*e).clone()) + } +} + +impl From<&ProjectRoomEvent> for ProjectEventPayload { + fn from(e: &ProjectRoomEvent) -> Self { + Self { + event_type: e.event_type.clone(), + project_id: e.project_id, + room_id: e.room_id, + category_id: e.category_id, + message_id: e.message_id, + seq: e.seq, + timestamp: e.timestamp, + } + } +} + +#[derive(Clone, Serialize)] +pub struct WsOutEvent { + #[serde(skip_serializing_if = "Option::is_none")] + pub room_id: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub project_id: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub event: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub error: Option, +} + +pub(crate) fn validate_origin(req: &HttpRequest) -> bool { + static ALLOWED_ORIGINS: LazyLock> = LazyLock::new(|| { + std::env::var("WS_ALLOWED_ORIGINS") + .map(|v| v.split(',').map(|s| s.trim().to_string()).collect()) + .unwrap_or_else(|_| { + vec![ + "http://localhost".to_string(), + "https://localhost".to_string(), + "http://127.0.0.1".to_string(), + "https://127.0.0.1".to_string(), + "ws://localhost".to_string(), + "wss://localhost".to_string(), + "ws://127.0.0.1".to_string(), + "wss://127.0.0.1".to_string(), + ] + }) + }); + + let Some(origin) = req.headers().get("origin") else { + return true; + }; + let Ok(origin_str) = origin.to_str() else { + return false; + }; + + // Exact match (with port) + if ALLOWED_ORIGINS.iter().any(|allowed| origin_str == *allowed) { + return true; + } + + // Strip port: http://localhost:5173 -> http://localhost, http://[::1]:5173 -> http://[::1] + let origin_without_port = if let Some((scheme_host, port)) = origin_str.rsplit_once(':') { + if port.chars().all(|c| c.is_ascii_digit()) { + scheme_host.to_string() + } else { + origin_str.to_string() + } + } else { + origin_str.to_string() + }; + + if ALLOWED_ORIGINS + .iter() + .any(|allowed| origin_without_port == *allowed) + { + return true; + } + + // 
Also check if the full origin starts with any allowed prefix + ALLOWED_ORIGINS + .iter() + .any(|allowed| origin_str.starts_with(allowed)) +} + +pub async fn ws_room( + room_id: web::Path, + service: web::Data, + req: HttpRequest, + stream: web::Payload, +) -> Result { + let room_id = room_id.into_inner(); + + // Authenticate: try query parameter token first, then session + let user_id = authenticate_ws_request(&service, &req).await?; + + let origin_val = req + .headers() + .get("origin") + .and_then(|v| v.to_str().ok()) + .unwrap_or("(none)"); + slog::debug!( + service.logs, + "WS room connection attempt user_id={} room_id={} origin={}", + user_id, + room_id, + origin_val + ); + + if !validate_origin(&req) { + slog::warn!( + service.logs, + "WS room: origin rejected user_id={} room_id={} origin={}", + user_id, + room_id, + origin_val + ); + service + .room + .room_manager + .metrics + .ws_auth_failures + .increment(1); + return Err(crate::error::ApiError(service::error::AppError::BadRequest( + "Invalid origin".into(), + )) + .into()); + } + + if let Err(e) = service.room.check_room_access(room_id, user_id).await { + slog::warn!( + service.logs, + "WS room: access denied for user_id={} room_id={} error={}", + user_id, + room_id, + e + ); + return Err(crate::error::ApiError::from(e).into()); + } + + let manager = service.room.room_manager.clone(); + manager.metrics.ws_connections_active.increment(1.0); + manager.metrics.ws_connections_total.increment(1); + manager.metrics.incr_room_connections(room_id).await; + + let (response, mut session, mut msg_stream) = actix_ws::handle(&req, stream)?; + + actix::spawn(async move { + let mut receiver = match manager.subscribe(room_id, user_id).await { + Ok(r) => r, + Err(e) => { + slog::error!(service.logs, "Failed to subscribe to room: {}", e); + return; + } + }; + let mut stream_rx = manager.subscribe_room_stream(room_id).await; + let mut shutdown_rx = manager.subscribe_shutdown(); + + let mut last_heartbeat = Instant::now(); 
+ let mut last_activity = Instant::now(); + let mut heartbeat_interval = tokio::time::interval(HEARTBEAT_INTERVAL); + heartbeat_interval.tick().await; + + let mut message_count: u32 = 0; + let mut rate_window_start = Instant::now(); + + loop { + tokio::select! { + _ = heartbeat_interval.tick() => { + if last_heartbeat.elapsed() > HEARTBEAT_TIMEOUT { + slog::warn!(service.logs, "WS room {} heartbeat timeout for user {}", room_id, user_id); + manager.metrics.ws_heartbeat_timeout_total.increment(1); + let _ = session.close(Some(actix_ws::CloseCode::Policy.into())).await; + break; + } + + if last_activity.elapsed() > MAX_IDLE_TIMEOUT { + slog::info!(service.logs, "WS room {} idle timeout for user {}", room_id, user_id); + manager.metrics.ws_idle_timeout_total.increment(1); + let _ = session.close(Some(actix_ws::CloseCode::Normal.into())).await; + break; + } + + if session.ping(b"").await.is_err() { + break; + } + manager.metrics.ws_heartbeat_sent_total.increment(1); + } + _ = shutdown_rx.recv() => { + slog::info!(service.logs, "WS room {} shutdown", room_id); + let _ = session.close(Some(actix_ws::CloseCode::Normal.into())).await; + break; + } + msg = msg_stream.recv() => { + match msg { + Some(Ok(WsMessage::Ping(bytes))) => { + if session.pong(&bytes).await.is_err() { + break; + } + last_heartbeat = Instant::now(); + } + Some(Ok(WsMessage::Pong(_))) => { + last_heartbeat = Instant::now(); + } + #[allow(unused_assignments)] + Some(Ok(WsMessage::Text(text))) => { + if last_activity.elapsed() > MAX_IDLE_TIMEOUT { + slog::info!(service.logs, "WS room {} idle timeout for user {}", room_id, user_id); + manager.metrics.ws_idle_timeout_total.increment(1); + let _ = session.close(Some(actix_ws::CloseCode::Normal.into())).await; + break; + } + last_activity = Instant::now(); + if check_ws_rate_limit(&service.logs, &manager, &mut message_count, &mut rate_window_start).await { + let _ = session.text(serde_json::json!({ + "type": "error", + "error": "rate_limit_exceeded", + 
"max_per_second": MAX_MESSAGES_PER_SECOND + }).to_string()).await; + break; + } + + if text.len() > MAX_TEXT_MESSAGE_LEN { + slog::warn!(service.logs, "WS room {} message too long from user {}: {} bytes", room_id, user_id, text.len()); + let _ = session.text(serde_json::json!({ + "type": "error", + "error": "message_too_long", + "max_bytes": MAX_TEXT_MESSAGE_LEN + }).to_string()).await; + break; + } + + slog::warn!(service.logs, "WS room {} unexpected text message from user {} ({} bytes) — WS is push-only, use REST to send messages", room_id, user_id, text.len()); + let _ = session.text(serde_json::json!({ + "type": "error", + "error": "ws_push_only", + "message": "WebSocket is for receiving messages only. Use the REST API to send messages." + }).to_string()).await; + break; + } + Some(Ok(WsMessage::Binary(_))) => { + if check_ws_rate_limit(&service.logs, &manager, &mut message_count, &mut rate_window_start).await { + break; + } + slog::warn!(service.logs, "WS room {} unexpected binary from user {}", room_id, user_id); + break; + } + Some(Ok(WsMessage::Close(reason))) => { + let _ = session.close(reason).await; + break; + } + Some(Ok(_)) => {} + Some(Err(e)) => { + slog::warn!(service.logs, "WS room error: {}", e); + break; + } + None => break, + } + } + event = receiver.recv() => { + match event { + Ok(event) => { + let payload = WsOutEvent { + room_id: Some(room_id), + project_id: None, + event: Some(WsEventPayload::RoomMessage(event.into())), + error: None, + }; + match serde_json::to_string(&payload) { + Ok(json) => { + if session.text(json).await.is_err() { + break; + } + } + Err(e) => { + slog::error!(service.logs, "WS serialize error: {}", e); + break; + } + } + } + Err(_) => break, + } + } + chunk_event = stream_rx.recv() => { + match chunk_event { + Ok(chunk) => { + let payload = WsOutEvent { + room_id: Some(room_id), + project_id: None, + event: Some(WsEventPayload::AiStreamChunk(chunk.into())), + error: None, + }; + match serde_json::to_string(&payload) 
{ + Ok(json) => { + if session.text(json).await.is_err() { + break; + } + } + Err(e) => { + slog::error!(service.logs, "WS streaming serialize error: {}", e); + } + } + } + Err(_) => {} + } + } + } + } + + manager.unsubscribe(room_id, user_id).await; + manager.metrics.ws_connections_active.decrement(1.0); + manager.metrics.ws_disconnections_total.increment(1); + manager.metrics.dec_room_connections(room_id).await; + }); + + Ok(response) +} + +pub async fn ws_project( + project_id: web::Path, + service: web::Data, + req: HttpRequest, + stream: web::Payload, +) -> Result { + let project_id = project_id.into_inner(); + + // Authenticate: try query parameter token first, then session + let user_id = authenticate_ws_request(&service, &req).await?; + + if !validate_origin(&req) { + service + .room + .room_manager + .metrics + .ws_auth_failures + .increment(1); + return Err(crate::error::ApiError(service::error::AppError::BadRequest( + "Invalid origin".into(), + )) + .into()); + } + + if let Err(e) = service.room.check_project_member(project_id, user_id).await { + service + .room + .room_manager + .metrics + .ws_auth_failures + .increment(1); + return Err(crate::error::ApiError::from(e).into()); + } + + if let Err(e) = service + .room + .room_manager + .check_project_connection_rate(project_id, user_id) + .await + { + service + .room + .room_manager + .metrics + .ws_rate_limit_hits + .increment(1); + return Err(crate::error::ApiError::from(e).into()); + } + + let manager = service.room.room_manager.clone(); + manager.metrics.ws_connections_active.increment(1.0); + manager.metrics.ws_connections_total.increment(1); + + let (response, mut session, mut msg_stream) = actix_ws::handle(&req, stream)?; + + actix::spawn(async move { + let mut receiver = match manager.subscribe_project(project_id, user_id).await { + Ok(r) => r, + Err(e) => { + slog::error!(service.logs, "Failed to subscribe to project: {}", e); + return; + } + }; + let mut shutdown_rx = 
manager.subscribe_shutdown();

        let mut last_heartbeat = Instant::now();
        let mut last_activity = Instant::now();
        let mut heartbeat_interval = tokio::time::interval(HEARTBEAT_INTERVAL);
        // Consume the interval's immediate first tick.
        heartbeat_interval.tick().await;

        let mut message_count: u32 = 0;
        let mut rate_window_start = Instant::now();

        loop {
            tokio::select! {
                _ = heartbeat_interval.tick() => {
                    if last_heartbeat.elapsed() > HEARTBEAT_TIMEOUT {
                        slog::warn!(service.logs, "WS project {} heartbeat timeout for user {}", project_id, user_id);
                        manager.metrics.ws_heartbeat_timeout_total.increment(1);
                        let _ = session.close(Some(actix_ws::CloseCode::Policy.into())).await;
                        break;
                    }

                    if last_activity.elapsed() > MAX_IDLE_TIMEOUT {
                        slog::info!(service.logs, "WS project {} idle timeout for user {}", project_id, user_id);
                        manager.metrics.ws_idle_timeout_total.increment(1);
                        let _ = session.close(Some(actix_ws::CloseCode::Normal.into())).await;
                        break;
                    }

                    if session.ping(b"").await.is_err() {
                        break;
                    }
                    manager.metrics.ws_heartbeat_sent_total.increment(1);
                }
                _ = shutdown_rx.recv() => {
                    slog::info!(service.logs, "WS project {} shutdown", project_id);
                    let _ = session.close(Some(actix_ws::CloseCode::Normal.into())).await;
                    break;
                }
                msg = msg_stream.recv() => {
                    match msg {
                        Some(Ok(WsMessage::Ping(bytes))) => {
                            if session.pong(&bytes).await.is_err() {
                                break;
                            }
                            last_heartbeat = Instant::now();
                        }
                        Some(Ok(WsMessage::Pong(_))) => {
                            last_heartbeat = Instant::now();
                        }
                        #[allow(unused_assignments)]
                        Some(Ok(WsMessage::Text(text))) => {
                            if last_activity.elapsed() > MAX_IDLE_TIMEOUT {
                                slog::info!(service.logs, "WS project {} idle timeout for user {}", project_id, user_id);
                                manager.metrics.ws_idle_timeout_total.increment(1);
                                let _ = session.close(Some(actix_ws::CloseCode::Normal.into())).await;
                                break;
                            }
                            last_activity = Instant::now();
                            slog::warn!(service.logs, "WS project {} unexpected text from user {} ({} bytes) — WS is push-only", project_id, user_id, text.len());
                            let _ = session.text(serde_json::json!({
                                "type": "error",
                                "error": "ws_push_only",
                                "message": "WebSocket is for receiving events only."
                            }).to_string()).await;
                            break;
                        }
                        Some(Ok(WsMessage::Binary(_))) => {
                            if check_ws_rate_limit(&service.logs, &manager, &mut message_count, &mut rate_window_start).await {
                                slog::warn!(service.logs, "WS project {} rate limit exceeded for user {}", project_id, user_id);
                                let _ = session.text(serde_json::json!({
                                    "type": "error",
                                    "error": "rate_limit_exceeded",
                                    "max_per_second": MAX_MESSAGES_PER_SECOND
                                }).to_string()).await;
                                break;
                            }
                            slog::warn!(service.logs, "WS project {} unexpected binary from user {}", project_id, user_id);
                            break;
                        }
                        Some(Ok(WsMessage::Close(reason))) => {
                            let _ = session.close(reason).await;
                            break;
                        }
                        Some(Ok(_)) => {}
                        Some(Err(e)) => {
                            slog::warn!(service.logs, "WS project error: {}", e);
                            break;
                        }
                        None => break,
                    }
                }
                event = receiver.recv() => {
                    match event {
                        Ok(event) => {
                            let payload = WsOutEvent {
                                room_id: event.room_id,
                                project_id: Some(project_id),
                                event: Some(WsEventPayload::ProjectEvent(event.into())),
                                error: None,
                            };
                            match serde_json::to_string(&payload) {
                                Ok(json) => {
                                    if session.text(json).await.is_err() {
                                        break;
                                    }
                                }
                                Err(e) => {
                                    slog::error!(service.logs, "WS serialize error: {}", e);
                                    break;
                                }
                            }
                        }
                        Err(_) => break,
                    }
                }
            }
        }

        manager.unsubscribe_project(project_id, user_id).await;
        manager.metrics.ws_connections_active.decrement(1.0);
        manager.metrics.ws_disconnections_total.increment(1);
    });

    Ok(response)
}
diff --git a/libs/api/room/ws_handler.rs b/libs/api/room/ws_handler.rs
new file mode 100644
index 0000000..8a1ae1b
--- /dev/null
+++ b/libs/api/room/ws_handler.rs
@@ -0,0 +1,729 @@
//! Dispatcher for JSON-RPC-style requests arriving over the WebSocket,
//! executed on behalf of one authenticated user.
use crate::error::ApiError;
use actix_web::Result;
use room::ws_context::WsUserContext;
use service::AppService;
use std::sync::Arc;
use uuid::Uuid;

pub struct
WsRequestHandler {
    // NOTE(review): generic arguments reconstructed after the patch
    // collapse stripped them — confirm against the original file.
    service: Arc<AppService>,
    user_id: Uuid,
}

impl WsRequestHandler {
    pub fn new(service: Arc<AppService>, user_id: Uuid) -> Self {
        Self { service, user_id }
    }

    /// Unwrap a required request parameter, mapping `None` to a
    /// `BadRequest("<name> required")` error. Extracted to replace the
    /// identical `ok_or_else` boilerplate previously repeated in every arm;
    /// error strings are unchanged.
    fn required<T>(value: Option<T>, name: &str) -> Result<T, ApiError> {
        value.ok_or_else(|| {
            ApiError::from(service::error::AppError::BadRequest(format!(
                "{name} required"
            )))
        })
    }

    /// Dispatch one request and wrap the outcome in a `WsResponse`.
    pub async fn handle(&self, request: WsRequest) -> WsResponse {
        let request_id = request.request_id;
        let action_str = request.action.to_string();
        match self.handle_action(request).await {
            Ok(data) => WsResponse::success(request_id, &action_str, data),
            Err(err) => WsResponse::from_api_error(request_id, &action_str, err),
        }
    }

    // Error type is ApiError, as implied by `from_api_error` above.
    async fn handle_action(&self, request: WsRequest) -> Result<WsResponseData, ApiError> {
        let params = request.params();
        let ctx = WsUserContext::new(self.user_id);
        match request.action {
            WsAction::RoomList => {
                let project_name = Self::required(params.project_name.clone(), "project_name")?;
                let rooms = self
                    .service
                    .room
                    .room_list(project_name, params.only_public, &ctx)
                    .await
                    .map_err(ApiError::from)?;
                Ok(WsResponseData::room_list(rooms))
            }
            WsAction::RoomGet => {
                let room_id = Self::required(params.room_id, "room_id")?;
                let room = self
                    .service
                    .room
                    .room_get(room_id, &ctx)
                    .await
                    .map_err(ApiError::from)?;
                Ok(WsResponseData::room(room))
            }
            WsAction::RoomCreate => {
                let project_name = Self::required(params.project_name.clone(), "project_name")?;
                let room = self
                    .service
                    .room
                    .room_create(
                        project_name,
                        room::RoomCreateRequest {
                            room_name: params.room_name.clone().unwrap_or_default(),
                            public: params.room_public.unwrap_or(false),
                            category: params.room_category,
                        },
                        &ctx,
                    )
                    .await
                    .map_err(ApiError::from)?;
                Ok(WsResponseData::room(room))
            }
            WsAction::RoomUpdate => {
                let room_id = Self::required(params.room_id, "room_id")?;
                let room = self
                    .service
                    .room
                    .room_update(
                        room_id,
                        room::RoomUpdateRequest {
                            room_name: params.room_name.clone(),
                            public: params.room_public,
                            category: params.room_category,
                        },
                        &ctx,
                    )
                    .await
                    .map_err(ApiError::from)?;
                Ok(WsResponseData::room(room))
            }
            WsAction::RoomDelete => {
                let room_id = Self::required(params.room_id, "room_id")?;
                self.service
                    .room
                    .room_delete(room_id, &ctx)
                    .await
                    .map_err(ApiError::from)?;
                Ok(WsResponseData::bool(true))
            }
            WsAction::CategoryList => {
                let project_name = Self::required(params.project_name.clone(), "project_name")?;
                let categories = self
                    .service
                    .room
                    .room_category_list(project_name, &ctx)
                    .await
                    .map_err(ApiError::from)?;
                Ok(WsResponseData::category_list(categories))
            }
            WsAction::CategoryCreate => {
                let project_name = Self::required(params.project_name.clone(), "project_name")?;
                let category = self
                    .service
                    .room
                    .room_category_create(
                        project_name,
                        room::RoomCategoryCreateRequest {
                            name: params.name.clone().unwrap_or_default(),
                            position: params.position,
                        },
                        &ctx,
                    )
                    .await
                    .map_err(ApiError::from)?;
                Ok(WsResponseData::category(category))
            }
            WsAction::CategoryUpdate => {
                let category_id = Self::required(params.category_id, "category_id")?;
                let category = self
                    .service
                    .room
                    .room_category_update(
                        category_id,
                        room::RoomCategoryUpdateRequest {
                            name: params.name.clone(),
                            position: params.position,
                        },
                        &ctx,
                    )
                    .await
                    .map_err(ApiError::from)?;
                Ok(WsResponseData::category(category))
            }
            WsAction::CategoryDelete => {
                let category_id = Self::required(params.category_id, "category_id")?;
                self.service
                    .room
                    .room_category_delete(category_id, &ctx)
                    .await
                    .map_err(ApiError::from)?;
                Ok(WsResponseData::bool(true))
            }
            WsAction::MessageList => {
                let room_id = Self::required(params.room_id, "room_id")?;
                let messages = self
                    .service
                    .room
                    .room_message_list(
                        room_id,
                        params.before_seq,
                        params.after_seq,
                        params.limit,
                        &ctx,
                    )
                    .await
                    .map_err(ApiError::from)?;
                Ok(WsResponseData::message_list(messages))
            }
            WsAction::MessageCreate => {
                let room_id = Self::required(params.room_id, "room_id")?;
                let message = self
                    .service
                    .room
                    .room_message_create(
                        room_id,
                        room::RoomMessageCreateRequest {
                            content: params.content.clone().unwrap_or_default(),
                            content_type: params.content_type.clone(),
                            thread: params.thread_id,
                            in_reply_to: params.in_reply_to,
                        },
                        &ctx,
                    )
                    .await
                    .map_err(ApiError::from)?;
                Ok(WsResponseData::message(message))
            }
            WsAction::MessageUpdate => {
                let message_id = Self::required(params.message_id, "message_id")?;
                let message = self
                    .service
                    .room
                    .room_message_update(
                        message_id,
                        room::RoomMessageUpdateRequest {
                            content: params.content.clone().unwrap_or_default(),
                        },
                        &ctx,
                    )
                    .await
                    .map_err(ApiError::from)?;
                Ok(WsResponseData::message(message))
            }
            WsAction::MessageRevoke => {
                let message_id = Self::required(params.message_id, "message_id")?;
                let message = self
                    .service
                    .room
                    .room_message_revoke(message_id, &ctx)
                    .await
                    .map_err(ApiError::from)?;
                Ok(WsResponseData::message(message))
            }
            WsAction::MessageGet => {
                let message_id = Self::required(params.message_id, "message_id")?;
                let message = self
                    .service
                    .room
                    .room_message_get(message_id, &ctx)
                    .await
                    .map_err(ApiError::from)?;
                Ok(WsResponseData::message(message))
            }
            WsAction::MemberList => {
                let room_id = Self::required(params.room_id, "room_id")?;
                let members = self
                    .service
                    .room
                    .room_member_list(room_id, &ctx)
                    .await
                    .map_err(ApiError::from)?;
                Ok(WsResponseData::member_list(members))
            }
            WsAction::MemberAdd => {
                let room_id = Self::required(params.room_id, "room_id")?;
                let user_id = Self::required(params.user_id, "user_id")?;
                let member = self
                    .service
                    .room
                    .room_member_add(
                        room_id,
                        room::RoomMemberAddRequest {
                            user_id,
                            role: params.role.clone(),
                        },
                        &ctx,
                    )
                    .await
                    .map_err(ApiError::from)?;
                Ok(WsResponseData::member(member))
            }
            WsAction::MemberRemove => {
                let room_id = Self::required(params.room_id, "room_id")?;
                let user_id = Self::required(params.user_id, "user_id")?;
                self.service
                    .room
                    .room_member_remove(room_id, user_id, &ctx)
                    .await
                    .map_err(ApiError::from)?;
                Ok(WsResponseData::bool(true))
            }
            WsAction::MemberLeave => {
                let room_id = Self::required(params.room_id, "room_id")?;
                self.service
                    .room
                    .room_member_leave(room_id, &ctx)
                    .await
                    .map_err(ApiError::from)?;
                Ok(WsResponseData::bool(true))
            }
            WsAction::MemberSetReadSeq => {
                let room_id = Self::required(params.room_id, "room_id")?;
                let last_read_seq = Self::required(params.last_read_seq, "last_read_seq")?;
                let member = self
                    .service
                    .room
                    .room_member_set_read_seq(
                        room_id,
                        room::RoomMemberReadSeqRequest { last_read_seq },
                        &ctx,
                    )
                    .await
                    .map_err(ApiError::from)?;
                Ok(WsResponseData::member(member))
            }
            WsAction::MemberUpdateRole => {
                let room_id = Self::required(params.room_id, "room_id")?;
                let user_id = Self::required(params.user_id, "user_id")?;
                let role = Self::required(params.role.clone(), "role")?;
                let member = self
                    .service
                    .room
                    .room_member_update_role(
                        room_id,
                        room::RoomMemberRoleUpdateRequest { user_id, role },
                        &ctx,
                    )
                    .await
                    .map_err(ApiError::from)?;
                Ok(WsResponseData::member(member))
            }
            WsAction::PinList => {
                let room_id = Self::required(params.room_id, "room_id")?;
                let pins = self
                    .service
                    .room
                    .room_pin_list(room_id, &ctx)
                    .await
                    .map_err(ApiError::from)?;
                Ok(WsResponseData::pin_list(pins))
            }
            WsAction::PinAdd => {
                let message_id = Self::required(params.message_id, "message_id")?;
                let pin = self
                    .service
                    .room
                    .room_pin_add(message_id, &ctx)
                    .await
                    .map_err(ApiError::from)?;
                Ok(WsResponseData::pin(pin))
            }
            WsAction::PinRemove => {
                let message_id = Self::required(params.message_id, "message_id")?;
                self.service
                    .room
                    .room_pin_remove(message_id, &ctx)
                    .await
                    .map_err(ApiError::from)?;
                Ok(WsResponseData::bool(true))
            }
            WsAction::ThreadList => {
                let room_id = Self::required(params.room_id, "room_id")?;
                let threads = self
                    .service
                    .room
                    .room_thread_list(room_id, &ctx)
                    .await
                    .map_err(ApiError::from)?;
                Ok(WsResponseData::thread_list(threads))
            }
            WsAction::ThreadCreate => {
                let room_id = Self::required(params.room_id, "room_id")?;
                let parent_seq = Self::required(params.parent_seq, "parent_seq")?;
                let thread = self
                    .service
                    .room
                    .room_thread_create(room_id, room::RoomThreadCreateRequest { parent_seq }, &ctx)
                    .await
                    .map_err(ApiError::from)?;
                Ok(WsResponseData::thread(thread))
            }
            WsAction::ThreadMessages => {
                let thread_id = Self::required(params.thread_id, "thread_id")?;
                let messages = self
                    .service
                    .room
                    .room_thread_messages(
                        thread_id,
                        params.before_seq,
                        params.after_seq,
                        params.limit,
                        &ctx,
                    )
                    .await
                    .map_err(ApiError::from)?;
                Ok(WsResponseData::message_list(messages))
            }
            WsAction::ReactionAdd => {
                let message_id = Self::required(params.message_id, "message_id")?;
                let emoji = Self::required(params.emoji.clone(), "emoji")?;
                let reactions = self
                    .service
                    .room
                    .message_reaction_add(message_id, emoji, &ctx)
                    .await
                    .map_err(ApiError::from)?;
                Ok(WsResponseData::reaction_list(reactions))
            }
            WsAction::ReactionRemove => {
                let message_id = Self::required(params.message_id, "message_id")?;
                let emoji = Self::required(params.emoji.clone(), "emoji")?;
                let reactions = self
                    .service
                    .room
                    .message_reaction_remove(message_id, emoji, &ctx)
                    .await
                    .map_err(ApiError::from)?;
                Ok(WsResponseData::reaction_list(reactions))
            }
            WsAction::ReactionGet => {
                let message_id = Self::required(params.message_id, "message_id")?;
                let reactions = self
                    .service
                    .room
                    .message_reactions_get(message_id, &ctx)
                    .await
                    .map_err(ApiError::from)?;
                Ok(WsResponseData::reaction_list(reactions))
            }
            WsAction::ReactionListBatch => {
                let room_id = Self::required(params.room_id, "room_id")?;
                let message_ids = Self::required(params.message_ids.clone(), "message_ids")?;
                let results = self
                    .service
                    .room
                    .message_reactions_batch(room_id, message_ids, &ctx)
                    .await
                    .map_err(ApiError::from)?;
                Ok(WsResponseData::reaction_list_batch(results))
            }
            WsAction::MessageSearch => {
                let room_id = Self::required(params.room_id, "room_id")?;
                let query = Self::required(params.query.clone(), "query")?;
                let result = self
                    .service
                    .room
                    .message_search(room_id, &query, params.limit, params.offset, &ctx)
                    .await
                    .map_err(ApiError::from)?;
Ok(WsResponseData::search_result(result)) + } + WsAction::MessageEditHistory => { + let message_id = params.message_id.ok_or_else(|| { + ApiError::from(service::error::AppError::BadRequest( + "message_id required".into(), + )) + })?; + let history = self + .service + .room + .get_message_edit_history(message_id, &ctx) + .await + .map_err(ApiError::from)?; + Ok(WsResponseData::edit_history(history)) + } + WsAction::AiList => { + let room_id = params.room_id.ok_or_else(|| { + ApiError::from(service::error::AppError::BadRequest( + "room_id required".into(), + )) + })?; + let configs = self + .service + .room + .room_ai_list(room_id, &ctx) + .await + .map_err(ApiError::from)?; + Ok(WsResponseData::ai_list(configs)) + } + WsAction::AiUpsert => { + let room_id = params.room_id.ok_or_else(|| { + ApiError::from(service::error::AppError::BadRequest( + "room_id required".into(), + )) + })?; + let model = params.model.ok_or_else(|| { + ApiError::from(service::error::AppError::BadRequest( + "model required".into(), + )) + })?; + let config = self + .service + .room + .room_ai_upsert( + room_id, + room::RoomAiUpsertRequest { + model, + version: params.model_version, + history_limit: params.history_limit, + system_prompt: params.system_prompt.clone(), + temperature: params.temperature, + max_tokens: params.max_tokens, + use_exact: params.use_exact, + think: params.think, + stream: params.stream, + min_score: params.min_score, + }, + &ctx, + ) + .await + .map_err(ApiError::from)?; + Ok(WsResponseData::ai_config(config)) + } + WsAction::AiDelete => { + let room_id = params.room_id.ok_or_else(|| { + ApiError::from(service::error::AppError::BadRequest( + "room_id required".into(), + )) + })?; + let model_id = params.model_id.ok_or_else(|| { + ApiError::from(service::error::AppError::BadRequest( + "model_id required".into(), + )) + })?; + self.service + .room + .room_ai_delete(room_id, model_id, &ctx) + .await + .map_err(ApiError::from)?; + Ok(WsResponseData::bool(true)) + } + 
WsAction::NotificationList => { + let notifications = self + .service + .room + .notification_list(params.only_unread, params.archived, params.limit, &ctx) + .await + .map_err(ApiError::from)?; + Ok(WsResponseData::notification_list(notifications)) + } + WsAction::NotificationMarkRead => { + let notification_id = params.notification_id.ok_or_else(|| { + ApiError::from(service::error::AppError::BadRequest( + "notification_id required".into(), + )) + })?; + self.service + .room + .notification_mark_read(notification_id, &ctx) + .await + .map_err(ApiError::from)?; + Ok(WsResponseData::bool(true)) + } + WsAction::NotificationMarkAllRead => { + let count = self + .service + .room + .notification_mark_all_read(&ctx) + .await + .map_err(ApiError::from)?; + Ok(WsResponseData::u64(count)) + } + WsAction::NotificationArchive => { + let notification_id = params.notification_id.ok_or_else(|| { + ApiError::from(service::error::AppError::BadRequest( + "notification_id required".into(), + )) + })?; + self.service + .room + .notification_archive(notification_id, &ctx) + .await + .map_err(ApiError::from)?; + Ok(WsResponseData::bool(true)) + } + WsAction::MentionList => { + let mentions = self + .service + .room + .get_mention_notifications(params.limit, &ctx) + .await + .map_err(ApiError::from)?; + Ok(WsResponseData::mention_list(mentions)) + } + WsAction::MentionReadAll => { + self.service + .room + .mark_mention_notifications_read(&ctx) + .await + .map_err(ApiError::from)?; + Ok(WsResponseData::bool(true)) + } + WsAction::SubscribeRoom => Ok(WsResponseData::subscribed(params.room_id, None)), + WsAction::UnsubscribeRoom => Ok(WsResponseData::bool(true)), + WsAction::SubscribeProject => Ok(WsResponseData::subscribed(None, None)), + WsAction::UnsubscribeProject => Ok(WsResponseData::bool(true)), + } + } +} + +use super::ws_types::{WsAction, WsRequest, WsResponse, WsResponseData}; diff --git a/libs/api/room/ws_types.rs b/libs/api/room/ws_types.rs new file mode 100644 index 
0000000..98d8fea --- /dev/null +++ b/libs/api/room/ws_types.rs @@ -0,0 +1,642 @@ +use chrono::{DateTime, Utc}; +use serde::{Deserialize, Serialize}; +use uuid::Uuid; + +use crate::error::ApiError; +use room::{ + RoomCategoryResponse, RoomMemberResponse, RoomMessageListResponse, RoomMessageResponse, + RoomPinResponse, RoomResponse, RoomThreadResponse, UserInfo, +}; + +#[derive(Debug, Clone, Deserialize)] +pub struct WsRequest { + pub request_id: Uuid, + pub action: WsAction, + #[serde(default)] + pub params: WsRequestParams, +} + +impl WsRequest { + pub fn params(&self) -> &WsRequestParams { + &self.params + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Deserialize)] +pub enum WsAction { + #[serde(rename = "room.list")] + RoomList, + #[serde(rename = "room.get")] + RoomGet, + #[serde(rename = "room.create")] + RoomCreate, + #[serde(rename = "room.update")] + RoomUpdate, + #[serde(rename = "room.delete")] + RoomDelete, + #[serde(rename = "category.list")] + CategoryList, + #[serde(rename = "category.create")] + CategoryCreate, + #[serde(rename = "category.update")] + CategoryUpdate, + #[serde(rename = "category.delete")] + CategoryDelete, + #[serde(rename = "message.list")] + MessageList, + #[serde(rename = "message.create")] + MessageCreate, + #[serde(rename = "message.update")] + MessageUpdate, + #[serde(rename = "message.revoke")] + MessageRevoke, + #[serde(rename = "message.get")] + MessageGet, + #[serde(rename = "member.list")] + MemberList, + #[serde(rename = "member.add")] + MemberAdd, + #[serde(rename = "member.remove")] + MemberRemove, + #[serde(rename = "member.leave")] + MemberLeave, + #[serde(rename = "member.set_read_seq")] + MemberSetReadSeq, + #[serde(rename = "member.update_role")] + MemberUpdateRole, + #[serde(rename = "pin.list")] + PinList, + #[serde(rename = "pin.add")] + PinAdd, + #[serde(rename = "pin.remove")] + PinRemove, + #[serde(rename = "thread.list")] + ThreadList, + #[serde(rename = "thread.create")] + ThreadCreate, + 
#[serde(rename = "thread.messages")] + ThreadMessages, + #[serde(rename = "reaction.add")] + ReactionAdd, + #[serde(rename = "reaction.remove")] + ReactionRemove, + #[serde(rename = "reaction.get")] + ReactionGet, + #[serde(rename = "reaction.list_batch")] + ReactionListBatch, + #[serde(rename = "message.search")] + MessageSearch, + #[serde(rename = "message.edit_history")] + MessageEditHistory, + #[serde(rename = "ai.list")] + AiList, + #[serde(rename = "ai.upsert")] + AiUpsert, + #[serde(rename = "ai.delete")] + AiDelete, + #[serde(rename = "notification.list")] + NotificationList, + #[serde(rename = "notification.mark_read")] + NotificationMarkRead, + #[serde(rename = "notification.mark_all_read")] + NotificationMarkAllRead, + #[serde(rename = "notification.archive")] + NotificationArchive, + #[serde(rename = "mention.list")] + MentionList, + #[serde(rename = "mention.read_all")] + MentionReadAll, + #[serde(rename = "room.subscribe")] + SubscribeRoom, + #[serde(rename = "room.unsubscribe")] + UnsubscribeRoom, + #[serde(rename = "project.subscribe")] + SubscribeProject, + #[serde(rename = "project.unsubscribe")] + UnsubscribeProject, +} + +impl std::fmt::Display for WsAction { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + WsAction::RoomList => write!(f, "room.list"), + WsAction::RoomGet => write!(f, "room.get"), + WsAction::RoomCreate => write!(f, "room.create"), + WsAction::RoomUpdate => write!(f, "room.update"), + WsAction::RoomDelete => write!(f, "room.delete"), + WsAction::CategoryList => write!(f, "category.list"), + WsAction::CategoryCreate => write!(f, "category.create"), + WsAction::CategoryUpdate => write!(f, "category.update"), + WsAction::CategoryDelete => write!(f, "category.delete"), + WsAction::MessageList => write!(f, "message.list"), + WsAction::MessageCreate => write!(f, "message.create"), + WsAction::MessageUpdate => write!(f, "message.update"), + WsAction::MessageRevoke => write!(f, "message.revoke"), + 
WsAction::MessageGet => write!(f, "message.get"), + WsAction::MemberList => write!(f, "member.list"), + WsAction::MemberAdd => write!(f, "member.add"), + WsAction::MemberRemove => write!(f, "member.remove"), + WsAction::MemberLeave => write!(f, "member.leave"), + WsAction::MemberSetReadSeq => write!(f, "member.set_read_seq"), + WsAction::MemberUpdateRole => write!(f, "member.update_role"), + WsAction::PinList => write!(f, "pin.list"), + WsAction::PinAdd => write!(f, "pin.add"), + WsAction::PinRemove => write!(f, "pin.remove"), + WsAction::ThreadList => write!(f, "thread.list"), + WsAction::ThreadCreate => write!(f, "thread.create"), + WsAction::ThreadMessages => write!(f, "thread.messages"), + WsAction::ReactionAdd => write!(f, "reaction.add"), + WsAction::ReactionRemove => write!(f, "reaction.remove"), + WsAction::ReactionGet => write!(f, "reaction.get"), + WsAction::ReactionListBatch => write!(f, "reaction.list_batch"), + WsAction::MessageSearch => write!(f, "message.search"), + WsAction::MessageEditHistory => write!(f, "message.edit_history"), + WsAction::AiList => write!(f, "ai.list"), + WsAction::AiUpsert => write!(f, "ai.upsert"), + WsAction::AiDelete => write!(f, "ai.delete"), + WsAction::NotificationList => write!(f, "notification.list"), + WsAction::NotificationMarkRead => write!(f, "notification.mark_read"), + WsAction::NotificationMarkAllRead => write!(f, "notification.mark_all_read"), + WsAction::NotificationArchive => write!(f, "notification.archive"), + WsAction::MentionList => write!(f, "mention.list"), + WsAction::MentionReadAll => write!(f, "mention.read_all"), + WsAction::SubscribeRoom => write!(f, "room.subscribe"), + WsAction::UnsubscribeRoom => write!(f, "room.unsubscribe"), + WsAction::SubscribeProject => write!(f, "project.subscribe"), + WsAction::UnsubscribeProject => write!(f, "project.unsubscribe"), + } + } +} + +#[derive(Debug, Clone, Default, Deserialize)] +#[serde(default)] +pub struct WsRequestParams { + pub project_name: Option, + pub 
room_id: Option, + pub message_id: Option, + pub message_ids: Option>, + pub user_id: Option, + pub category_id: Option, + pub thread_id: Option, + pub model_id: Option, + pub notification_id: Option, + pub emoji: Option, + pub only_public: Option, + pub only_unread: Option, + pub archived: Option, + pub limit: Option, + pub offset: Option, + pub before_seq: Option, + pub after_seq: Option, + pub room_name: Option, + pub room_public: Option, + pub room_category: Option, + pub content: Option, + pub content_type: Option, + pub in_reply_to: Option, + pub parent_seq: Option, + pub role: Option, + pub last_read_seq: Option, + pub name: Option, + pub position: Option, + pub model: Option, + pub model_version: Option, + pub history_limit: Option, + pub system_prompt: Option, + pub temperature: Option, + pub max_tokens: Option, + pub use_exact: Option, + pub think: Option, + pub stream: Option, + pub min_score: Option, + pub query: Option, +} + +#[derive(Debug, Clone, Serialize)] +pub struct WsResponse { + pub request_id: Uuid, + pub action: String, + #[serde(skip_serializing_if = "Option::is_none")] + pub data: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub error: Option, +} + +impl WsResponse { + pub fn success(request_id: Uuid, action: &str, data: WsResponseData) -> Self { + Self { + request_id, + action: action.to_string(), + data: Some(data), + error: None, + } + } + + pub fn error_response( + request_id: Uuid, + action: &str, + code: i32, + error: &str, + message: &str, + ) -> Self { + Self { + request_id, + action: action.to_string(), + data: None, + error: Some(WsErrorInfo { + code, + error: error.to_string(), + message: message.to_string(), + }), + } + } + + pub fn from_api_error(request_id: Uuid, action: &str, err: ApiError) -> Self { + let err_code = err.0.code(); + let slug = err.0.slug(); + let msg = err.0.user_message(); + Self::error_response(request_id, action, err_code, slug, &msg) + } +} + +#[derive(Debug, Clone, Serialize)] +pub 
struct WsErrorInfo { + pub code: i32, + pub error: String, + pub message: String, +} + +#[derive(Debug, Clone, Serialize)] +#[serde(untagged)] +pub enum WsResponseData { + Bool(bool), + U64(u64), + Room(Box), + RoomList(Vec), + Category(Box), + CategoryList(Vec), + Message(Box), + MessageList(Box), + Member(Box), + MemberList(Vec), + Pin(Box), + PinList(Vec), + Thread(Box), + ThreadList(Vec), + ReactionList(ReactionListData), + ReactionListBatch(Vec), + SearchResult(SearchResultData), + EditHistory(MessageEditHistoryResponse), + AiList(Vec), + AiConfig(Box), + NotificationList(NotificationListData), + MentionList(MentionListData), + Subscribed(SubscribeData), + UserInfo(Vec), +} + +impl WsResponseData { + pub fn room(room: RoomResponse) -> Self { + WsResponseData::Room(Box::new(room)) + } + + pub fn room_list(rooms: Vec) -> Self { + WsResponseData::RoomList(rooms) + } + + pub fn category(category: RoomCategoryResponse) -> Self { + WsResponseData::Category(Box::new(category)) + } + + pub fn category_list(categories: Vec) -> Self { + WsResponseData::CategoryList(categories) + } + + pub fn message(message: RoomMessageResponse) -> Self { + WsResponseData::Message(Box::new(message)) + } + + pub fn message_list(messages: RoomMessageListResponse) -> Self { + WsResponseData::MessageList(Box::new(messages)) + } + + pub fn member(member: RoomMemberResponse) -> Self { + WsResponseData::Member(Box::new(member)) + } + + pub fn member_list(members: Vec) -> Self { + WsResponseData::MemberList(members) + } + + pub fn pin(pin: RoomPinResponse) -> Self { + WsResponseData::Pin(Box::new(PinResponseData { + room: pin.room, + message: pin.message, + pinned_by: pin.pinned_by, + pinned_at: pin.pinned_at, + message_data: None, + })) + } + + pub fn pin_list(pins: Vec) -> Self { + WsResponseData::PinList( + pins.into_iter() + .map(|p| PinResponseData { + room: p.room, + message: p.message, + pinned_by: p.pinned_by, + pinned_at: p.pinned_at, + message_data: None, + }) + .collect(), + ) + } + 
+ pub fn thread(thread: RoomThreadResponse) -> Self { + WsResponseData::Thread(Box::new(thread)) + } + + pub fn thread_list(threads: Vec) -> Self { + WsResponseData::ThreadList(threads) + } + + pub fn bool(b: bool) -> Self { + WsResponseData::Bool(b) + } + + pub fn u64(n: u64) -> Self { + WsResponseData::U64(n) + } + + pub fn subscribed(room_id: Option, project_id: Option) -> Self { + WsResponseData::Subscribed(SubscribeData { + room_id, + project_id, + }) + } + + pub fn user_info(users: Vec) -> Self { + WsResponseData::UserInfo(users) + } + + pub fn reaction_list(data: room::MessageReactionsResponse) -> Self { + WsResponseData::ReactionList(ReactionListData::from(data)) + } + + pub fn reaction_list_batch(data: Vec) -> Self { + WsResponseData::ReactionListBatch(data.into_iter().map(ReactionListData::from).collect()) + } + + pub fn search_result(data: room::MessageSearchResponse) -> Self { + WsResponseData::SearchResult(SearchResultData::from(data)) + } + + pub fn edit_history(data: room::MessageEditHistoryResponse) -> Self { + WsResponseData::EditHistory(MessageEditHistoryResponse::from(data)) + } + + pub fn ai_list(configs: Vec) -> Self { + WsResponseData::AiList(configs.into_iter().map(AiConfigData::from).collect()) + } + + pub fn ai_config(config: room::RoomAiResponse) -> Self { + WsResponseData::AiConfig(Box::new(AiConfigData::from(config))) + } + + pub fn notification_list(data: room::NotificationListResponse) -> Self { + WsResponseData::NotificationList(NotificationListData::from(data)) + } + + pub fn mention_list(mentions: Vec) -> Self { + WsResponseData::MentionList(MentionListData::from(mentions)) + } +} + +#[derive(Debug, Clone, Serialize)] +pub struct PinResponseData { + pub room: Uuid, + pub message: Uuid, + pub pinned_by: Uuid, + pub pinned_at: DateTime, + #[serde(skip_serializing_if = "Option::is_none")] + pub message_data: Option, +} + +#[derive(Debug, Clone, Serialize)] +pub struct ReactionListData { + pub message_id: Uuid, + pub reactions: Vec, +} 
+ +impl From for ReactionListData { + fn from(r: room::MessageReactionsResponse) -> Self { + Self { + message_id: r.message_id, + reactions: r + .reactions + .into_iter() + .map(|g| ReactionItem { + emoji: g.emoji, + count: g.count, + reacted_by_me: g.reacted_by_me, + users: g.users, + }) + .collect(), + } + } +} + +#[derive(Debug, Clone, Serialize)] +pub struct ReactionItem { + pub emoji: String, + pub count: i64, + pub reacted_by_me: bool, + pub users: Vec, +} + +#[derive(Debug, Clone, Serialize)] +pub struct SearchResultData { + pub messages: Vec, + pub total: i64, +} + +impl From for SearchResultData { + fn from(r: room::MessageSearchResponse) -> Self { + Self { + messages: r.messages, + total: r.total, + } + } +} + +#[derive(Debug, Clone, Serialize)] +pub struct MessageEditHistoryResponse { + pub message_id: Uuid, + pub history: Vec, + pub total_edits: i64, +} + +impl From for MessageEditHistoryResponse { + fn from(r: room::MessageEditHistoryResponse) -> Self { + Self { + message_id: r.message_id, + history: r + .history + .into_iter() + .map(|h| EditHistoryEntry { + old_content: h.old_content, + new_content: h.new_content, + edited_at: h.edited_at, + }) + .collect(), + total_edits: r.total_edits, + } + } +} + +#[derive(Debug, Clone, Serialize)] +pub struct EditHistoryEntry { + pub old_content: String, + pub new_content: String, + pub edited_at: chrono::DateTime, +} + +#[derive(Debug, Clone, Serialize)] +pub struct AiConfigData { + pub room: Uuid, + pub model: Uuid, + pub version: Option, + pub call_count: i64, + pub last_call_at: Option>, + pub history_limit: Option, + pub system_prompt: Option, + pub temperature: Option, + pub max_tokens: Option, + pub use_exact: bool, + pub think: bool, + pub stream: bool, + pub min_score: Option, + pub created_at: DateTime, + pub updated_at: DateTime, +} + +impl From for AiConfigData { + fn from(r: room::RoomAiResponse) -> Self { + Self { + room: r.room, + model: r.model, + version: r.version, + call_count: r.call_count, + 
last_call_at: r.last_call_at, + history_limit: r.history_limit, + system_prompt: r.system_prompt, + temperature: r.temperature, + max_tokens: r.max_tokens, + use_exact: r.use_exact, + think: r.think, + stream: r.stream, + min_score: r.min_score, + created_at: r.created_at, + updated_at: r.updated_at, + } + } +} + +#[derive(Debug, Clone, Serialize)] +pub struct NotificationListData { + pub notifications: Vec, + pub total: i64, + pub unread_count: i64, +} + +#[derive(Debug, Clone, Serialize)] +pub struct NotificationData { + pub id: Uuid, + pub room: Option, + pub project: Option, + pub user_id: Option, + pub user_info: Option, + pub notification_type: String, + pub title: String, + pub content: Option, + pub related_message_id: Option, + pub related_user_id: Option, + pub related_room_id: Option, + pub is_read: bool, + pub is_archived: bool, + pub created_at: DateTime, + pub read_at: Option>, + pub expires_at: Option>, +} + +impl From for NotificationListData { + fn from(r: room::NotificationListResponse) -> Self { + Self { + notifications: r + .notifications + .into_iter() + .map(|n| NotificationData { + id: n.id, + room: n.room, + project: n.project, + user_id: n.user_id, + user_info: n.user_info, + notification_type: n.notification_type, + title: n.title, + content: n.content, + related_message_id: n.related_message_id, + related_user_id: n.related_user_id, + related_room_id: n.related_room_id, + is_read: n.is_read, + is_archived: n.is_archived, + created_at: n.created_at, + read_at: n.read_at, + expires_at: n.expires_at, + }) + .collect(), + total: r.total, + unread_count: r.unread_count, + } + } +} + +#[derive(Debug, Clone, Serialize)] +pub struct MentionListData { + pub mentions: Vec, +} + +#[derive(Debug, Clone, Serialize)] +pub struct MentionData { + pub message_id: Uuid, + pub mentioned_by: Uuid, + pub mentioned_by_name: String, + pub content_preview: String, + pub room_id: Uuid, + pub room_name: String, + pub created_at: DateTime, +} + +impl From> for 
MentionListData { + fn from(mentions: Vec) -> Self { + Self { + mentions: mentions + .into_iter() + .map(|m| MentionData { + message_id: m.message_id, + mentioned_by: m.mentioned_by, + mentioned_by_name: m.mentioned_by_name, + content_preview: m.content_preview, + room_id: m.room_id, + room_name: m.room_name, + created_at: m.created_at, + }) + .collect(), + } + } +} + +#[derive(Debug, Clone, Serialize)] +pub struct SubscribeData { + pub room_id: Option, + pub project_id: Option, +} diff --git a/libs/api/room/ws_universal.rs b/libs/api/room/ws_universal.rs new file mode 100644 index 0000000..7a0fa7e --- /dev/null +++ b/libs/api/room/ws_universal.rs @@ -0,0 +1,442 @@ +use std::collections::HashMap; +use std::sync::Arc; +use std::time::{Duration, Instant}; + +use actix_web::{HttpRequest, HttpResponse, web}; +use actix_ws::Message as WsMessage; +use tokio_stream::StreamExt; +use tokio_stream::wrappers::BroadcastStream; +use uuid::Uuid; + +use crate::error::ApiError; +use queue::{ReactionGroup, RoomMessageEvent, RoomMessageStreamChunkEvent}; +use service::AppService; + +use super::ws::validate_origin; +use super::ws_handler::WsRequestHandler; +use super::ws_types::{WsAction, WsRequest, WsResponse, WsResponseData}; + +const MAX_TEXT_MESSAGE_LEN: usize = 64 * 1024; +const MAX_MESSAGES_PER_SECOND: u32 = 10; +const HEARTBEAT_INTERVAL: Duration = Duration::from_secs(30); +const HEARTBEAT_TIMEOUT: Duration = Duration::from_secs(60); +const MAX_IDLE_TIMEOUT: Duration = Duration::from_secs(300); +const RATE_LIMIT_WINDOW: Duration = Duration::from_secs(1); + +/// Unified push event from any subscribed room. 
+#[derive(Debug, Clone)] +pub enum WsPushEvent { + RoomMessage { + room_id: Uuid, + event: Arc, + }, + ReactionUpdated { + room_id: Uuid, + message_id: Uuid, + reactions: Vec, + }, + AiStreamChunk { + room_id: Uuid, + chunk: Arc, + }, +} + +/// Maps room_id -> (room_message_broadcast_stream, stream_chunk_broadcast_stream) +type PushStreams = HashMap< + Uuid, + ( + BroadcastStream>, + BroadcastStream>, + ), +>; + +pub async fn ws_universal( + service: web::Data, + req: HttpRequest, + stream: web::Payload, +) -> Result { + let origin_val = req + .headers() + .get("origin") + .and_then(|v| v.to_str().ok()) + .unwrap_or("(none)"); + if !validate_origin(&req) { + slog::warn!( + service.logs, + "WS universal: origin rejected origin={}", + origin_val + ); + return Err(ApiError(service::error::AppError::BadRequest( + "Invalid origin".into(), + )) + .into()); + } + + // Validate token BEFORE actix_ws::handle() so we can return a proper HTTP + // error if validation fails. Returning an HTTP error after handle() has been + // called (even if the handler returns an error) sends a 200 OK on what the + // browser expects to be a 101 Switching Protocols response — causing + // immediate close with readyState=3. 
+ let user_id = if let Some(token) = req.uri().query().and_then(|q| { + q.split('&') + .find(|p| p.starts_with("token=")) + .and_then(|p| p.split('=').nth(1)) + }) { + slog::info!( + service.logs, + "WS universal: validating token token={} origin={}", + token, + origin_val + ); + match service.ws_token.validate_token(token).await { + Ok(uid) => { + slog::info!( + service.logs, + "WS universal: token auth successful uid={} origin={}", + uid, + origin_val + ); + uid + } + Err(e) => { + slog::warn!( + service.logs, + "WS universal: token auth failed: {:?} token={}", + e, + token + ); + service + .room + .room_manager + .metrics + .ws_auth_failures + .increment(1); + return Err(ApiError(service::error::AppError::Unauthorized).into()); + } + } + } else { + let auth_header = req + .headers() + .get("Authorization") + .and_then(|v| v.to_str().ok()); + let token = match auth_header { + Some(h) if h.starts_with("Bearer ") => &h[7..], + _ => { + service + .room + .room_manager + .metrics + .ws_auth_failures + .increment(1); + return Err(ApiError(service::error::AppError::Unauthorized).into()); + } + }; + + match extract_user_id_from_token(token) { + Some(id) => id, + None => { + service + .room + .room_manager + .metrics + .ws_auth_failures + .increment(1); + return Err(ApiError(service::error::AppError::Unauthorized).into()); + } + } + }; + + slog::debug!( + service.logs, + "WS universal connection established user_id={} origin={}", + user_id, + origin_val + ); + + let service = service.get_ref().clone(); + let manager = service.room.room_manager.clone(); + manager.metrics.ws_connections_active.increment(1.0); + manager.metrics.ws_connections_total.increment(1); + + let logs = service.logs.clone(); + let (response, mut session, mut msg_stream) = actix_ws::handle(&req, stream)?; + actix::spawn(async move { + let handler = WsRequestHandler::new(Arc::new(service), user_id); + let mut push_streams: PushStreams = HashMap::new(); + let mut shutdown_rx = 
manager.subscribe_shutdown(); + let mut last_heartbeat = Instant::now(); + let mut last_activity = Instant::now(); + let mut heartbeat_interval = tokio::time::interval(HEARTBEAT_INTERVAL); + heartbeat_interval.tick().await; + let mut message_count: u32 = 0; + let mut rate_window_start = Instant::now(); + loop { + tokio::select! { + _ = heartbeat_interval.tick() => { + if last_heartbeat.elapsed() > HEARTBEAT_TIMEOUT { + slog::warn!(logs, "WS universal heartbeat timeout for user {}", user_id); + manager.metrics.ws_heartbeat_timeout_total.increment(1); + let _ = session.close(Some(actix_ws::CloseCode::Policy.into())).await; + break; + } + if last_activity.elapsed() > MAX_IDLE_TIMEOUT { + slog::info!(logs, "WS universal idle timeout for user {}", user_id); + manager.metrics.ws_idle_timeout_total.increment(1); + let _ = session.close(Some(actix_ws::CloseCode::Normal.into())).await; + break; + } + if session.ping(b"").await.is_err() { + break; + } + manager.metrics.ws_heartbeat_sent_total.increment(1); + } + _ = shutdown_rx.recv() => { + slog::info!(logs, "WS universal shutdown"); + let _ = session.close(Some(actix_ws::CloseCode::Normal.into())).await; + break; + } + push_event = poll_push_streams(&mut push_streams) => { + match push_event { + Some(WsPushEvent::RoomMessage { room_id, event }) => { + let payload = serde_json::json!({ + "type": "event", + "event": "room.message", + "room_id": room_id, + "data": { + "id": event.id, + "room_id": event.room_id, + "sender_type": event.sender_type, + "sender_id": event.sender_id, + "thread_id": event.thread_id, + "content": event.content, + "content_type": event.content_type, + "send_at": event.send_at, + "seq": event.seq, + "display_name": event.display_name, + }, + }); + if session.text(payload.to_string()).await.is_err() { + break; + } + } + Some(WsPushEvent::ReactionUpdated { room_id, message_id, reactions }) => { + let payload = serde_json::json!({ + "type": "event", + "event": "room.reaction_updated", + "room_id": 
room_id, + "data": { + "message_id": message_id, + "reactions": reactions, + }, + }); + if session.text(payload.to_string()).await.is_err() { + break; + } + } + Some(WsPushEvent::AiStreamChunk { room_id, chunk }) => { + let payload = serde_json::json!({ + "type": "event", + "event": "ai.stream_chunk", + "room_id": room_id, + "data": { + "message_id": chunk.message_id, + "room_id": chunk.room_id, + "content": chunk.content, + "done": chunk.done, + "error": chunk.error, + }, + }); + if session.text(payload.to_string()).await.is_err() { + break; + } + } + None => { + } + } + } + msg = msg_stream.recv() => { + match msg { + Some(Ok(WsMessage::Ping(bytes))) => { + if session.pong(&bytes).await.is_err() { break; } + last_heartbeat = Instant::now(); + } + Some(Ok(WsMessage::Pong(_))) => { last_heartbeat = Instant::now(); } + Some(Ok(WsMessage::Text(text))) => { + if last_activity.elapsed() > MAX_IDLE_TIMEOUT { + slog::info!(logs, "WS universal idle timeout for user {}", user_id); + manager.metrics.ws_idle_timeout_total.increment(1); + let _ = session.close(Some(actix_ws::CloseCode::Normal.into())).await; + break; + } + last_activity = Instant::now(); + + if rate_window_start.elapsed() > RATE_LIMIT_WINDOW { + message_count = 0; + rate_window_start = Instant::now(); + } + message_count += 1; + if message_count > MAX_MESSAGES_PER_SECOND { + slog::warn!(logs, "WS universal rate limit exceeded for user {}", user_id); + manager.metrics.ws_rate_limit_hits.increment(1); + let _ = session.text(serde_json::json!({"type":"error","error":"rate_limit_exceeded"}).to_string()).await; + continue; + } + + if text.len() > MAX_TEXT_MESSAGE_LEN { + slog::warn!(logs, "WS universal message too long from user {}: {} bytes", user_id, text.len()); + let _ = session.text(serde_json::json!({"type":"error","error":"message_too_long"}).to_string()).await; + continue; + } + + match serde_json::from_str::(&text) { + Ok(request) => { + let action_str = request.action.to_string(); + match request.action 
{ + WsAction::SubscribeRoom => { + if let Some(room_id) = request.params().room_id { + match manager.subscribe(room_id, user_id).await { + Ok(rx) => { + let stream_rx = manager.subscribe_room_stream(room_id).await; + push_streams.insert(room_id, ( + BroadcastStream::new(rx), + BroadcastStream::new(stream_rx), + )); + let _ = session.text(serde_json::to_string(&WsResponse::success( + request.request_id, &action_str, + WsResponseData::subscribed(Some(room_id), None) + )).unwrap_or_default()).await; + } + Err(e) => { + let _ = session.text(serde_json::to_string(&WsResponse::error_response( + request.request_id, &action_str, 403, "subscribe_failed", &format!("{}", e) + )).unwrap_or_default()).await; + } + } + } else { + let _ = session.text(serde_json::to_string(&WsResponse::error_response( + request.request_id, &action_str, 400, "bad_request", "room_id required" + )).unwrap_or_default()).await; + } + } + WsAction::UnsubscribeRoom => { + if let Some(room_id) = request.params().room_id { + manager.unsubscribe(room_id, user_id).await; + push_streams.remove(&room_id); + } + let _ = session.text(serde_json::to_string(&WsResponse::success( + request.request_id, &action_str, WsResponseData::bool(true) + )).unwrap_or_default()).await; + } + _ => { + let resp = handler.handle(request).await; + let _ = session.text(serde_json::to_string(&resp).unwrap_or_default()).await; + } + } + } + Err(e) => { + slog::warn!(logs, "WS universal parse error from user {}: {}", user_id, e); + let _ = session.text(serde_json::json!({"type":"error","error":"parse_error"}).to_string()).await; + } + } + } + Some(Ok(WsMessage::Binary(_))) => { break; } + Some(Ok(WsMessage::Continuation(_))) => {} + Some(Ok(WsMessage::Nop)) => {} + Some(Ok(WsMessage::Close(reason))) => { let _ = session.close(reason).await; break; } + Some(Err(e)) => { slog::warn!(logs, "WS error: {}", e); break; } + None => break, + } + } + } + } + + // Clean up subscriptions on disconnect + for room_id in push_streams.keys() { + 
manager.unsubscribe(*room_id, user_id).await; + } + manager.metrics.ws_connections_active.decrement(1.0); + manager.metrics.ws_disconnections_total.increment(1); + }); + + Ok(response) +} + +async fn poll_push_streams(streams: &mut PushStreams) -> Option { + loop { + let room_ids: Vec = streams.keys().copied().collect(); + for room_id in room_ids { + if let Some((msg_stream, chunk_stream)) = streams.get_mut(&room_id) { + tokio::select! { + result = msg_stream.next() => { + match result { + Some(Ok(event)) => { + if let Some(reactions) = event.reactions.clone() { + return Some(WsPushEvent::ReactionUpdated { + room_id: event.room_id, + message_id: event.id, + reactions, + }); + } + return Some(WsPushEvent::RoomMessage { room_id, event }); + } + Some(Err(_)) => { + streams.remove(&room_id); + } + None => { + streams.remove(&room_id); + } + } + } + result = chunk_stream.next() => { + match result { + Some(Ok(chunk)) => { + return Some(WsPushEvent::AiStreamChunk { room_id, chunk }); + } + Some(Err(_)) | None => { + streams.remove(&room_id); + } + } + } + } + } + } + if streams.is_empty() { + tokio::time::sleep(std::time::Duration::from_millis(50)).await; + return None; + } + tokio::task::yield_now().await; + } +} + +fn extract_user_id_from_token(token: &str) -> Option { + if token.len() < 64 { + return None; + } + let token_data = base64_decode(token)?; + if token_data.len() < 16 { + return None; + } + let bytes: [u8; 16] = token_data[..16].try_into().ok()?; + Some(Uuid::from_bytes(bytes)) +} + +fn base64_decode(input: &str) -> Option> { + let table = b"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/"; + let mut result = Vec::with_capacity(input.len() * 3 / 4); + let mut buffer: u32 = 0; + let mut bits = 0; + + for byte in input.bytes() { + if byte == b'=' || byte == b'\n' || byte == b'\r' || byte == b' ' { + continue; + } + let idx = table.iter().position(|&x| x == byte)?; + buffer = (buffer << 6) | (idx as u32); + bits += 6; + if bits >= 8 { + bits -= 
8; + result.push((buffer >> bits) as u8); + } + } + Some(result) +} diff --git a/libs/api/route.rs b/libs/api/route.rs new file mode 100644 index 0000000..5d069c7 --- /dev/null +++ b/libs/api/route.rs @@ -0,0 +1,29 @@ +use actix_web::web; + +pub fn init_routes(cfg: &mut web::ServiceConfig) { + cfg.service( + web::scope("/ws") + .route("", web::get().to(crate::room::ws_universal::ws_universal)) + .route("/rooms/{room_id}", web::get().to(crate::room::ws::ws_room)) + .route( + "/projects/{project_id}", + web::get().to(crate::room::ws::ws_project), + ), + ); + + cfg.service( + web::scope("/api") + .configure(crate::auth::init_auth_routes) + .configure(crate::git::init_git_routes) + .configure(crate::git::init_git_toplevel_routes) + .configure(crate::issue::init_issue_routes) + .configure(crate::project::init_project_routes) + .configure(crate::user::init_user_routes) + .configure(crate::pull_request::init_pull_request_routes) + .configure(crate::agent::init_agent_routes) + .configure(crate::workspace::init_workspace_routes) + .configure(crate::search::init_search_routes) + .configure(crate::room::init_room_routes) + .configure(crate::skill::init_skill_routes), + ); +} diff --git a/libs/api/search/mod.rs b/libs/api/search/mod.rs new file mode 100644 index 0000000..d36046e --- /dev/null +++ b/libs/api/search/mod.rs @@ -0,0 +1,7 @@ +pub mod service; + +use actix_web::web; + +pub fn init_search_routes(cfg: &mut web::ServiceConfig) { + cfg.route("/search", web::to(service::search)); +} diff --git a/libs/api/search/service.rs b/libs/api/search/service.rs new file mode 100644 index 0000000..186ff27 --- /dev/null +++ b/libs/api/search/service.rs @@ -0,0 +1,31 @@ +use crate::ApiResponse; +use crate::error::ApiError; +use actix_web::{HttpResponse, Result, web}; +use service::AppService; +use service::search::{SearchQuery, SearchResponse}; +use session::Session; + +#[utoipa::path( + get, + path = "/api/search", + params( + ("q" = String, Query, description = "Search keyword", 
min_length = 1, max_length = 200), + ("type" = Option, Query, description = "Comma-separated types: projects,repos,issues,users. Default: all"), + ("page" = Option, Query, description = "Page number, default 1"), + ("per_page" = Option, Query, description = "Results per page, default 20, max 100"), + ), + responses( + (status = 200, description = "Search results", body = ApiResponse), + (status = 400, description = "Bad request"), + (status = 401, description = "Unauthorized"), + ), + tag = "Search" +)] +pub async fn search( + service: web::Data, + session: Session, + query: web::Query, +) -> Result { + let resp = service.search(&session, query.into_inner()).await?; + Ok(ApiResponse::ok(resp).to_response()) +} diff --git a/libs/api/skill.rs b/libs/api/skill.rs new file mode 100644 index 0000000..4c0510d --- /dev/null +++ b/libs/api/skill.rs @@ -0,0 +1,244 @@ +//! Skill management API endpoints. + +use actix_web::{HttpResponse, Result, web}; +use service::AppService; +use session::Session; + +use crate::{ApiResponse, error::ApiError}; + +#[derive(serde::Deserialize, utoipa::IntoParams)] +pub struct SkillPath { + pub project_name: String, + pub slug: String, +} + +#[derive(Debug, serde::Deserialize, utoipa::IntoParams)] +pub struct SkillQuery { + pub source: Option, + pub enabled: Option, +} + +#[utoipa::path( + get, + path = "/api/projects/{project_name}/skills", + params( + ("project_name" = String, Path), + ), + responses( + (status = 200, description = "List skills", body = ApiResponse>), + (status = 401, description = "Unauthorized"), + (status = 404, description = "Project not found"), + ), + tag = "Skill" +)] +pub async fn skill_list( + service: web::Data, + session: Session, + path: web::Path, + query: web::Query, +) -> Result { + let project_name = path.into_inner(); + let project = service + .project_info(&session, project_name.clone()) + .await?; + + let q = service::skill::info::SkillListQuery { + source: query.source.clone(), + enabled: query.enabled, + 
}; + + let skills = service + .skill_list(project.uid.to_string(), q, &session) + .await?; + + Ok(ApiResponse::ok(skills).to_response()) +} + +#[utoipa::path( + get, + path = "/api/projects/{project_name}/skills/{slug}", + params( + ("project_name" = String, Path), + ("slug" = String, Path), + ), + responses( + (status = 200, description = "Get skill", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 404, description = "Not found"), + ), + tag = "Skill" +)] +pub async fn skill_get( + service: web::Data, + session: Session, + path: web::Path, +) -> Result { + let SkillPath { + project_name, + slug, + } = path.into_inner(); + + let project = service + .project_info(&session, project_name.clone()) + .await?; + + let skill = service + .skill_get(project.uid.to_string(), slug, &session) + .await?; + + Ok(ApiResponse::ok(skill).to_response()) +} + +#[utoipa::path( + post, + path = "/api/projects/{project_name}/skills", + params(("project_name" = String, Path)), + request_body = service::skill::manage::CreateSkillRequest, + responses( + (status = 200, description = "Create skill", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 409, description = "Skill already exists"), + ), + tag = "Skill" +)] +pub async fn skill_create( + service: web::Data, + session: Session, + path: web::Path, + body: web::Json, +) -> Result { + let project_name = path.into_inner(); + let project = service + .project_info(&session, project_name.clone()) + .await?; + + let skill = service + .skill_create(project.uid.to_string(), body.into_inner(), &session) + .await?; + + Ok(ApiResponse::ok(skill).to_response()) +} + +#[utoipa::path( + patch, + path = "/api/projects/{project_name}/skills/{slug}", + params( + ("project_name" = String, Path), + ("slug" = String, Path), + ), + request_body = service::skill::manage::UpdateSkillRequest, + responses( + (status = 200, description = "Update skill", body = ApiResponse), + (status = 401, 
description = "Unauthorized"), + (status = 404, description = "Not found"), + ), + tag = "Skill" +)] +pub async fn skill_update( + service: web::Data, + session: Session, + path: web::Path, + body: web::Json, +) -> Result { + let SkillPath { + project_name, + slug, + } = path.into_inner(); + + let project = service + .project_info(&session, project_name.clone()) + .await?; + + let skill = service + .skill_update(project.uid.to_string(), slug, body.into_inner(), &session) + .await?; + + Ok(ApiResponse::ok(skill).to_response()) +} + +#[utoipa::path( + delete, + path = "/api/projects/{project_name}/skills/{slug}", + params( + ("project_name" = String, Path), + ("slug" = String, Path), + ), + responses( + (status = 200, description = "Delete skill", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 404, description = "Not found"), + ), + tag = "Skill" +)] +pub async fn skill_delete( + service: web::Data, + session: Session, + path: web::Path, +) -> Result { + let SkillPath { + project_name, + slug, + } = path.into_inner(); + + let project = service + .project_info(&session, project_name.clone()) + .await?; + + let result = service + .skill_delete(project.uid.to_string(), slug, &session) + .await?; + + Ok(ApiResponse::ok(result).to_response()) +} + +#[utoipa::path( + post, + path = "/api/projects/{project_name}/skills/scan", + params(("project_name" = String, Path)), + responses( + (status = 200, description = "Scan repos for skills", body = ApiResponse), + (status = 401, description = "Unauthorized"), + ), + tag = "Skill" +)] +pub async fn skill_scan( + service: web::Data, + session: Session, + path: web::Path, +) -> Result { + let project_name = path.into_inner(); + let project = service + .project_info(&session, project_name) + .await?; + + let result = service + .skill_scan_repos(project.uid, project.uid) + .await?; + + Ok(ApiResponse::ok(ScanResponse { + discovered: result.discovered, + created: result.created, + updated: 
result.updated, + removed: result.removed, + }).to_response()) +} + +#[derive(serde::Serialize, utoipa::ToSchema)] +pub struct ScanResponse { + pub discovered: i64, + pub created: i64, + pub updated: i64, + pub removed: i64, +} + +pub fn init_skill_routes(cfg: &mut web::ServiceConfig) { + cfg.service( + web::scope("/projects/{project_name}/skills") + .route("", web::get().to(skill_list)) + .route("", web::post().to(skill_create)) + .route("/scan", web::post().to(skill_scan)) + .route("/{slug}", web::get().to(skill_get)) + .route("/{slug}", web::patch().to(skill_update)) + .route("/{slug}", web::delete().to(skill_delete)), + ); +} diff --git a/libs/api/user/access_key.rs b/libs/api/user/access_key.rs new file mode 100644 index 0000000..a9b1261 --- /dev/null +++ b/libs/api/user/access_key.rs @@ -0,0 +1,63 @@ +use crate::{ApiResponse, error::ApiError}; +use actix_web::{HttpResponse, Result, web}; +use service::AppService; +use session::Session; + +#[utoipa::path( + post, + path = "/api/users/me/access-keys", + request_body = service::user::access_key::CreateAccessKeyParams, + responses( + (status = 200, description = "Create access key", body = ApiResponse), + (status = 401, description = "Unauthorized"), +), + tag = "User" +)] +pub async fn create_access_key( + service: web::Data, + session: Session, + body: web::Json, +) -> Result { + let resp = service + .user_create_access_key(&session, body.into_inner()) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/users/me/access-keys", + responses( + (status = 200, description = "List access keys", body = ApiResponse), + (status = 401, description = "Unauthorized"), +), + tag = "User" +)] +pub async fn list_access_keys( + service: web::Data, + session: Session, +) -> Result { + let resp = service.user_list_access_keys(&session).await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + delete, + path = "/api/users/me/access-keys/{access_key_id}", + 
params(("access_key_id" = i64, Path)), + responses( + (status = 200, description = "Delete access key"), + (status = 401, description = "Unauthorized"), + (status = 404, description = "Not found"), +), + tag = "User" +)] +pub async fn delete_access_key( + service: web::Data, + session: Session, + path: web::Path, +) -> Result { + let id = path.into_inner(); + service.user_delete_access_key(&session, id).await?; + Ok(ApiResponse::ok(serde_json::json!({ "success": true })).to_response()) +} diff --git a/libs/api/user/chpc.rs b/libs/api/user/chpc.rs new file mode 100644 index 0000000..32f66fd --- /dev/null +++ b/libs/api/user/chpc.rs @@ -0,0 +1,48 @@ +use crate::{ApiResponse, error::ApiError}; +use actix_web::{HttpResponse, Result, web}; +use service::AppService; +use session::Session; + +#[utoipa::path( + get, + path = "/api/users/{username}/heatmap", + params(("username" = String, Path)), + responses( + (status = 200, description = "Get contribution heatmap", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 404, description = "Not found"), +), + tag = "User" +)] +pub async fn get_contribution_heatmap( + service: web::Data, + session: Session, + path: web::Path, + query: web::Query, +) -> Result { + let username = path.into_inner(); + let resp = service + .get_user_contribution_heatmap(session, username, query.into_inner()) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/users/me/heatmap", + responses( + (status = 200, description = "Get my contribution heatmap", body = ApiResponse), + (status = 401, description = "Unauthorized"), +), + tag = "User" +)] +pub async fn get_my_contribution_heatmap( + service: web::Data, + session: Session, + query: web::Query, +) -> Result { + let resp = service + .get_current_user_contribution_heatmap(session, query.into_inner()) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} diff --git a/libs/api/user/mod.rs b/libs/api/user/mod.rs new file mode 
100644 index 0000000..86d1f92 --- /dev/null +++ b/libs/api/user/mod.rs @@ -0,0 +1,118 @@ +pub mod access_key; +pub mod chpc; +pub mod notification; +pub mod preferences; +pub mod profile; +pub mod projects; +pub mod repository; +pub mod ssh_key; +pub mod subscribe; +pub mod user_info; + +use actix_web::web; + +pub fn init_user_routes(cfg: &mut web::ServiceConfig) { + cfg.service( + web::scope("/users") + .route("/me/profile", web::get().to(profile::get_my_profile)) + .route("/me/profile", web::patch().to(profile::update_my_profile)) + .route( + "/me/preferences", + web::get().to(preferences::get_preferences), + ) + .route( + "/me/preferences", + web::patch().to(preferences::update_preferences), + ) + .route("/me/keys", web::post().to(ssh_key::add_ssh_key)) + .route("/me/keys", web::get().to(ssh_key::list_ssh_keys)) + .route("/me/keys/{key_id}", web::get().to(ssh_key::get_ssh_key)) + .route( + "/me/keys/{key_id}", + web::patch().to(ssh_key::update_ssh_key), + ) + .route( + "/me/keys/{key_id}", + web::delete().to(ssh_key::delete_ssh_key), + ) + .route( + "/me/access-keys", + web::post().to(access_key::create_access_key), + ) + .route( + "/me/access-keys", + web::get().to(access_key::list_access_keys), + ) + .route( + "/me/access-keys/{access_key_id}", + web::delete().to(access_key::delete_access_key), + ) + .route( + "/me/notifications/preferences", + web::get().to(notification::get_notification_preferences), + ) + .route( + "/me/notifications/preferences", + web::patch().to(notification::update_notification_preferences), + ) + .route( + "/me/heatmap", + web::get().to(chpc::get_my_contribution_heatmap), + ) + .route( + "/me/projects", + web::get().to(projects::get_current_user_projects), + ) + .route( + "/me/repos", + web::get().to(repository::get_current_user_repos), + ) + // /users/{username}/... 
+ .route( + "/{username}", + web::get().to(profile::get_profile_by_username), + ) + .route("/{username}/info", web::get().to(user_info::get_user_info)) + .route( + "/{username}/heatmap", + web::get().to(chpc::get_contribution_heatmap), + ) + .route("/{username}/keys", web::get().to(ssh_key::list_ssh_keys)) + .route( + "/{username}/keys/{key_id}", + web::get().to(ssh_key::get_ssh_key), + ) + .route( + "/{username}/projects", + web::get().to(projects::get_user_projects), + ) + .route( + "/{username}/repos", + web::get().to(repository::get_user_repos), + ) + .route( + "/{username}/follow", + web::post().to(subscribe::subscribe_target), + ) + .route( + "/{username}/follow", + web::delete().to(subscribe::unsubscribe_target), + ) + .route( + "/{username}/follow", + web::get().to(subscribe::is_subscribed_to_target), + ) + .route( + "/{username}/followers", + web::get().to(subscribe::get_subscribers), + ) + .route( + "/{username}/following/count", + web::get().to(subscribe::get_subscription_count), + ) + .route( + "/{username}/followers/count", + web::get().to(subscribe::get_subscriber_count), + ), + ); +} diff --git a/libs/api/user/notification.rs b/libs/api/user/notification.rs new file mode 100644 index 0000000..5bbf7eb --- /dev/null +++ b/libs/api/user/notification.rs @@ -0,0 +1,42 @@ +use crate::{ApiResponse, error::ApiError}; +use actix_web::{HttpResponse, Result, web}; +use service::AppService; +use session::Session; + +#[utoipa::path( + get, + path = "/api/users/me/notifications/preferences", + responses( + (status = 200, description = "Get notification preferences", body = ApiResponse), + (status = 401, description = "Unauthorized"), +), + tag = "User" +)] +pub async fn get_notification_preferences( + service: web::Data, + session: Session, +) -> Result { + let resp = service.user_get_notification_preferences(&session).await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + patch, + path = "/api/users/me/notifications/preferences", + request_body 
= service::user::notification::NotificationPreferencesParams, + responses( + (status = 200, description = "Update notification preferences", body = ApiResponse), + (status = 401, description = "Unauthorized"), +), + tag = "User" +)] +pub async fn update_notification_preferences( + service: web::Data, + session: Session, + body: web::Json, +) -> Result { + let resp = service + .user_update_notification_preferences(&session, body.into_inner()) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} diff --git a/libs/api/user/preferences.rs b/libs/api/user/preferences.rs new file mode 100644 index 0000000..4114e80 --- /dev/null +++ b/libs/api/user/preferences.rs @@ -0,0 +1,42 @@ +use crate::{ApiResponse, error::ApiError}; +use actix_web::{HttpResponse, Result, web}; +use service::AppService; +use session::Session; + +#[utoipa::path( + get, + path = "/api/users/me/preferences", + responses( + (status = 200, description = "Get user preferences", body = ApiResponse), + (status = 401, description = "Unauthorized"), +), + tag = "User" +)] +pub async fn get_preferences( + service: web::Data, + session: Session, +) -> Result { + let resp = service.user_get_preferences(&session).await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + patch, + path = "/api/users/me/preferences", + request_body = service::user::preferences::PreferencesParams, + responses( + (status = 200, description = "Update user preferences", body = ApiResponse), + (status = 401, description = "Unauthorized"), +), + tag = "User" +)] +pub async fn update_preferences( + service: web::Data, + session: Session, + body: web::Json, +) -> Result { + let resp = service + .user_update_preferences(&session, body.into_inner()) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} diff --git a/libs/api/user/profile.rs b/libs/api/user/profile.rs new file mode 100644 index 0000000..a6bf953 --- /dev/null +++ b/libs/api/user/profile.rs @@ -0,0 +1,62 @@ +use crate::{ApiResponse, error::ApiError}; +use 
actix_web::{HttpResponse, Result, web}; +use service::AppService; +use session::Session; + +#[utoipa::path( + get, + path = "/api/users/me/profile", + responses( + (status = 200, description = "Get current user profile", body = ApiResponse), + (status = 401, description = "Unauthorized"), +), + tag = "User" +)] +pub async fn get_my_profile( + service: web::Data, + session: Session, +) -> Result { + let resp = service.user_get_current_profile(&session).await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/users/{username}", + params(("username" = String, Path)), + responses( + (status = 200, description = "Get user profile", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 404, description = "Not found"), +), + tag = "User" +)] +pub async fn get_profile_by_username( + service: web::Data, + path: web::Path, +) -> Result { + let username = path.into_inner(); + let resp = service.user_get_profile_by_username(username).await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + patch, + path = "/api/users/me/profile", + request_body = service::user::profile::UpdateProfileParams, + responses( + (status = 200, description = "Update current user profile", body = ApiResponse), + (status = 401, description = "Unauthorized"), +), + tag = "User" +)] +pub async fn update_my_profile( + service: web::Data, + session: Session, + body: web::Json, +) -> Result { + let resp = service + .user_update_profile(&session, body.into_inner()) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} diff --git a/libs/api/user/projects.rs b/libs/api/user/projects.rs new file mode 100644 index 0000000..01c2bb2 --- /dev/null +++ b/libs/api/user/projects.rs @@ -0,0 +1,48 @@ +use crate::{ApiResponse, error::ApiError}; +use actix_web::{HttpResponse, Result, web}; +use service::AppService; +use session::Session; + +#[utoipa::path( + get, + path = "/api/users/{username}/projects", + params(("username" = String, 
Path)), + responses( + (status = 200, description = "Get user projects", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 404, description = "Not found"), +), + tag = "User" +)] +pub async fn get_user_projects( + service: web::Data, + session: Session, + path: web::Path, + query: web::Query, +) -> Result { + let username = path.into_inner(); + let resp = service + .get_user_projects(session, username, query.into_inner()) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/users/me/projects", + responses( + (status = 200, description = "Get current user projects", body = ApiResponse), + (status = 401, description = "Unauthorized"), +), + tag = "User" +)] +pub async fn get_current_user_projects( + service: web::Data, + session: Session, + query: web::Query, +) -> Result { + let resp = service + .get_current_user_projects(session, query.into_inner()) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} diff --git a/libs/api/user/repository.rs b/libs/api/user/repository.rs new file mode 100644 index 0000000..354c8cf --- /dev/null +++ b/libs/api/user/repository.rs @@ -0,0 +1,48 @@ +use crate::{ApiResponse, error::ApiError}; +use actix_web::{HttpResponse, Result, web}; +use service::AppService; +use session::Session; + +#[utoipa::path( + get, + path = "/api/users/{username}/repos", + params(("username" = String, Path)), + responses( + (status = 200, description = "Get user repos", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 404, description = "Not found"), +), + tag = "User" +)] +pub async fn get_user_repos( + service: web::Data, + session: Session, + path: web::Path, + query: web::Query, +) -> Result { + let username = path.into_inner(); + let resp = service + .get_user_repos(session, username, query.into_inner()) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/users/me/repos", + responses( + (status = 200, 
description = "Get current user repos", body = ApiResponse), + (status = 401, description = "Unauthorized"), +), + tag = "User" +)] +pub async fn get_current_user_repos( + service: web::Data, + session: Session, + query: web::Query, +) -> Result { + let resp = service + .get_current_user_repos(session, query.into_inner()) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} diff --git a/libs/api/user/ssh_key.rs b/libs/api/user/ssh_key.rs new file mode 100644 index 0000000..fe8a20b --- /dev/null +++ b/libs/api/user/ssh_key.rs @@ -0,0 +1,110 @@ +use crate::{ApiResponse, error::ApiError}; +use actix_web::{HttpResponse, Result, web}; +use service::AppService; +use session::Session; + +#[utoipa::path( + post, + path = "/api/users/me/keys", + request_body = service::user::ssh_key::AddSshKeyParams, + responses( + (status = 200, description = "Add SSH key", body = ApiResponse), + (status = 401, description = "Unauthorized"), +), + tag = "User" +)] +pub async fn add_ssh_key( + service: web::Data, + session: Session, + body: web::Json, +) -> Result { + let resp = service + .user_add_ssh_key(&session, body.into_inner()) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/users/me/keys", + responses( + (status = 200, description = "List SSH keys", body = ApiResponse), + (status = 401, description = "Unauthorized"), +), + tag = "User" +)] +pub async fn list_ssh_keys( + service: web::Data, + session: Session, +) -> Result { + let resp = service.user_list_ssh_keys(&session).await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/users/me/keys/{key_id}", + params(("key_id" = i64, Path)), + responses( + (status = 200, description = "Get SSH key", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 404, description = "Not found"), +), + tag = "User" +)] +pub async fn get_ssh_key( + service: web::Data, + session: Session, + path: web::Path, +) -> Result { + let resp = 
service + .user_get_ssh_key(&session, path.into_inner()) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + patch, + path = "/api/users/me/keys/{key_id}", + params(("key_id" = i64, Path)), + request_body = service::user::ssh_key::UpdateSshKeyParams, + responses( + (status = 200, description = "Update SSH key", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 404, description = "Not found"), +), + tag = "User" +)] +pub async fn update_ssh_key( + service: web::Data, + session: Session, + path: web::Path, + body: web::Json, +) -> Result { + let resp = service + .user_update_ssh_key(&session, path.into_inner(), body.into_inner()) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + delete, + path = "/api/users/me/keys/{key_id}", + params(("key_id" = i64, Path)), + responses( + (status = 200, description = "Delete SSH key"), + (status = 401, description = "Unauthorized"), + (status = 404, description = "Not found"), +), + tag = "User" +)] +pub async fn delete_ssh_key( + service: web::Data, + session: Session, + path: web::Path, +) -> Result { + service + .user_delete_ssh_key(&session, path.into_inner()) + .await?; + Ok(ApiResponse::ok(serde_json::json!({ "success": true })).to_response()) +} diff --git a/libs/api/user/subscribe.rs b/libs/api/user/subscribe.rs new file mode 100644 index 0000000..cae48fa --- /dev/null +++ b/libs/api/user/subscribe.rs @@ -0,0 +1,133 @@ +use crate::{ApiResponse, error::ApiError}; +use actix_web::{HttpResponse, Result, web}; +use service::AppService; +use session::Session; + +#[utoipa::path( + post, + path = "/api/users/{username}/follow", + params(("username" = String, Path)), + responses( + (status = 200, description = "Follow user"), + (status = 401, description = "Unauthorized"), + (status = 404, description = "Not found"), +), + tag = "User" +)] +pub async fn subscribe_target( + service: web::Data, + session: Session, + path: web::Path, +) -> Result { + 
let target = path.into_inner(); + service.user_subscribe_target(session, target).await?; + Ok(ApiResponse::ok(serde_json::json!({ "success": true })).to_response()) +} + +#[utoipa::path( + delete, + path = "/api/users/{username}/follow", + params(("username" = String, Path)), + responses( + (status = 200, description = "Unfollow user"), + (status = 401, description = "Unauthorized"), + (status = 404, description = "Not found"), +), + tag = "User" +)] +pub async fn unsubscribe_target( + service: web::Data, + session: Session, + path: web::Path, +) -> Result { + let target = path.into_inner(); + service.user_unsubscribe_target(session, target).await?; + Ok(ApiResponse::ok(serde_json::json!({ "success": true })).to_response()) +} + +#[utoipa::path( + get, + path = "/api/users/{username}/follow", + params(("username" = String, Path)), + responses( + (status = 200, description = "Check if following user"), + (status = 401, description = "Unauthorized"), +), + tag = "User" +)] +pub async fn is_subscribed_to_target( + service: web::Data, + session: Session, + path: web::Path, +) -> Result { + let target = path.into_inner(); + let resp = service + .user_is_subscribed_to_target(session, target) + .await?; + Ok(ApiResponse::ok(serde_json::json!({ "is_subscribed": resp })).to_response()) +} + +#[utoipa::path( + get, + path = "/api/users/{username}/followers", + params(("username" = String, Path)), + responses( + (status = 200, description = "List followers", body = ApiResponse>), + (status = 401, description = "Unauthorized"), + (status = 404, description = "Not found"), +), + tag = "User" +)] +pub async fn get_subscribers( + service: web::Data, + session: Session, + path: web::Path, +) -> Result { + let target = path.into_inner(); + let resp = service.user_get_subscribers(session, target).await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/users/{username}/following/count", + params(("username" = String, Path)), + responses( + 
(status = 200, description = "Get following count"), + (status = 401, description = "Unauthorized"), + (status = 404, description = "Not found"), +), + tag = "User" +)] +pub async fn get_subscription_count( + service: web::Data, + session: Session, + path: web::Path, +) -> Result { + let username = path.into_inner(); + let resp = service + .user_get_subscription_count(session, username) + .await?; + Ok(ApiResponse::ok(serde_json::json!({ "count": resp })).to_response()) +} + +#[utoipa::path( + get, + path = "/api/users/{username}/followers/count", + params(("username" = String, Path)), + responses( + (status = 200, description = "Get follower count"), + (status = 401, description = "Unauthorized"), + (status = 404, description = "Not found"), +), + tag = "User" +)] +pub async fn get_subscriber_count( + service: web::Data, + session: Session, + path: web::Path, +) -> Result { + let username = path.into_inner(); + let resp = service.user_get_subscriber_count(session, username).await?; + Ok(ApiResponse::ok(serde_json::json!({ "count": resp })).to_response()) +} diff --git a/libs/api/user/user_info.rs b/libs/api/user/user_info.rs new file mode 100644 index 0000000..2f3d24e --- /dev/null +++ b/libs/api/user/user_info.rs @@ -0,0 +1,25 @@ +use crate::{ApiResponse, error::ApiError}; +use actix_web::{HttpResponse, Result, web}; +use service::AppService; +use session::Session; + +#[utoipa::path( + get, + path = "/api/users/{username}/info", + params(("username" = String, Path)), + responses( + (status = 200, description = "Get user info", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 404, description = "Not found"), +), + tag = "User" +)] +pub async fn get_user_info( + service: web::Data, + session: Session, + path: web::Path, +) -> Result { + let username = path.into_inner(); + let resp = service.user_info(session, username).await?; + Ok(ApiResponse::ok(resp).to_response()) +} diff --git a/libs/api/workspace/billing.rs 
b/libs/api/workspace/billing.rs new file mode 100644 index 0000000..2d9cd02 --- /dev/null +++ b/libs/api/workspace/billing.rs @@ -0,0 +1,84 @@ +use crate::{ApiResponse, error::ApiError}; +use actix_web::{HttpResponse, Result, web}; +use service::AppService; +use service::workspace::billing::{ + WorkspaceBillingAddCreditParams, WorkspaceBillingCurrentResponse, WorkspaceBillingHistoryQuery, + WorkspaceBillingHistoryResponse, +}; +use session::Session; + +#[utoipa::path( + get, + path = "/api/workspaces/{slug}/billing", + params( + ("slug" = String, Path, description = "Workspace slug") + ), + responses( + (status = 200, description = "Get workspace billing info", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Not a workspace member"), + (status = 404, description = "Workspace not found"), + ), + tag = "Workspace" +)] +pub async fn workspace_billing_current( + service: web::Data, + session: Session, + path: web::Path, +) -> Result { + let slug = path.into_inner(); + let resp = service.workspace_billing_current(&session, slug).await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/workspaces/{slug}/billing/history", + params( + ("slug" = String, Path, description = "Workspace slug"), + ), + responses( + (status = 200, description = "Get workspace billing history", body = ApiResponse), + (status = 401, description = "Unauthorized"), + ), + tag = "Workspace" +)] +pub async fn workspace_billing_history( + service: web::Data, + session: Session, + path: web::Path, + query: web::Query, +) -> Result { + let slug = path.into_inner(); + let resp = service + .workspace_billing_history(&session, slug, query.into_inner()) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + post, + path = "/api/workspaces/{slug}/billing/credits", + params( + ("slug" = String, Path, description = "Workspace slug") + ), + request_body = WorkspaceBillingAddCreditParams, + responses( + 
(status = 200, description = "Add credit to workspace billing", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Not a workspace member"), + ), + tag = "Workspace" +)] +pub async fn workspace_billing_add_credit( + service: web::Data, + session: Session, + path: web::Path, + body: web::Json, +) -> Result { + let slug = path.into_inner(); + let resp = service + .workspace_billing_add_credit(&session, slug, body.into_inner()) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} diff --git a/libs/api/workspace/info.rs b/libs/api/workspace/info.rs new file mode 100644 index 0000000..eee183b --- /dev/null +++ b/libs/api/workspace/info.rs @@ -0,0 +1,42 @@ +use crate::{ApiResponse, error::ApiError}; +use actix_web::{HttpResponse, Result, web}; +use service::AppService; +use session::Session; + +#[utoipa::path( + get, + path = "/api/workspaces/me", + responses( + (status = 200, description = "List my workspaces", body = ApiResponse), + (status = 401, description = "Unauthorized"), + ), + tag = "Workspace" +)] +pub async fn workspace_list( + service: web::Data, + session: Session, +) -> Result { + let resp = service.workspace_list(&session).await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + get, + path = "/api/workspaces/{slug}", + params(("slug" = String, Path)), + responses( + (status = 200, description = "Get workspace info", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 404, description = "Workspace not found"), + ), + tag = "Workspace" +)] +pub async fn workspace_info( + service: web::Data, + session: Session, + path: web::Path, +) -> Result { + let slug = path.into_inner(); + let resp = service.workspace_info(&session, slug).await?; + Ok(ApiResponse::ok(resp).to_response()) +} diff --git a/libs/api/workspace/init.rs b/libs/api/workspace/init.rs new file mode 100644 index 0000000..6080457 --- /dev/null +++ b/libs/api/workspace/init.rs @@ -0,0 +1,26 @@ +use 
crate::{ApiResponse, error::ApiError}; +use actix_web::{HttpResponse, Result, web}; +use service::AppService; +use service::workspace::init::WorkspaceInitParams; +use session::Session; + +#[utoipa::path( + post, + path = "/api/workspaces", + request_body = WorkspaceInitParams, + responses( + (status = 200, description = "Create workspace", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 409, description = "Slug or name already exists"), + ), + tag = "Workspace" +)] +pub async fn workspace_create( + service: web::Data, + session: Session, + body: web::Json, +) -> Result { + let ws = service.workspace_init(&session, body.into_inner()).await?; + let resp = service.workspace_info(&session, ws.slug.clone()).await?; + Ok(ApiResponse::ok(resp).to_response()) +} diff --git a/libs/api/workspace/members.rs b/libs/api/workspace/members.rs new file mode 100644 index 0000000..c544ba6 --- /dev/null +++ b/libs/api/workspace/members.rs @@ -0,0 +1,205 @@ +use crate::{ApiResponse, error::ApiError}; +use actix_web::{HttpResponse, Result, web}; +use service::AppService; +use service::workspace::members::{ + PendingInvitationInfo, WorkspaceInviteAcceptParams, WorkspaceInviteParams, + WorkspaceMembersResponse, +}; +use session::Session; +use uuid::Uuid; + +#[derive(serde::Deserialize, utoipa::IntoParams)] +pub struct MembersQuery { + pub page: Option, + pub per_page: Option, +} + +#[utoipa::path( + get, + path = "/api/workspaces/{slug}/members", + params( + ("slug" = String, Path), + ("page" = Option, Query), + ("per_page" = Option, Query), + ), + responses( + (status = 200, description = "List workspace members", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Not a member"), + (status = 404, description = "Workspace not found"), + ), + tag = "Workspace" +)] +pub async fn workspace_members( + service: web::Data, + session: Session, + path: web::Path, + query: web::Query, +) -> Result { + let slug = 
path.into_inner(); + let resp = service + .workspace_members(&session, slug, query.page, query.per_page) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[derive(serde::Deserialize, utoipa::ToSchema)] +pub struct UpdateRoleParams { + pub user_id: Uuid, + pub role: String, +} + +#[utoipa::path( + patch, + path = "/api/workspaces/{slug}/members/role", + params(("slug" = String, Path)), + request_body = UpdateRoleParams, + responses( + (status = 200, description = "Update member role"), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Permission denied"), + (status = 404, description = "Workspace or member not found"), + ), + tag = "Workspace" +)] +pub async fn workspace_update_member_role( + service: web::Data, + session: Session, + path: web::Path, + body: web::Json, +) -> Result { + let slug = path.into_inner(); + service + .workspace_update_member_role(&session, slug, body.user_id, body.role.clone()) + .await?; + Ok(ApiResponse::ok(serde_json::json!({ "success": true })).to_response()) +} + +#[utoipa::path( + delete, + path = "/api/workspaces/{slug}/members/{user_id}", + params( + ("slug" = String, Path), + ("user_id" = Uuid, Path), + ), + responses( + (status = 200, description = "Remove member"), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Permission denied"), + (status = 404, description = "Member not found"), + ), + tag = "Workspace" +)] +pub async fn workspace_remove_member( + service: web::Data, + session: Session, + path: web::Path<(String, Uuid)>, +) -> Result { + let (slug, user_id) = path.into_inner(); + service + .workspace_remove_member(&session, slug, user_id) + .await?; + Ok(ApiResponse::ok(serde_json::json!({ "success": true })).to_response()) +} + +#[utoipa::path( + get, + path = "/api/workspaces/{slug}/invitations", + params(("slug" = String, Path)), + responses( + (status = 200, description = "List pending invitations", body = ApiResponse>), + (status = 401, description 
= "Unauthorized"), + (status = 403, description = "Permission denied"), + (status = 404, description = "Workspace not found"), + ), + tag = "Workspace" +)] +pub async fn workspace_pending_invitations( + service: web::Data, + session: Session, + path: web::Path, +) -> Result { + let slug = path.into_inner(); + let resp = service + .workspace_pending_invitations(&session, slug) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + delete, + path = "/api/workspaces/{slug}/invitations/{user_id}", + params( + ("slug" = String, Path), + ("user_id" = Uuid, Path), + ), + responses( + (status = 200, description = "Cancel invitation"), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Permission denied"), + (status = 404, description = "Invitation not found"), + ), + tag = "Workspace" +)] +pub async fn workspace_cancel_invitation( + service: web::Data, + session: Session, + path: web::Path<(String, Uuid)>, +) -> Result { + let (slug, user_id) = path.into_inner(); + service + .workspace_cancel_invitation(&session, slug, user_id) + .await?; + Ok(ApiResponse::ok(serde_json::json!({ "success": true })).to_response()) +} + +#[utoipa::path( + post, + path = "/api/workspaces/{slug}/invitations", + params(("slug" = String, Path)), + request_body = WorkspaceInviteParams, + responses( + (status = 200, description = "Send invitation"), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Permission denied"), + (status = 404, description = "User not found"), + (status = 409, description = "Already a member"), + ), + tag = "Workspace" +)] +pub async fn workspace_invite_member( + service: web::Data, + session: Session, + path: web::Path, + body: web::Json, +) -> Result { + let slug = path.into_inner(); + service + .workspace_invite_member(&session, slug, body.into_inner()) + .await?; + Ok(ApiResponse::ok(serde_json::json!({ "success": true })).to_response()) +} + +#[utoipa::path( + post, + path = 
"/api/workspaces/invitations/accept", + request_body = WorkspaceInviteAcceptParams, + responses( + (status = 200, description = "Accept invitation", body = ApiResponse), + (status = 400, description = "Invalid or expired token"), + (status = 401, description = "Unauthorized"), + (status = 409, description = "Already accepted"), + ), + tag = "Workspace" +)] +pub async fn workspace_accept_invitation( + service: web::Data, + session: Session, + body: web::Json, +) -> Result { + let ws = service + .workspace_accept_invitation(&session, body.into_inner()) + .await?; + let resp = service.workspace_info(&session, ws.slug).await?; + Ok(ApiResponse::ok(resp).to_response()) +} diff --git a/libs/api/workspace/mod.rs b/libs/api/workspace/mod.rs new file mode 100644 index 0000000..eee6615 --- /dev/null +++ b/libs/api/workspace/mod.rs @@ -0,0 +1,68 @@ +pub mod billing; +pub mod info; +pub mod init; +pub mod members; +pub mod projects; +pub mod settings; +pub mod stats; + +use actix_web::web; + +pub fn init_workspace_routes(cfg: &mut web::ServiceConfig) { + cfg.service( + web::scope("/workspaces") + .route("", web::post().to(init::workspace_create)) + .route("/me", web::get().to(info::workspace_list)) + .route("/{slug}", web::get().to(info::workspace_info)) + // Billing + .route( + "/{slug}/billing", + web::get().to(billing::workspace_billing_current), + ) + .route( + "/{slug}/billing/history", + web::get().to(billing::workspace_billing_history), + ) + .route( + "/{slug}/billing/credits", + web::post().to(billing::workspace_billing_add_credit), + ) + // Projects + .route( + "/{slug}/projects", + web::get().to(projects::workspace_projects), + ) + // Stats + .route("/{slug}/stats", web::get().to(stats::workspace_stats)) + // Settings + .route("/{slug}", web::patch().to(settings::workspace_update)) + .route("/{slug}", web::delete().to(settings::workspace_delete)) + // Members + .route("/{slug}/members", web::get().to(members::workspace_members)) + .route( + 
"/{slug}/members/{user_id}", + web::delete().to(members::workspace_remove_member), + ) + .route( + "/{slug}/members/role", + web::patch().to(members::workspace_update_member_role), + ) + // Invitations + .route( + "/{slug}/invitations", + web::post().to(members::workspace_invite_member), + ) + .route( + "/{slug}/invitations", + web::get().to(members::workspace_pending_invitations), + ) + .route( + "/{slug}/invitations/{user_id}", + web::delete().to(members::workspace_cancel_invitation), + ) + .route( + "/invitations/accept", + web::post().to(members::workspace_accept_invitation), + ), + ); +} diff --git a/libs/api/workspace/projects.rs b/libs/api/workspace/projects.rs new file mode 100644 index 0000000..f815097 --- /dev/null +++ b/libs/api/workspace/projects.rs @@ -0,0 +1,32 @@ +use crate::{ApiResponse, error::ApiError}; +use actix_web::{HttpResponse, Result, web}; +use service::AppService; +use service::workspace::info::{WorkspaceProjectsQuery, WorkspaceProjectsResponse}; +use session::Session; + +#[utoipa::path( + get, + path = "/api/workspaces/{slug}/projects", + params( + ("slug" = String, Path, description = "Workspace slug"), + ), + responses( + (status = 200, description = "List workspace projects", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Not a workspace member"), + (status = 404, description = "Workspace not found"), + ), + tag = "Workspace" +)] +pub async fn workspace_projects( + service: web::Data, + session: Session, + path: web::Path, + query: web::Query, +) -> Result { + let slug = path.into_inner(); + let resp = service + .workspace_projects(&session, slug, query.into_inner()) + .await?; + Ok(ApiResponse::ok(resp).to_response()) +} diff --git a/libs/api/workspace/settings.rs b/libs/api/workspace/settings.rs new file mode 100644 index 0000000..f4d770c --- /dev/null +++ b/libs/api/workspace/settings.rs @@ -0,0 +1,55 @@ +use crate::{ApiResponse, error::ApiError}; +use actix_web::{HttpResponse, 
Result, web}; +use service::AppService; +use service::workspace::settings::WorkspaceUpdateParams; +use session::Session; + +#[utoipa::path( + patch, + path = "/api/workspaces/{slug}", + params(("slug" = String, Path)), + request_body = WorkspaceUpdateParams, + responses( + (status = 200, description = "Update workspace", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Permission denied"), + (status = 404, description = "Workspace not found"), + (status = 409, description = "Name already exists"), + ), + tag = "Workspace" +)] +pub async fn workspace_update( + service: web::Data, + session: Session, + path: web::Path, + body: web::Json, +) -> Result { + let slug = path.into_inner(); + let ws = service + .workspace_update(&session, slug, body.into_inner()) + .await?; + let resp = service.workspace_info(&session, ws.slug).await?; + Ok(ApiResponse::ok(resp).to_response()) +} + +#[utoipa::path( + delete, + path = "/api/workspaces/{slug}", + params(("slug" = String, Path)), + responses( + (status = 200, description = "Delete workspace"), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Permission denied (owner only)"), + (status = 404, description = "Workspace not found"), + ), + tag = "Workspace" +)] +pub async fn workspace_delete( + service: web::Data, + session: Session, + path: web::Path, +) -> Result { + let slug = path.into_inner(); + service.workspace_delete(&session, slug).await?; + Ok(ApiResponse::ok(serde_json::json!({ "success": true })).to_response()) +} diff --git a/libs/api/workspace/stats.rs b/libs/api/workspace/stats.rs new file mode 100644 index 0000000..bbe272b --- /dev/null +++ b/libs/api/workspace/stats.rs @@ -0,0 +1,29 @@ +use crate::{ApiResponse, error::ApiError}; +use actix_web::{HttpResponse, Result, web}; +use service::AppService; +use service::workspace::info::WorkspaceStatsResponse; +use session::Session; + +#[utoipa::path( + get, + path = 
"/api/workspaces/{slug}/stats", + params( + ("slug" = String, Path, description = "Workspace slug") + ), + responses( + (status = 200, description = "Get workspace stats", body = ApiResponse), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Not a workspace member"), + (status = 404, description = "Workspace not found"), + ), + tag = "Workspace" +)] +pub async fn workspace_stats( + service: web::Data, + session: Session, + path: web::Path, +) -> Result { + let slug = path.into_inner(); + let resp = service.workspace_stats(&session, slug).await?; + Ok(ApiResponse::ok(resp).to_response()) +} diff --git a/libs/avatar/Cargo.toml b/libs/avatar/Cargo.toml new file mode 100644 index 0000000..66150db --- /dev/null +++ b/libs/avatar/Cargo.toml @@ -0,0 +1,23 @@ +[package] +name = "avatar" +version.workspace = true +edition.workspace = true +authors.workspace = true +description.workspace = true +repository.workspace = true +readme.workspace = true +homepage.workspace = true +license.workspace = true +keywords.workspace = true +categories.workspace = true +documentation.workspace = true +[lib] +path = "lib.rs" +name = "avatar" +[dependencies] +config = { workspace = true } +anyhow = { workspace = true } +image = { workspace = true } +serde = { workspace = true, features = ["derive"] } +[lints] +workspace = true diff --git a/libs/avatar/lib.rs b/libs/avatar/lib.rs new file mode 100644 index 0000000..2cd7373 --- /dev/null +++ b/libs/avatar/lib.rs @@ -0,0 +1,45 @@ +use config::AppConfig; +use serde::{Deserialize, Serialize}; +use std::path::PathBuf; + +#[derive(Clone, Debug)] +pub struct AppAvatar { + pub basic_path: PathBuf, +} + +#[derive(Clone, Debug, Deserialize, Serialize)] +pub struct AvatarLoad { + w: Option, + h: Option, +} + +impl AppAvatar { + pub async fn init(cfg: &AppConfig) -> anyhow::Result { + let path = cfg.avatar_path()?; + if std::fs::read_dir(&path).is_err() { + std::fs::create_dir_all(&path)?; + } + let basic_path = 
PathBuf::from(path); + Ok(Self { basic_path }) + } + pub async fn upload(&self, file: Vec, file_name: String, ext: &str) -> anyhow::Result<()> { + let image = image::load_from_memory(&*file)?; + image.save(self.basic_path.join(format!("{}.{}", file_name, ext)))?; + Ok(()) + } + pub async fn load(&self, file_name: String, load: AvatarLoad) -> anyhow::Result> { + let path = self.basic_path.join(format!("{}.png", file_name)); + let image = image::open(path)?; + let (w, h) = ( + load.w.unwrap_or(image.width()), + load.h.unwrap_or(image.height()), + ); + let image = image.resize(w, h, image::imageops::FilterType::Nearest); + Ok(image.as_bytes().to_vec()) + } + pub async fn delete(&self, file_name: String, ext: &str) -> anyhow::Result<()> { + let path = self.basic_path.join(format!("{}.{}", file_name, ext)); + std::fs::remove_file(path)?; + Ok(()) + } +} diff --git a/libs/config/Cargo.toml b/libs/config/Cargo.toml new file mode 100644 index 0000000..f0779f5 --- /dev/null +++ b/libs/config/Cargo.toml @@ -0,0 +1,24 @@ +[package] +name = "config" +version.workspace = true +edition.workspace = true +authors.workspace = true +description.workspace = true +repository.workspace = true +readme.workspace = true +homepage.workspace = true +license.workspace = true +keywords.workspace = true +categories.workspace = true +documentation.workspace = true +[lib] +path = "lib.rs" +name = "config" +[dependencies] +dotenvy = { workspace = true } +anyhow = { workspace = true } +serde = { workspace = true, features = ["derive"] } +uuid = { workspace = true, features = ["v4"] } +num_cpus = { workspace = true } +[lints] +workspace = true diff --git a/libs/config/ai.rs b/libs/config/ai.rs new file mode 100644 index 0000000..e646bb7 --- /dev/null +++ b/libs/config/ai.rs @@ -0,0 +1,16 @@ +use crate::AppConfig; + +impl AppConfig { + pub fn ai_basic_url(&self) -> anyhow::Result { + if let Some(url) = self.env.get("APP_AI_BASIC_URL") { + return Ok(url.to_string()); + } + 
Err(anyhow::anyhow!("APP_AI_BASIC_URL not found")) + } + pub fn ai_api_key(&self) -> anyhow::Result { + if let Some(api_key) = self.env.get("APP_AI_API_KEY") { + return Ok(api_key.to_string()); + } + Err(anyhow::anyhow!("APP_AI_API_KEY not found")) + } +} diff --git a/libs/config/app.rs b/libs/config/app.rs new file mode 100644 index 0000000..231a299 --- /dev/null +++ b/libs/config/app.rs @@ -0,0 +1,23 @@ +use crate::AppConfig; + +impl AppConfig { + pub fn app_name(&self) -> anyhow::Result { + if let Some(name) = self.env.get("APP_NAME") { + return Ok(name.to_string()); + } + Ok(env!("CARGO_PKG_NAME").to_string()) + } + + pub fn app_version(&self) -> anyhow::Result { + if let Some(version) = self.env.get("APP_VERSION") { + return Ok(version.to_string()); + } + Ok(env!("CARGO_PKG_VERSION").to_string()) + } + pub fn app_description(&self) -> anyhow::Result { + if let Some(description) = self.env.get("APP_DESCRIPTION") { + return Ok(description.to_string()); + } + Ok(env!("CARGO_PKG_DESCRIPTION").to_string()) + } +} diff --git a/libs/config/avatar.rs b/libs/config/avatar.rs new file mode 100644 index 0000000..ddd09fe --- /dev/null +++ b/libs/config/avatar.rs @@ -0,0 +1,17 @@ +use crate::AppConfig; + +impl AppConfig { + pub fn avatar_path(&self) -> anyhow::Result { + if let Some(url) = self.env.get("APP_AVATAR_PATH") { + return Ok(url.to_string()); + } + Err(anyhow::anyhow!("APP_AVATAR_PATH not found")) + } + + pub fn repos_root(&self) -> anyhow::Result { + if let Some(root) = self.env.get("APP_REPOS_ROOT") { + return Ok(root.to_string()); + } + Ok("/data/repos".to_string()) + } +} diff --git a/libs/config/database.rs b/libs/config/database.rs new file mode 100644 index 0000000..52652fe --- /dev/null +++ b/libs/config/database.rs @@ -0,0 +1,70 @@ +use crate::AppConfig; + +impl AppConfig { + pub fn database_url(&self) -> anyhow::Result { + if let Some(url) = self.env.get("APP_DATABASE_URL") { + return Ok(url.to_string()); + } + Err(anyhow::anyhow!("APP_DATABASE_URL not 
found")) + } + pub fn database_max_connections(&self) -> anyhow::Result { + if let Some(max_connections) = self.env.get("APP_DATABASE_MAX_CONNECTIONS") { + return Ok(max_connections.parse::()?); + } + Ok(10) + } + pub fn database_min_connections(&self) -> anyhow::Result { + if let Some(min_connections) = self.env.get("APP_DATABASE_MIN_CONNECTIONS") { + return Ok(min_connections.parse::()?); + } + Ok(2) + } + pub fn database_idle_timeout(&self) -> anyhow::Result { + if let Some(idle_timeout) = self.env.get("APP_DATABASE_IDLE_TIMEOUT") { + return Ok(idle_timeout.parse::()?); + } + Ok(60000) + } + pub fn database_max_lifetime(&self) -> anyhow::Result { + if let Some(max_lifetime) = self.env.get("APP_DATABASE_MAX_LIFETIME") { + return Ok(max_lifetime.parse::()?); + } + Ok(300000) + } + pub fn database_connection_timeout(&self) -> anyhow::Result { + if let Some(connection_timeout) = self.env.get("APP_DATABASE_CONNECTION_TIMEOUT") { + return Ok(connection_timeout.parse::()?); + } + Ok(5000) + } + pub fn database_schema_search_path(&self) -> anyhow::Result { + if let Some(schema_search_path) = self.env.get("APP_DATABASE_SCHEMA_SEARCH_PATH") { + return Ok(schema_search_path.to_string()); + } + Ok("public".to_string()) + } + pub fn database_read_replicas(&self) -> anyhow::Result> { + if let Some(replicas) = self.env.get("APP_DATABASE_REPLICAS") { + return Ok(replicas.split(',').map(|s| s.to_string()).collect()); + } + Ok(vec![]) + } + pub fn database_health_check_interval(&self) -> anyhow::Result { + if let Some(interval) = self.env.get("APP_DATABASE_HEALTH_CHECK_INTERVAL") { + return Ok(interval.parse::()?); + } + Ok(30) + } + pub fn database_retry_attempts(&self) -> anyhow::Result { + if let Some(attempts) = self.env.get("APP_DATABASE_RETRY_ATTEMPTS") { + return Ok(attempts.parse::()?); + } + Ok(3) + } + pub fn database_retry_delay(&self) -> anyhow::Result { + if let Some(delay) = self.env.get("APP_DATABASE_RETRY_DELAY") { + return Ok(delay.parse::()?); + } + Ok(5) + } +} 
diff --git a/libs/config/domain.rs b/libs/config/domain.rs new file mode 100644 index 0000000..702d272 --- /dev/null +++ b/libs/config/domain.rs @@ -0,0 +1,29 @@ +use crate::AppConfig; + +impl AppConfig { + pub fn main_domain(&self) -> anyhow::Result { + if let Some(domain_url) = self.env.get("APP_DOMAIN_URL") { + return Ok(domain_url.to_string()); + } + Ok("http://127.0.0.1".to_string()) + } + + pub fn static_domain(&self) -> anyhow::Result { + if let Some(static_domain) = self.env.get("APP_STATIC_DOMAIN") { + return Ok(static_domain.to_string()); + } + self.main_domain() + } + pub fn media_domain(&self) -> anyhow::Result { + if let Some(media_domain) = self.env.get("APP_MEDIA_DOMAIN") { + return Ok(media_domain.to_string()); + } + self.main_domain() + } + pub fn git_http_domain(&self) -> anyhow::Result { + if let Some(git_http_domain) = self.env.get("APP_GIT_HTTP_DOMAIN") { + return Ok(git_http_domain.to_string()); + } + self.main_domain() + } +} diff --git a/libs/config/embed.rs b/libs/config/embed.rs new file mode 100644 index 0000000..8a816b4 --- /dev/null +++ b/libs/config/embed.rs @@ -0,0 +1,37 @@ +use crate::AppConfig; + +impl AppConfig { + pub fn get_embed_model_base_url(&self) -> anyhow::Result { + if let Some(url) = self.env.get("APP_EMBED_MODEL_BASE_URL") { + return Ok(url.to_string()); + } + Err(anyhow::anyhow!("APP_EMBED_MODEL_BASE_URL not found")) + } + pub fn get_embed_model_dimensions(&self) -> anyhow::Result { + if let Some(dimensions) = self.env.get("APP_EMBED_MODEL_DIMENSIONS") { + return Ok(dimensions.parse::()?); + } + Err(anyhow::anyhow!("APP_EMBED_MODEL_DIMENSIONS not found")) + } + pub fn get_embed_model_api_key(&self) -> anyhow::Result { + if let Some(api_key) = self.env.get("APP_EMBED_MODEL_API_KEY") { + return Ok(api_key.to_string()); + } + Err(anyhow::anyhow!("APP_EMBED_MODEL_API_KEY not found")) + } + pub fn get_embed_model_name(&self) -> anyhow::Result { + if let Some(model_name) = self.env.get("APP_EMBED_MODEL_NAME") { + return 
Ok(model_name.to_string()); + } + Err(anyhow::anyhow!("APP_EMBED_MODEL_NAME not found")) + } + pub fn get_qdrant_url(&self) -> anyhow::Result { + if let Some(url) = self.env.get("APP_QDRANT_URL") { + return Ok(url.to_string()); + } + Err(anyhow::anyhow!("APP_QDRANT_URL not found")) + } + pub fn get_qdrant_api_key(&self) -> Option { + self.env.get("APP_QDRANT_API_KEY").map(|s| s.to_string()) + } +} diff --git a/libs/config/hook.rs b/libs/config/hook.rs new file mode 100644 index 0000000..51c5544 --- /dev/null +++ b/libs/config/hook.rs @@ -0,0 +1,88 @@ +use crate::AppConfig; +use serde::{Deserialize, Serialize}; + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct PoolConfig { + pub max_concurrent: usize, + pub cpu_threshold: f32, + /// Hash-tag-prefixed Redis key prefix for hook task queues. + /// Example: "{hook}" — full keys will be "{hook}:sync", "{hook}:sync:work", etc. + pub redis_list_prefix: String, + /// Redis channel for task logs (PubSub). + pub redis_log_channel: String, + /// BLMOVE blocking timeout in seconds (0 = infinite). + pub redis_block_timeout_secs: u64, + /// Max retry attempts before discarding a failed task. 
+ pub redis_max_retries: usize, + pub worker_id: String, +} + +impl PoolConfig { + pub fn from_env(config: &AppConfig) -> Self { + let max_concurrent = config + .env + .get("HOOK_POOL_MAX_CONCURRENT") + .and_then(|v| v.parse().ok()) + .unwrap_or_else(num_cpus::get); + + let cpu_threshold = config + .env + .get("HOOK_POOL_CPU_THRESHOLD") + .and_then(|v| v.parse().ok()) + .unwrap_or(80.0); + + let redis_list_prefix = config + .env + .get("HOOK_POOL_REDIS_LIST_PREFIX") + .cloned() + .unwrap_or_else(|| "{hook}".to_string()); + + let redis_log_channel = config + .env + .get("HOOK_POOL_REDIS_LOG_CHANNEL") + .cloned() + .unwrap_or_else(|| "hook:logs".to_string()); + + let redis_block_timeout_secs = config + .env + .get("HOOK_POOL_REDIS_BLOCK_TIMEOUT") + .and_then(|v| v.parse().ok()) + .unwrap_or(5); + + let redis_max_retries = config + .env + .get("HOOK_POOL_REDIS_MAX_RETRIES") + .and_then(|v| v.parse().ok()) + .unwrap_or(3); + + let worker_id = config + .env + .get("HOOK_POOL_WORKER_ID") + .cloned() + .unwrap_or_else(|| uuid::Uuid::new_v4().to_string()); + + Self { + max_concurrent, + cpu_threshold, + redis_list_prefix, + redis_log_channel, + redis_block_timeout_secs, + redis_max_retries, + worker_id, + } + } +} + +impl Default for PoolConfig { + fn default() -> Self { + Self { + max_concurrent: num_cpus::get(), + cpu_threshold: 80.0, + redis_list_prefix: "{hook}".to_string(), + redis_log_channel: "hook:logs".to_string(), + redis_block_timeout_secs: 5, + redis_max_retries: 3, + worker_id: uuid::Uuid::new_v4().to_string(), + } + } +} diff --git a/libs/config/lib.rs b/libs/config/lib.rs new file mode 100644 index 0000000..e2ca702 --- /dev/null +++ b/libs/config/lib.rs @@ -0,0 +1,49 @@ +use std::collections::HashMap; +use std::sync::OnceLock; + +pub static GLOBAL_CONFIG: OnceLock = OnceLock::new(); + +#[derive(Clone, Debug)] +pub struct AppConfig { + pub env: HashMap, +} + +impl AppConfig { + const ENV_FILES: &'static [&'static str] = &[".env", ".env.local"]; + pub fn 
load() -> AppConfig { + let mut env = HashMap::new(); + for env_file in AppConfig::ENV_FILES { + dotenvy::from_path(env_file).ok(); + if let Ok(env_file_content) = std::fs::read_to_string(env_file) { + for line in env_file_content.lines() { + if let Some((key, value)) = line.split_once('=') { + env.insert(key.to_string(), value.to_string()); + } + } + } + } + env = env.into_iter().chain(std::env::vars()).collect(); + let this = AppConfig { env }; + if let Err(config) = GLOBAL_CONFIG.set(this) { + eprintln!("Failed to set global config: {:?}", config); + } + if let Some(config) = GLOBAL_CONFIG.get() { + config.clone() + } else { + panic!("Failed to get global config"); + } + } +} + +pub mod ai; +pub mod app; +pub mod avatar; +pub mod database; +pub mod domain; +pub mod embed; +pub mod hook; +pub mod logs; +pub mod qdrant; +pub mod redis; +pub mod smtp; +pub mod ssh; diff --git a/libs/config/logs.rs b/libs/config/logs.rs new file mode 100644 index 0000000..60eaccf --- /dev/null +++ b/libs/config/logs.rs @@ -0,0 +1,94 @@ +use crate::AppConfig; + +impl AppConfig { + pub fn log_level(&self) -> anyhow::Result { + if let Some(level) = self.env.get("APP_LOG_LEVEL") { + return Ok(level.to_string()); + } + Ok("info".to_string()) + } + + pub fn log_format(&self) -> anyhow::Result { + if let Some(format) = self.env.get("APP_LOG_FORMAT") { + return Ok(format.to_string()); + } + Ok("json".to_string()) + } + + pub fn log_file_enabled(&self) -> anyhow::Result { + if let Some(enabled) = self.env.get("APP_LOG_FILE_ENABLED") { + return Ok(enabled.parse::()?); + } + Ok(false) + } + + pub fn log_file_path(&self) -> anyhow::Result { + if let Some(path) = self.env.get("APP_LOG_FILE_PATH") { + return Ok(path.to_string()); + } + Ok("./logs".to_string()) + } + + pub fn log_file_rotation(&self) -> anyhow::Result { + if let Some(rotation) = self.env.get("APP_LOG_FILE_ROTATION") { + return Ok(rotation.to_string()); + } + Ok("daily".to_string()) + } + + pub fn log_file_max_files(&self) -> 
anyhow::Result { + if let Some(max_files) = self.env.get("APP_LOG_FILE_MAX_FILES") { + return Ok(max_files.parse::()?); + } + Ok(7) + } + + pub fn log_file_max_size(&self) -> anyhow::Result { + if let Some(max_size) = self.env.get("APP_LOG_FILE_MAX_SIZE") { + return Ok(max_size.parse::()?); + } + Ok(104857600) // 100MB + } + + pub fn otel_enabled(&self) -> anyhow::Result { + if let Some(enabled) = self.env.get("APP_OTEL_ENABLED") { + return Ok(enabled.parse::()?); + } + Ok(false) + } + + pub fn otel_endpoint(&self) -> anyhow::Result { + if let Some(endpoint) = self.env.get("APP_OTEL_ENDPOINT") { + return Ok(endpoint.to_string()); + } + Ok("http://localhost:5080/api/default/v1/traces".to_string()) + } + + pub fn otel_service_name(&self) -> anyhow::Result { + if let Some(service_name) = self.env.get("APP_OTEL_SERVICE_NAME") { + return Ok(service_name.to_string()); + } + Ok(env!("CARGO_PKG_NAME").to_string()) + } + + pub fn otel_service_version(&self) -> anyhow::Result { + if let Some(service_version) = self.env.get("APP_OTEL_SERVICE_VERSION") { + return Ok(service_version.to_string()); + } + Ok(env!("CARGO_PKG_VERSION").to_string()) + } + + pub fn otel_authorization(&self) -> anyhow::Result> { + if let Some(authorization) = self.env.get("APP_OTEL_AUTHORIZATION") { + return Ok(Some(authorization.to_string())); + } + Ok(None) + } + + pub fn otel_organization(&self) -> anyhow::Result> { + if let Some(organization) = self.env.get("APP_OTEL_ORGANIZATION") { + return Ok(Some(organization.to_string())); + } + Ok(None) + } +} diff --git a/libs/config/qdrant.rs b/libs/config/qdrant.rs new file mode 100644 index 0000000..a507f08 --- /dev/null +++ b/libs/config/qdrant.rs @@ -0,0 +1,17 @@ +use crate::AppConfig; + +impl AppConfig { + pub fn qdrant_url(&self) -> anyhow::Result { + if let Some(url) = self.env.get("APP_QDRANT_URL") { + return Ok(url.to_string()); + } + Err(anyhow::anyhow!("APP_QDRANT_URL not found")) + } + + pub fn qdrant_api_key(&self) -> anyhow::Result> { + if let 
Some(api_key) = self.env.get("APP_QDRANT_API_KEY") { + return Ok(Some(api_key.to_string())); + } + Ok(None) + } +} diff --git a/libs/config/redis.rs b/libs/config/redis.rs new file mode 100644 index 0000000..a5c2c86 --- /dev/null +++ b/libs/config/redis.rs @@ -0,0 +1,34 @@ +use crate::AppConfig; + +impl AppConfig { + pub fn redis_urls(&self) -> anyhow::Result> { + if let Some(urls) = self.env.get("APP_REDIS_URLS") { + return Ok(urls.split(',').map(|s| s.trim().to_string()).collect()); + } + if let Some(url) = self.env.get("APP_REDIS_URL") { + return Ok(vec![url.to_string()]); + } + Err(anyhow::anyhow!("APP_REDIS_URLS or APP_REDIS_URL not found")) + } + + pub fn redis_pool_size(&self) -> anyhow::Result { + if let Some(pool_size) = self.env.get("APP_REDIS_POOL_SIZE") { + return Ok(pool_size.parse::()?); + } + Ok(10) + } + + pub fn redis_connect_timeout(&self) -> anyhow::Result { + if let Some(timeout) = self.env.get("APP_REDIS_CONNECT_TIMEOUT") { + return Ok(timeout.parse::()?); + } + Ok(5) + } + + pub fn redis_acquire_timeout(&self) -> anyhow::Result { + if let Some(timeout) = self.env.get("APP_REDIS_ACQUIRE_TIMEOUT") { + return Ok(timeout.parse::()?); + } + Ok(5) + } +} diff --git a/libs/config/smtp.rs b/libs/config/smtp.rs new file mode 100644 index 0000000..77fab34 --- /dev/null +++ b/libs/config/smtp.rs @@ -0,0 +1,52 @@ +use crate::AppConfig; + +impl AppConfig { + pub fn smtp_host(&self) -> anyhow::Result { + if let Some(host) = self.env.get("APP_SMTP_HOST") { + return Ok(host.to_string()); + } + Err(anyhow::anyhow!("APP_SMTP_HOST not found")) + } + + pub fn smtp_port(&self) -> anyhow::Result { + if let Some(port) = self.env.get("APP_SMTP_PORT") { + return Ok(port.parse::()?); + } + Ok(587) + } + + pub fn smtp_username(&self) -> anyhow::Result { + if let Some(username) = self.env.get("APP_SMTP_USERNAME") { + return Ok(username.to_string()); + } + Err(anyhow::anyhow!("APP_SMTP_USERNAME not found")) + } + + pub fn smtp_password(&self) -> anyhow::Result { + if let 
Some(password) = self.env.get("APP_SMTP_PASSWORD") { + return Ok(password.to_string()); + } + Err(anyhow::anyhow!("APP_SMTP_PASSWORD not found")) + } + + pub fn smtp_from(&self) -> anyhow::Result { + if let Some(from) = self.env.get("APP_SMTP_FROM") { + return Ok(from.to_string()); + } + Err(anyhow::anyhow!("APP_SMTP_FROM not found")) + } + + pub fn smtp_tls(&self) -> anyhow::Result { + if let Some(tls) = self.env.get("APP_SMTP_TLS") { + return Ok(tls.parse::()?); + } + Ok(true) + } + + pub fn smtp_timeout(&self) -> anyhow::Result { + if let Some(timeout) = self.env.get("APP_SMTP_TIMEOUT") { + return Ok(timeout.parse::()?); + } + Ok(30) + } +} diff --git a/libs/config/ssh.rs b/libs/config/ssh.rs new file mode 100644 index 0000000..a767c11 --- /dev/null +++ b/libs/config/ssh.rs @@ -0,0 +1,38 @@ +use crate::AppConfig; + +impl AppConfig { + pub fn ssh_domain(&self) -> anyhow::Result { + if let Some(ssh_domain) = self.env.get("APP_SSH_DOMAIN") { + return Ok(ssh_domain.to_string()); + } + let main_domain = self.main_domain()?; + if let Some(stripped) = main_domain.strip_prefix("https://") { + Ok(stripped.to_string()) + } else if let Some(stripped) = main_domain.strip_prefix("http://") { + Ok(stripped.to_string()) + } else { + Ok(main_domain) + } + } + + pub fn ssh_port(&self) -> anyhow::Result { + if let Some(ssh_port) = self.env.get("APP_SSH_PORT") { + return Ok(ssh_port.parse::()?); + } + Ok(8022) + } + + pub fn ssh_server_private_key(&self) -> anyhow::Result { + if let Some(private_key) = self.env.get("APP_SSH_SERVER_PRIVATE_KEY") { + return Ok(private_key.to_string()); + } + Ok("".to_string()) + } + + pub fn ssh_server_public_key(&self) -> anyhow::Result { + if let Some(public_key) = self.env.get("APP_SSH_SERVER_PUBLIC_KEY") { + return Ok(public_key.to_string()); + } + Ok("".to_string()) + } +} diff --git a/libs/db/Cargo.toml b/libs/db/Cargo.toml new file mode 100644 index 0000000..6957bed --- /dev/null +++ b/libs/db/Cargo.toml @@ -0,0 +1,26 @@ +[package] +name = 
"db" +version.workspace = true +edition.workspace = true +authors.workspace = true +description.workspace = true +repository.workspace = true +readme.workspace = true +homepage.workspace = true +license.workspace = true +keywords.workspace = true +categories.workspace = true +documentation.workspace = true +[lib] +path = "lib.rs" +name = "db" +[dependencies] +sea-orm = { workspace = true, features = ["sqlx-all", "runtime-tokio"] } +deadpool-redis = { workspace = true, features = ["rt_tokio_1", "cluster-async", "cluster"] } +config = { workspace = true } +anyhow = { workspace = true } +rand = { workspace = true, features = [] } +tokio = { workspace = true, features = ["rt", "rt-multi-thread"] } +async-trait = { workspace = true } +[lints] +workspace = true diff --git a/libs/db/cache.rs b/libs/db/cache.rs new file mode 100644 index 0000000..8a3c5aa --- /dev/null +++ b/libs/db/cache.rs @@ -0,0 +1,37 @@ +use config::AppConfig; +use deadpool_redis::cluster::{Connection, Manager, Pool}; + +#[derive(Clone)] +pub struct AppCache { + pool: Pool, + /// Single Redis URL for pub/sub (SUBSCRIBE needs a dedicated connection, can't use cluster). + redis_url: String, +} + +impl AppCache { + pub async fn init(cfg: &AppConfig) -> anyhow::Result { + let urls = cfg.redis_urls()?; + let pool_size = cfg.redis_pool_size()?; + let conn = Manager::new(urls.clone(), true)?; + let pool = deadpool_redis::cluster::Pool::builder(conn) + .max_size(pool_size as usize) + .build()?; + let redis_url = urls + .first() + .cloned() + .unwrap_or_else(|| "redis://127.0.0.1:6379".to_string()); + + Ok(Self { pool, redis_url }) + } + pub async fn conn(&self) -> anyhow::Result { + Ok(self.pool.get().await?) 
+ } + + pub fn redis_pool(&self) -> &Pool { + &self.pool + } + + pub fn redis_url(&self) -> &str { + &self.redis_url + } +} diff --git a/libs/db/database.rs b/libs/db/database.rs new file mode 100644 index 0000000..c4f043b --- /dev/null +++ b/libs/db/database.rs @@ -0,0 +1,194 @@ +use config::AppConfig; +use rand::random_range; +use sea_orm::prelude::async_trait::async_trait; +use sea_orm::{ + ConnectionTrait, Database, DatabaseConnection, DatabaseTransaction, DbBackend, DbErr, + ExecResult, QueryResult, Statement, TransactionTrait, +}; +use std::time::Duration; + +#[derive(Clone)] +pub struct AppDatabase { + db_write: DatabaseConnection, + db_read: Vec, +} + +impl AppDatabase { + pub async fn init(cfg: &AppConfig) -> anyhow::Result { + let db_url = cfg.database_url()?; + let max_connections = cfg.database_max_connections()?; + let min_connections = cfg.database_min_connections()?; + let idle_timeout = cfg.database_idle_timeout()?; + let max_lifetime = cfg.database_max_lifetime()?; + let connection_timeout = cfg.database_connection_timeout()?; + let schema_search_path = cfg.database_schema_search_path()?; + let read_replicas = cfg.database_read_replicas()?; + + let conn_cfg = sea_orm::ConnectOptions::new(db_url) + .max_connections(max_connections) + .min_connections(min_connections) + .idle_timeout(Duration::from_secs(idle_timeout)) + .max_lifetime(Duration::from_secs(max_lifetime)) + .connect_timeout(Duration::from_secs(connection_timeout)) + .set_schema_search_path(schema_search_path) + .sqlx_logging(false) + .to_owned(); + + let db_write = Database::connect(conn_cfg).await?; + + let mut db_read = vec![]; + for replica in read_replicas { + let conn_cfg = sea_orm::ConnectOptions::new(replica.clone()) + .max_connections(max_connections) + .min_connections(min_connections) + .idle_timeout(Duration::from_secs(idle_timeout)) + .max_lifetime(Duration::from_secs(max_lifetime)) + .connect_timeout(Duration::from_secs(connection_timeout)) + .to_owned(); + + let conn = 
Database::connect(conn_cfg).await?; + db_read.push(conn); + } + + Ok(Self { db_write, db_read }) + } + + pub fn writer(&self) -> &DatabaseConnection { + &self.db_write + } + + pub fn reader(&self) -> &DatabaseConnection { + if self.db_read.is_empty() { + return &self.db_write; + } + + &self.db_read[random_range(0..self.db_read.len())] + } + + pub async fn begin(&self) -> Result { + let txn = self.db_write.begin().await?; + Ok(AppTransaction { inner: txn }) + } +} + +pub struct AppTransaction { + inner: DatabaseTransaction, +} + +impl AppTransaction { + pub async fn commit(self) -> Result<(), DbErr> { + self.inner.commit().await + } + pub async fn rollback(self) -> Result<(), DbErr> { + self.inner.rollback().await + } +} +#[async_trait] +impl ConnectionTrait for AppTransaction { + fn get_database_backend(&self) -> DbBackend { + self.inner.get_database_backend() + } + + async fn execute_raw(&self, stmt: Statement) -> Result { + self.inner.execute_raw(stmt).await + } + + async fn execute_unprepared(&self, sql: &str) -> Result { + self.inner.execute_unprepared(sql).await + } + + async fn query_one_raw(&self, stmt: Statement) -> Result, DbErr> { + self.inner.query_one_raw(stmt).await + } + + async fn query_all_raw(&self, stmt: Statement) -> Result, DbErr> { + self.inner.query_all_raw(stmt).await + } +} + +#[async_trait] +impl ConnectionTrait for AppDatabase { + fn get_database_backend(&self) -> DbBackend { + self.db_write.get_database_backend() + } + + async fn execute_raw(&self, stmt: Statement) -> Result { + if is_force_write(&stmt.sql) { + return self.db_write.execute_raw(stmt).await; + } + + if is_read_query(&stmt.sql) { + return self.reader().execute_raw(stmt).await; + } + + self.db_write.execute_raw(stmt).await + } + + async fn execute_unprepared(&self, sql: &str) -> Result { + if is_read_query(sql) { + self.reader().execute_unprepared(sql).await + } else { + self.db_write.execute_unprepared(sql).await + } + } + + async fn query_one_raw(&self, stmt: Statement) -> 
Result, DbErr> { + if is_force_write(&stmt.sql) { + return self.db_write.query_one_raw(stmt).await; + } + + if is_read_query(&stmt.sql) { + return self.reader().query_one_raw(stmt).await; + } + + self.db_write.query_one_raw(stmt).await + } + + async fn query_all_raw(&self, stmt: Statement) -> Result, DbErr> { + if is_force_write(&stmt.sql) { + return self.db_write.query_all_raw(stmt).await; + } + + if is_read_query(&stmt.sql) { + return self.reader().query_all_raw(stmt).await; + } + + self.db_write.query_all_raw(stmt).await + } +} + +fn is_force_write(sql: &str) -> bool { + sql.contains("/*+ write */") +} + +fn is_force_read(sql: &str) -> bool { + sql.contains("/*+ read */") +} + +fn is_read_query(sql: &str) -> bool { + if is_force_write(sql) { + return false; + } + if is_force_read(sql) { + return true; + } + let sql = strip_comments(sql).to_lowercase(); + if sql.contains("for update") || sql.contains("for share") { + return false; + } + + match sql.split_whitespace().next() { + Some("select") | Some("show") | Some("desc") | Some("describe") | Some("explain") => true, + _ => false, + } +} + +fn strip_comments(sql: &str) -> String { + sql.lines() + .filter(|l| { + let l = l.trim_start(); + !l.starts_with("--") && !l.starts_with("/*") + }) + .collect::>() + .join(" ") +} diff --git a/libs/db/lib.rs b/libs/db/lib.rs new file mode 100644 index 0000000..d9c379b --- /dev/null +++ b/libs/db/lib.rs @@ -0,0 +1,2 @@ +pub mod cache; +pub mod database; diff --git a/libs/email/Cargo.toml b/libs/email/Cargo.toml new file mode 100644 index 0000000..9779194 --- /dev/null +++ b/libs/email/Cargo.toml @@ -0,0 +1,25 @@ +[package] +name = "email" +version.workspace = true +edition.workspace = true +authors.workspace = true +description.workspace = true +repository.workspace = true +readme.workspace = true +homepage.workspace = true +license.workspace = true +keywords.workspace = true +categories.workspace = true +documentation.workspace = true +[lib] +name = "email" +path = "lib.rs" 
+[dependencies] +config = { workspace = true } +lettre = { workspace = true } +tokio = { workspace = true, features = ["rt-multi-thread", "rt"] } +serde = { workspace = true, features = ["derive"] } +anyhow = { workspace = true } +regex = { workspace = true } +[lints] +workspace = true diff --git a/libs/email/lib.rs b/libs/email/lib.rs new file mode 100644 index 0000000..8c6d5ea --- /dev/null +++ b/libs/email/lib.rs @@ -0,0 +1,77 @@ +use config::AppConfig; +use lettre::message::Mailbox; +use lettre::transport::smtp::authentication::Credentials; +use lettre::transport::smtp::client::Tls; +use lettre::{SmtpTransport, Transport}; +use regex::Regex; +use serde::{Deserialize, Serialize}; +use std::sync::LazyLock; +use std::time::Duration; + +#[derive(Clone)] +pub struct AppEmail { + pub cred: Credentials, + pub mailer: SmtpTransport, + pub from: Mailbox, +} + +#[derive(Debug, Clone, Deserialize, Serialize)] +pub struct EmailMessage { + pub to: String, + pub subject: String, + pub body: String, +} + +static EMAIL_REGEX: LazyLock = + LazyLock::new(|| Regex::new(r"^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,}$").unwrap()); + +impl AppEmail { + pub async fn init(cfg: &AppConfig) -> anyhow::Result { + let smtp_host = cfg.smtp_host()?; + let smtp_port = cfg.smtp_port()?; + let smtp_username = cfg.smtp_username()?; + let smtp_password = cfg.smtp_password()?; + let smtp_from = cfg.smtp_from()?; + let smtp_tls = cfg.smtp_tls()?; + let smtp_timeout = cfg.smtp_timeout()?; + let cred = Credentials::new(smtp_username, smtp_password); + let tls_param = if smtp_tls { + Tls::Required( + lettre::transport::smtp::client::TlsParameters::builder(smtp_host.clone()) + .build() + .map_err(|e| anyhow::anyhow!("Failed to build TLS parameters: {}", e))?, + ) + } else { + Tls::None + }; + + let mailer = SmtpTransport::builder_dangerous(smtp_host) + .port(smtp_port) + .tls(tls_param) + .timeout(Some(Duration::from_secs(smtp_timeout))) + .credentials(cred.clone()) + .build(); + Ok(AppEmail { + 
cred, + mailer, + from: smtp_from + .parse() + .map_err(|e| anyhow::anyhow!("Invalid from address: {}", e))?, + }) + } + pub async fn send(&self, msg: EmailMessage) -> anyhow::Result<()> { + if !EMAIL_REGEX.is_match(&msg.to) { + return Err(anyhow::anyhow!("Invalid email address format: {}", msg.to)); + } + + let email = lettre::Message::builder() + .from(self.from.clone()) + .to(msg.to.parse()?) + .subject(msg.subject) + .body(msg.body)?; + self.mailer + .send(&email) + .map_err(|e| anyhow::anyhow!("{}", e))?; + Ok(()) + } +} diff --git a/libs/git/Cargo.toml b/libs/git/Cargo.toml new file mode 100644 index 0000000..9d0b62c --- /dev/null +++ b/libs/git/Cargo.toml @@ -0,0 +1,54 @@ +[package] +name = "git" +version.workspace = true +edition.workspace = true +authors.workspace = true +description.workspace = true +repository.workspace = true +readme.workspace = true +homepage.workspace = true +license.workspace = true +keywords.workspace = true +categories.workspace = true +documentation.workspace = true +[lib] +path = "lib.rs" +name = "git" +[dependencies] +git2 = { workspace = true, features = [] } +git2-hooks = { workspace = true, features = [] } +git2-ext = { workspace = true, features = [] } +serde = { workspace = true, features = ["derive"] } +serde_json = { workspace = true } +tar = { workspace = true } +flate2 = { workspace = true } +zip = { workspace = true } +globset = { workspace = true } +models = { workspace = true } +db = { workspace = true } +deadpool-redis = { workspace = true, features = ["rt_tokio_1", "cluster-async", "cluster"] } +config = { workspace = true } +slog = { workspace = true } +tokio = { workspace = true, features = ["sync", "rt", "process"] } +tokio-util = { workspace = true } +qdrant-client = { workspace = true } +redis = { workspace = true } +uuid = { workspace = true, features = ["v4"] } +sea-orm = { workspace = true, features = ["macros"] } +chrono = { workspace = true } +sysinfo = { workspace = true } +num_cpus = { workspace = true 
} +futures = { workspace = true } +russh = { workspace = true, features = ["flate2", "ring", "legacy-ed25519-pkcs8-parser"] } +anyhow = { workspace = true } +base64 = { workspace = true } +sha1 = { workspace = true } +sha2 = { workspace = true } +futures-util = { workspace = true } +async-stream = { workspace = true } +ssh-key = { workspace = true } +actix-web = { workspace = true } +hex = "0.4.3" +reqwest = { workspace = true } +[lints] +workspace = true diff --git a/libs/git/archive/mod.rs b/libs/git/archive/mod.rs new file mode 100644 index 0000000..c11e298 --- /dev/null +++ b/libs/git/archive/mod.rs @@ -0,0 +1,3 @@ +//! Archive domain — generate .tar, .tar.gz, and .zip archives from git trees. +pub mod ops; +pub mod types; diff --git a/libs/git/archive/ops.rs b/libs/git/archive/ops.rs new file mode 100644 index 0000000..df076f4 --- /dev/null +++ b/libs/git/archive/ops.rs @@ -0,0 +1,552 @@ +//! Archive operations. +//! +//! Generates .tar, .tar.gz, and .zip archives from git trees with caching support. + +use std::fs; +use std::io::{Cursor, Write}; +use std::path::PathBuf; + +use flate2::Compression; +use flate2::write::GzEncoder; + +use crate::archive::types::{ArchiveEntry, ArchiveFormat, ArchiveOptions, ArchiveSummary}; +use crate::commit::types::CommitOid; +use crate::{GitDomain, GitError, GitResult}; + +impl GitDomain { + /// Directory where cached archives are stored. + fn archive_cache_dir(&self) -> PathBuf { + PathBuf::from(self.repo().path()).join(".git-archives") + } + + /// Path to the cached archive file for a given commit/format/options. + fn archive_cache_path( + &self, + commit_oid: &CommitOid, + format: ArchiveFormat, + opts: &ArchiveOptions, + ) -> PathBuf { + let ext = match format { + ArchiveFormat::Tar => "tar", + ArchiveFormat::TarGz => "tar.gz", + ArchiveFormat::Zip => "zip", + }; + let key = opts.cache_key(); + self.archive_cache_dir() + .join(format!("{}{}.{}", commit_oid.as_str(), key, ext)) + } + + /// Ensure the cache directory exists. 
+ fn ensure_archive_cache_dir(&self) -> GitResult<()> { + let dir = self.archive_cache_dir(); + if !dir.exists() { + fs::create_dir_all(&dir).map_err(|e| GitError::IoError(e.to_string()))?; + } + Ok(()) + } + + /// Generate a plain tar archive from a commit's tree. + /// Caches the result after first build. + pub fn archive_tar( + &self, + commit_oid: &CommitOid, + opts: Option, + ) -> GitResult> { + let opts = opts.unwrap_or_default(); + let cache_path = self.archive_cache_path(commit_oid, ArchiveFormat::Tar, &opts); + + if cache_path.exists() { + return fs::read(&cache_path).map_err(|e| GitError::IoError(e.to_string())); + } + + let tree = self.tree_from_commit(commit_oid)?; + let mut buf = Vec::new(); + let base = opts.prefix.as_deref().unwrap_or(""); + self.walk_tar(&mut buf, &tree, base, &opts)?; + + self.ensure_archive_cache_dir()?; + fs::write(&cache_path, &buf).map_err(|e| GitError::IoError(e.to_string()))?; + + Ok(buf) + } + + /// Generate a tar.gz archive from a commit's tree. + /// Caches the result after first build. 
+ pub fn archive_tar_gz( + &self, + commit_oid: &CommitOid, + opts: Option, + ) -> GitResult> { + let opts = opts.unwrap_or_default(); + let cache_path = self.archive_cache_path(commit_oid, ArchiveFormat::TarGz, &opts); + + if cache_path.exists() { + return fs::read(&cache_path).map_err(|e| GitError::IoError(e.to_string())); + } + + let tree = self.tree_from_commit(commit_oid)?; + let mut buf = Vec::new(); + { + let encoder = GzEncoder::new(&mut buf, Compression::default()); + let mut builder = tar::Builder::new(encoder); + let base = opts.prefix.as_deref().unwrap_or(""); + self.walk_tar_builder(&mut builder, &tree, base, &opts)?; + let encoder = builder + .into_inner() + .map_err(|e| GitError::Internal(e.to_string()))?; + encoder + .finish() + .map_err(|e| GitError::Internal(e.to_string()))?; + } + + self.ensure_archive_cache_dir()?; + fs::write(&cache_path, &buf).map_err(|e| GitError::IoError(e.to_string()))?; + + Ok(buf) + } + + /// Generate a zip archive from a commit's tree. + /// Caches the result after first build. + pub fn archive_zip( + &self, + commit_oid: &CommitOid, + opts: Option, + ) -> GitResult> { + let opts = opts.unwrap_or_default(); + let cache_path = self.archive_cache_path(commit_oid, ArchiveFormat::Zip, &opts); + + if cache_path.exists() { + return fs::read(&cache_path).map_err(|e| GitError::IoError(e.to_string())); + } + + let tree = self.tree_from_commit(commit_oid)?; + let mut zip_buf = Vec::new(); + let base = opts.prefix.as_deref().unwrap_or(""); + self.walk_zip(&mut zip_buf, &tree, base, &opts)?; + + self.ensure_archive_cache_dir()?; + fs::write(&cache_path, &zip_buf).map_err(|e| GitError::IoError(e.to_string()))?; + + Ok(zip_buf) + } + + /// Generate an archive in the specified format. + /// Results are cached keyed by (commit_oid, format, options). 
+ pub fn archive( + &self, + commit_oid: &CommitOid, + format: ArchiveFormat, + opts: Option, + ) -> GitResult> { + match format { + ArchiveFormat::Tar => self.archive_tar(commit_oid, opts), + ArchiveFormat::TarGz => self.archive_tar_gz(commit_oid, opts), + ArchiveFormat::Zip => self.archive_zip(commit_oid, opts), + } + } + + /// List all entries that would be included in an archive. + pub fn archive_list( + &self, + commit_oid: &CommitOid, + opts: Option, + ) -> GitResult> { + let tree = self.tree_from_commit(commit_oid)?; + let opts = opts.unwrap_or_default(); + let mut entries = Vec::new(); + self.collect_tree_entries(&mut entries, &tree, "", 0, &opts)?; + Ok(entries) + } + + pub fn archive_summary( + &self, + commit_oid: &CommitOid, + format: ArchiveFormat, + opts: Option, + ) -> GitResult { + let entries = self.archive_list(commit_oid, opts)?; + let total_size: u64 = entries.iter().map(|e| e.size).sum(); + Ok(ArchiveSummary { + commit_oid: commit_oid.to_string(), + format, + total_entries: entries.len(), + total_size, + }) + } + + pub fn archive_cached( + &self, + commit_oid: &CommitOid, + format: ArchiveFormat, + opts: Option, + ) -> bool { + let opts = opts.unwrap_or_default(); + self.archive_cache_path(commit_oid, format, &opts).exists() + } + + /// Invalidate (delete) a cached archive, if it exists. + /// Call this when you need a fresh build after the repo state changes. + pub fn archive_invalidate( + &self, + commit_oid: &CommitOid, + format: ArchiveFormat, + opts: Option, + ) -> GitResult { + let opts = opts.unwrap_or_default(); + let path = self.archive_cache_path(commit_oid, format, &opts); + if path.exists() { + fs::remove_file(&path).map_err(|e| GitError::IoError(e.to_string()))?; + Ok(true) + } else { + Ok(false) + } + } + + /// List all cached archive paths for a given commit. 
+ pub fn archive_cache_list(&self, commit_oid: &CommitOid) -> GitResult> { + let dir = self.archive_cache_dir(); + if !dir.exists() { + return Ok(Vec::new()); + } + let prefix = commit_oid.as_str(); + let mut paths = Vec::new(); + for entry in fs::read_dir(&dir).map_err(|e| GitError::IoError(e.to_string()))? { + let entry = entry.map_err(|e| GitError::IoError(e.to_string()))?; + let name = entry.file_name(); + let name = name.to_string_lossy(); + if name.starts_with(prefix) { + paths.push(entry.path()); + } + } + Ok(paths) + } + + /// Invalidate all cached archives for a given commit. + pub fn archive_invalidate_all(&self, commit_oid: &CommitOid) -> GitResult { + let paths = self.archive_cache_list(commit_oid)?; + let count = paths.len(); + for p in paths { + fs::remove_file(&p).map_err(|e| GitError::IoError(e.to_string()))?; + } + Ok(count) + } + + fn tree_from_commit(&self, commit_oid: &CommitOid) -> GitResult> { + let oid = commit_oid + .to_oid() + .map_err(|_| GitError::InvalidOid(commit_oid.to_string()))?; + let commit = self + .repo() + .find_commit(oid) + .map_err(|e| GitError::Internal(e.to_string()))?; + self.repo() + .find_tree(commit.tree_id()) + .map_err(|e| GitError::Internal(e.to_string())) + } + + fn walk_tar( + &self, + buf: &mut Vec, + tree: &git2::Tree<'_>, + base: &str, + opts: &ArchiveOptions, + ) -> GitResult<()> { + for entry in tree.iter() { + let name = entry.name().unwrap_or(""); + let full_path = if base.is_empty() { + name.to_string() + } else { + format!("{}/{}", base, name) + }; + + if !self.entry_passes_filter(&full_path, opts) { + continue; + } + + let oid = entry.id(); + let obj = match self.repo().find_object(oid, None) { + Ok(o) => o, + Err(_) => continue, + }; + + let mode = entry.filemode() as u32; + if obj.kind() == Some(git2::ObjectType::Tree) { + if opts + .max_depth + .map_or(true, |d| full_path.matches('/').count() < d) + { + let sub_tree = self + .repo() + .find_tree(oid) + .map_err(|e| GitError::Internal(e.to_string()))?; 
+ self.walk_tar(buf, &sub_tree, &full_path, opts)?; + } + } else { + let blob = match obj.as_blob() { + Some(b) => b, + None => continue, + }; + let content = blob.content(); + let size = content.len() as u64; + + let mut header = [0u8; 512]; + let path_bytes = full_path.as_bytes(); + // tar USTAR format: prefix (≤155) + "/" + name (≤100) = max 255 bytes. + // Split at the last "/" that keeps prefix ≤ 155. Fall back to truncation error. + const NAME_MAX: usize = 100; + const PREFIX_MAX: usize = 155; + if path_bytes.len() <= NAME_MAX { + // Fits directly in name field. + header[..path_bytes.len()].copy_from_slice(path_bytes); + } else if path_bytes.len() <= PREFIX_MAX + 1 + NAME_MAX { + // Find last "/" that leaves prefix ≤ PREFIX_MAX. + let split_at = path_bytes[..path_bytes.len() - NAME_MAX] + .iter() + .rposition(|&b| b == b'/') + .map(|pos| pos + 1) + .unwrap_or(0); + let prefix_len = split_at; + let name_len = path_bytes.len() - split_at; + if prefix_len > PREFIX_MAX || name_len > NAME_MAX { + return Err(GitError::Internal(format!( + "path too long for tar format: {}", + full_path + ))); + } + header[..prefix_len].copy_from_slice(&path_bytes[..prefix_len]); + header[prefix_len..prefix_len + 1].copy_from_slice(b"/"); + header[prefix_len + 1..prefix_len + 1 + name_len] + .copy_from_slice(&path_bytes[prefix_len..]); + } else { + return Err(GitError::Internal(format!( + "path too long for tar format: {}", + full_path + ))); + } + let mode_octal = format!("{:o}", mode & 0o777); + header[100..108].copy_from_slice(mode_octal.as_bytes()); + let size_octal = format!("{:o}", size); + if size_octal.len() > 12 { + return Err(GitError::Internal(format!( + "file size {} exceeds maximum for tar format (12-byte octal field)", + size + ))); + } + header[124..136].copy_from_slice(size_octal.as_bytes()); + header[136..148].copy_from_slice(b"0 "); + header[148..156].copy_from_slice(b" "); + header[156] = b'0'; + header[257..265].copy_from_slice(b"ustar\0"); + + // Calculate 
checksum: sum all 512 bytes with checksum field filled with spaces. + let sum: u32 = header.iter().map(|&b| b as u32).sum::(); + // tar spec: 8-byte checksum field, formatted as 6 octal digits + space + null. + let sum_octal = format!("{:06o} \0", sum); + header[148..156].copy_from_slice(sum_octal.as_bytes()); + + buf.write_all(&header) + .map_err(|e| GitError::IoError(e.to_string()))?; + buf.write_all(content) + .map_err(|e| GitError::IoError(e.to_string()))?; + let written = 512 + content.len(); + let padding = (512 - written % 512) % 512; + if padding > 0 { + buf.write_all(&vec![0u8; padding]) + .map_err(|e| GitError::IoError(e.to_string()))?; + } + } + } + Ok(()) + } + + fn walk_tar_builder( + &self, + builder: &mut tar::Builder>>, + tree: &git2::Tree<'_>, + base: &str, + opts: &ArchiveOptions, + ) -> GitResult<()> { + for entry in tree.iter() { + let name = entry.name().unwrap_or(""); + let full_path = if base.is_empty() { + name.to_string() + } else { + format!("{}/{}", base, name) + }; + + if !self.entry_passes_filter(&full_path, opts) { + continue; + } + + let oid = entry.id(); + let obj = match self.repo().find_object(oid, None) { + Ok(o) => o, + Err(_) => continue, + }; + + let mode = entry.filemode() as u32; + if obj.kind() == Some(git2::ObjectType::Tree) { + if opts + .max_depth + .map_or(true, |d| full_path.matches('/').count() < d) + { + let sub_tree = self + .repo() + .find_tree(oid) + .map_err(|e| GitError::Internal(e.to_string()))?; + self.walk_tar_builder(builder, &sub_tree, &full_path, opts)?; + } + } else { + let blob = match obj.as_blob() { + Some(b) => b, + None => continue, + }; + let content = blob.content(); + + let mut header = tar::Header::new_gnu(); + header + .set_path(&full_path) + .map_err(|e| GitError::Internal(e.to_string()))?; + header.set_size(content.len() as u64); + header.set_mode(mode & 0o755); + header.set_cksum(); + + builder + .append(&header, content) + .map_err(|e| GitError::Internal(e.to_string()))?; + } + } + Ok(()) + } 
+ + fn walk_zip( + &self, + zip_buf: &mut Vec, + tree: &git2::Tree<'_>, + base: &str, + opts: &ArchiveOptions, + ) -> GitResult<()> { + let cursor = Cursor::new(zip_buf); + let mut zip = zip::ZipWriter::new(cursor); + zip = self.walk_zip_impl(zip, tree, base, opts)?; + let _cursor = zip + .finish() + .map_err(|e| GitError::Internal(e.to_string()))?; + Ok(()) + } + + fn walk_zip_impl<'a>( + &'a self, + mut zip: zip::ZipWriter>>, + tree: &git2::Tree<'_>, + base: &str, + opts: &ArchiveOptions, + ) -> GitResult>>> { + for entry in tree.iter() { + let name = entry.name().unwrap_or(""); + let full_path = if base.is_empty() { + name.to_string() + } else { + format!("{}/{}", base, name) + }; + + if !self.entry_passes_filter(&full_path, opts) { + continue; + } + + let oid = entry.id(); + let obj = match self.repo().find_object(oid, None) { + Ok(o) => o, + Err(_) => continue, + }; + + let mode = entry.filemode() as u32; + if obj.kind() == Some(git2::ObjectType::Tree) { + if opts + .max_depth + .map_or(true, |d| full_path.matches('/').count() < d) + { + let sub_tree = self + .repo() + .find_tree(oid) + .map_err(|e| GitError::Internal(e.to_string()))?; + zip = self.walk_zip_impl(zip, &sub_tree, &full_path, opts)?; + } + } else { + let blob = match obj.as_blob() { + Some(b) => b, + None => continue, + }; + let content = blob.content(); + let options = zip::write::SimpleFileOptions::default() + .compression_method(zip::CompressionMethod::Deflated) + .unix_permissions(mode & 0o755); + + zip.start_file(&full_path, options) + .map_err(|e| GitError::Internal(e.to_string()))?; + zip.write_all(content) + .map_err(|e| GitError::Internal(e.to_string()))?; + } + } + Ok(zip) + } + + fn collect_tree_entries( + &self, + entries: &mut Vec, + tree: &git2::Tree<'_>, + prefix: &str, + depth: usize, + opts: &ArchiveOptions, + ) -> GitResult<()> { + for entry in tree.iter() { + let name = entry.name().unwrap_or(""); + let full_path = if prefix.is_empty() { + name.to_string() + } else { + 
format!("{}/{}", prefix, name) + }; + + if !self.entry_passes_filter(&full_path, opts) { + continue; + } + + if opts.max_depth.map_or(false, |d| depth >= d) { + continue; + } + + let oid = entry.id(); + let obj = match self.repo().find_object(oid, None) { + Ok(o) => o, + Err(_) => continue, + }; + + let mode = entry.filemode() as u32; + let size = obj.as_blob().map(|b| b.size() as u64).unwrap_or(0); + if obj.kind() == Some(git2::ObjectType::Tree) { + let sub_tree = self + .repo() + .find_tree(oid) + .map_err(|e| GitError::Internal(e.to_string()))?; + self.collect_tree_entries(entries, &sub_tree, &full_path, depth + 1, opts)?; + } else { + entries.push(ArchiveEntry { + path: full_path, + oid: oid.to_string(), + size, + mode, + }); + } + } + Ok(()) + } + + fn entry_passes_filter(&self, full_path: &str, opts: &ArchiveOptions) -> bool { + if let Some(ref filter) = opts.path_filter { + if !full_path.starts_with(filter) { + return false; + } + } + true + } +} diff --git a/libs/git/archive/types.rs b/libs/git/archive/types.rs new file mode 100644 index 0000000..7bcb8d1 --- /dev/null +++ b/libs/git/archive/types.rs @@ -0,0 +1,78 @@ +//! Serializable types for the archive domain. + +use serde::{Deserialize, Serialize}; + +/// The format of the archive. +#[derive(Debug, Clone, Copy, Serialize, Deserialize)] +#[serde(rename_all = "lowercase")] +pub enum ArchiveFormat { + Tar, + TarGz, + Zip, +} + +/// Metadata for a single file in the archive. +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ArchiveEntry { + pub path: String, + pub oid: String, + pub size: u64, + pub mode: u32, +} + +/// Summary of an archive. +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ArchiveSummary { + pub commit_oid: String, + pub format: ArchiveFormat, + pub total_entries: usize, + pub total_size: u64, +} + +/// Options for archive generation. +#[derive(Debug, Clone, Default)] +pub struct ArchiveOptions { + /// Prefix path to prepend to all entries (e.g. "project-name/"). 
+ pub prefix: Option, + /// Maximum directory depth to recurse into. + pub max_depth: Option, + /// Include only entries under this path prefix. + pub path_filter: Option, +} + +impl ArchiveOptions { + pub fn new() -> Self { + Self::default() + } + + pub fn prefix(mut self, p: &str) -> Self { + self.prefix = Some(p.to_string()); + self + } + + pub fn max_depth(mut self, d: usize) -> Self { + self.max_depth = Some(d); + self + } + + pub fn path_filter(mut self, p: &str) -> Self { + self.path_filter = Some(p.to_string()); + self + } + + /// Two Option sets with the same values produce the same key. + pub(crate) fn cache_key(&self) -> String { + let prefix = self.prefix.as_deref().unwrap_or(""); + let filter = self.path_filter.as_deref().unwrap_or(""); + let depth = self.max_depth.map_or("0".to_string(), |d| d.to_string()); + if prefix.is_empty() && filter.is_empty() && self.max_depth.is_none() { + String::new() + } else { + use std::collections::hash_map::DefaultHasher; + use std::hash::{Hash, Hasher}; + let mut h = DefaultHasher::new(); + (prefix, filter, depth).hash(&mut h); + format!("-{:x}", h.finish()) + } + } +} diff --git a/libs/git/blame/mod.rs b/libs/git/blame/mod.rs new file mode 100644 index 0000000..02718c8 --- /dev/null +++ b/libs/git/blame/mod.rs @@ -0,0 +1,2 @@ +//! Blame domain — per-line commit attribution (git blame). +pub mod ops; diff --git a/libs/git/blame/ops.rs b/libs/git/blame/ops.rs new file mode 100644 index 0000000..78aec05 --- /dev/null +++ b/libs/git/blame/ops.rs @@ -0,0 +1,262 @@ +//! Blame operations. + +use crate::commit::types::{CommitBlameHunk, CommitBlameLine, CommitOid}; +use crate::{GitDomain, GitError, GitResult}; + +/// Options for blame operations. 
+#[derive(Debug, Clone, Default)] +pub struct BlameOptions { + pub min_line: Option, + pub max_line: Option, + pub track_copies_same_file: bool, + pub track_copies_same_commit_moves: bool, + pub ignore_whitespace: bool, +} + +impl BlameOptions { + pub fn new() -> Self { + Self::default() + } + + pub fn min_line(mut self, line: usize) -> Self { + self.min_line = Some(line); + self + } + + pub fn max_line(mut self, line: usize) -> Self { + self.max_line = Some(line); + self + } + + pub fn track_copies_same_file(mut self) -> Self { + self.track_copies_same_file = true; + self + } + + pub fn track_copies_same_commit_moves(mut self) -> Self { + self.track_copies_same_commit_moves = true; + self + } + + pub fn ignore_whitespace(mut self) -> Self { + self.ignore_whitespace = true; + self + } + + fn apply_to(&self, opts: &mut git2::BlameOptions) { + if let Some(min) = self.min_line { + opts.min_line(min); + } + if let Some(max) = self.max_line { + opts.max_line(max); + } + if self.track_copies_same_file { + opts.track_copies_same_file(true); + } + if self.track_copies_same_commit_moves { + opts.track_copies_same_commit_moves(true); + } + if self.ignore_whitespace { + opts.ignore_whitespace(true); + } + } +} + +impl GitDomain { + pub fn blame_file( + &self, + commit_oid: &CommitOid, + path: &str, + opts: Option, + ) -> GitResult> { + let oid = commit_oid + .to_oid() + .map_err(|_| GitError::InvalidOid(commit_oid.to_string()))?; + + let commit = self + .repo() + .find_commit(oid) + .map_err(|e| GitError::Internal(e.to_string()))?; + + let _tree = self + .repo() + .find_tree(commit.tree_id()) + .map_err(|e| GitError::Internal(e.to_string()))?; + + let mut blame_opts = git2::BlameOptions::new(); + if let Some(ref o) = opts { + o.apply_to(&mut blame_opts); + } + + let blame = self + .repo() + .blame_file(std::path::Path::new(path), Some(&mut blame_opts)) + .map_err(|e| GitError::Internal(e.to_string()))?; + + // Use get_index to iterate hunks + let num_hunks = blame.len(); + 
let mut hunks: Vec = Vec::with_capacity(num_hunks); + + for i in 0..num_hunks { + if let Some(hunk) = blame.get_index(i) { + hunks.push(CommitBlameHunk { + commit_oid: CommitOid::from_git2(hunk.orig_commit_id()), + final_start_line: hunk.final_start_line() as u32, + final_lines: hunk.lines_in_hunk() as u32, + orig_start_line: hunk.orig_start_line() as u32, + // NOTE: git2 0.20.4 BlameHunk does not expose the original hunk line count. + // orig_lines_in_hunk() is not available, so we reuse lines_in_hunk(). + // The caller should prefer final_lines and orig_start_line for accuracy. + orig_lines: hunk.lines_in_hunk() as u32, + boundary: hunk.is_boundary(), + orig_path: hunk.path().map(|p| p.to_string_lossy().to_string()), + }); + } + } + + Ok(hunks) + } + + /// Alias for blame_file. + pub fn blame_path( + &self, + commit_oid: &CommitOid, + path: &str, + opts: Option, + ) -> GitResult> { + self.blame_file(commit_oid, path, opts) + } + + /// Reconstructs line attribution from hunk ranges. + pub fn blame_lines( + &self, + commit_oid: &CommitOid, + path: &str, + opts: Option, + ) -> GitResult> { + let oid = commit_oid + .to_oid() + .map_err(|_| GitError::InvalidOid(commit_oid.to_string()))?; + + let commit = self + .repo() + .find_commit(oid) + .map_err(|e| GitError::Internal(e.to_string()))?; + + let mut blame_opts = git2::BlameOptions::new(); + if let Some(ref o) = opts { + o.apply_to(&mut blame_opts); + } + + let blame = self + .repo() + .blame_file(std::path::Path::new(path), Some(&mut blame_opts)) + .map_err(|e| GitError::Internal(e.to_string()))?; + + // Get file content for line text + let tree = self + .repo() + .find_tree(commit.tree_id()) + .map_err(|e| GitError::Internal(e.to_string()))?; + + let content_lines: Vec = String::from_utf8_lossy( + self.repo() + .find_blob( + tree.get_path(std::path::Path::new(path)) + .map_err(|_| { + GitError::ObjectNotFound(format!("file not found in commit: {}", path)) + })? 
+ .id(), + ) + .map_err(|e| GitError::Internal(e.to_string()))? + .content(), + ) + .lines() + .map(String::from) + .collect(); + + // Collect hunks + let hunks: Vec<_> = (0..blame.len()) + .filter_map(|i| blame.get_index(i)) + .map(|h| { + ( + h.orig_commit_id(), + h.final_start_line(), + h.lines_in_hunk(), + h.path().map(|p| p.to_string_lossy().to_string()), + ) + }) + .collect(); + + let mut lines: Vec = Vec::with_capacity(content_lines.len()); + + for (line_idx, content) in content_lines.iter().enumerate() { + let line_no = line_idx as u32; + let hunk = hunks.iter().find(|(_, start, count, _)| { + let end = *start + *count; + line_idx + 1 >= *start && line_idx + 1 < end + }); + + let (commit_oid, orig_path) = hunk + .map(|(oid, _, _, path)| (CommitOid::from_git2(*oid), path.clone())) + .unwrap_or_else(|| { + ( + CommitOid::from_git2(git2::Oid::zero()), + Some(path.to_string()), + ) + }); + + lines.push(CommitBlameLine { + commit_oid, + line_no, + content: content.clone(), + orig_path, + }); + } + + Ok(lines) + } + + pub fn blame_hunk_at( + &self, + commit_oid: &CommitOid, + path: &str, + line_no: usize, + ) -> GitResult { + // Validate commit exists (git2 blame_file always operates on HEAD, + // so we validate commit for early error rather than semantic correctness). 
+ let oid = commit_oid + .to_oid() + .map_err(|_| GitError::InvalidOid(commit_oid.to_string()))?; + let _commit = self + .repo() + .find_commit(oid) + .map_err(|e| GitError::ObjectNotFound(e.to_string()))?; + let mut blame_opts = git2::BlameOptions::new(); + let blame = self + .repo() + .blame_file(std::path::Path::new(path), Some(&mut blame_opts)) + .map_err(|e| GitError::Internal(e.to_string()))?; + + // Use get_line to find the hunk at the given line + let hunk_opt = blame.get_line(line_no); + + match hunk_opt { + Some(hunk) => Ok(CommitBlameHunk { + commit_oid: CommitOid::from_git2(hunk.orig_commit_id()), + final_start_line: hunk.final_start_line() as u32, + final_lines: hunk.lines_in_hunk() as u32, + orig_start_line: hunk.orig_start_line() as u32, + // NOTE: git2 0.20.4 BlameHunk does not expose the original hunk line count. + // orig_lines_in_hunk() is not available, so we reuse lines_in_hunk(). + orig_lines: hunk.lines_in_hunk() as u32, + boundary: hunk.is_boundary(), + orig_path: hunk.path().map(|p| p.to_string_lossy().to_string()), + }), + None => Err(GitError::Internal(format!( + "no blame hunk found for line {}", + line_no + ))), + } + } +} diff --git a/libs/git/blob/mod.rs b/libs/git/blob/mod.rs new file mode 100644 index 0000000..990d3d2 --- /dev/null +++ b/libs/git/blob/mod.rs @@ -0,0 +1,3 @@ +//! Blob domain — all blob-related operations on a GitDomain. +pub mod ops; +pub mod types; diff --git a/libs/git/blob/ops.rs b/libs/git/blob/ops.rs new file mode 100644 index 0000000..c6ed2e2 --- /dev/null +++ b/libs/git/blob/ops.rs @@ -0,0 +1,80 @@ +//! Blob operations. 
+ +use std::path::Path; + +use crate::blob::types::{BlobContent, BlobInfo}; +use crate::commit::types::CommitOid; +use crate::{GitDomain, GitError, GitResult}; + +impl GitDomain { + pub fn blob_get(&self, oid: &CommitOid) -> GitResult { + let oid = oid + .to_oid() + .map_err(|_| GitError::InvalidOid(oid.to_string()))?; + + let blob = self + .repo() + .find_blob(oid) + .map_err(|_| GitError::ObjectNotFound(oid.to_string()))?; + + Ok(BlobInfo::from_git2(&blob)) + } + + pub fn blob_exists(&self, oid: &CommitOid) -> bool { + oid.to_oid() + .ok() + .and_then(|oid| self.repo.find_blob(oid).ok()) + .is_some() + } + + pub fn blob_is_binary(&self, oid: &CommitOid) -> GitResult { + let oid = oid + .to_oid() + .map_err(|_| GitError::InvalidOid(oid.to_string()))?; + + let blob = self + .repo() + .find_blob(oid) + .map_err(|_| GitError::ObjectNotFound(oid.to_string()))?; + + Ok(blob.is_binary()) + } + + pub fn blob_content(&self, oid: &CommitOid) -> GitResult { + let oid = oid + .to_oid() + .map_err(|_| GitError::InvalidOid(oid.to_string()))?; + + let blob = self + .repo() + .find_blob(oid) + .map_err(|_| GitError::ObjectNotFound(oid.to_string()))?; + + Ok(BlobContent::from_git2(&blob)) + } + + pub fn blob_size(&self, oid: &CommitOid) -> GitResult { + let info = self.blob_get(oid)?; + Ok(info.size) + } + + pub fn blob_create(&self, data: &[u8]) -> GitResult { + let oid = self + .repo() + .blob(data) + .map_err(|e| GitError::Internal(e.to_string()))?; + Ok(CommitOid::from_git2(oid)) + } + + pub fn blob_create_from_path(&self, path: &Path) -> GitResult { + let oid = self + .repo() + .blob_path(path) + .map_err(|e| GitError::Internal(e.to_string()))?; + Ok(CommitOid::from_git2(oid)) + } + + pub fn blob_create_from_string(&self, content: &str) -> GitResult { + self.blob_create(content.as_bytes()) + } +} diff --git a/libs/git/blob/types.rs b/libs/git/blob/types.rs new file mode 100644 index 0000000..6d93ea9 --- /dev/null +++ b/libs/git/blob/types.rs @@ -0,0 +1,41 @@ +//! 
Serializable types for the blob domain. + +use serde::{Deserialize, Serialize}; + +use crate::commit::types::CommitOid; + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct BlobInfo { + pub oid: CommitOid, + pub size: usize, + pub is_binary: bool, +} + +impl BlobInfo { + pub fn from_git2(blob: &git2::Blob<'_>) -> Self { + Self { + oid: CommitOid::from_git2(blob.id()), + size: blob.size(), + is_binary: blob.is_binary(), + } + } +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct BlobContent { + pub oid: CommitOid, + pub size: usize, + pub is_binary: bool, + pub content: Vec, +} + +impl BlobContent { + pub fn from_git2(blob: &git2::Blob<'_>) -> Self { + Self { + oid: CommitOid::from_git2(blob.id()), + size: blob.size(), + is_binary: blob.is_binary(), + content: blob.content().to_vec(), + } + } +} diff --git a/libs/git/branch/merge.rs b/libs/git/branch/merge.rs new file mode 100644 index 0000000..3914f56 --- /dev/null +++ b/libs/git/branch/merge.rs @@ -0,0 +1,150 @@ +//! Branch merge operations. + +use crate::commit::types::CommitOid; +use crate::{GitDomain, GitError, GitResult}; + +impl GitDomain { + pub fn branch_is_merged(&self, branch: &str, into: &str) -> GitResult { + let branch_oid = self + .branch_target(branch)? + .ok_or_else(|| GitError::InvalidOid(format!("branch {} has no target", branch)))? + .to_oid()?; + let into_oid = self + .branch_target(into)? + .ok_or_else(|| GitError::InvalidOid(format!("branch {} has no target", into)))? + .to_oid()?; + + self.repo + .graph_ahead_behind(into_oid, branch_oid) + .map(|(ahead, _)| ahead == 0) + .map_err(|e| GitError::Internal(e.to_string())) + } + + pub fn branch_merge_base(&self, branch1: &str, branch2: &str) -> GitResult { + let oid1 = self + .branch_target(branch1)? + .ok_or_else(|| GitError::InvalidOid(format!("branch {} has no target", branch1)))? + .to_oid()?; + let oid2 = self + .branch_target(branch2)? 
+ .ok_or_else(|| GitError::InvalidOid(format!("branch {} has no target", branch2)))? + .to_oid()?; + + let base = self + .repo() + .merge_base(oid1, oid2) + .map_err(|e| GitError::Internal(e.to_string()))?; + + Ok(CommitOid::from_git2(base)) + } + + pub fn branch_is_ancestor(&self, child: &str, ancestor: &str) -> GitResult { + let child_oid = self + .branch_target(child)? + .ok_or_else(|| GitError::InvalidOid(format!("branch {} has no target", child)))? + .to_oid()?; + let ancestor_oid = self + .branch_target(ancestor)? + .ok_or_else(|| GitError::InvalidOid(format!("branch {} has no target", ancestor)))? + .to_oid()?; + + self.repo + .graph_ahead_behind(child_oid, ancestor_oid) + .map(|(_, behind)| behind == 0) + .map_err(|e| GitError::Internal(e.to_string())) + } + + pub fn branch_fast_forward(&self, target: &str, force: bool) -> GitResult { + let target_oid = self + .branch_target(target)? + .ok_or_else(|| GitError::InvalidOid(format!("branch {} has no target", target)))? + .to_oid()?; + + let head = self + .repo() + .head() + .ok() + .ok_or_else(|| GitError::Internal("HEAD is not attached".to_string()))?; + let head_oid = head + .target() + .ok_or_else(|| GitError::Internal("HEAD has no target".to_string()))?; + let head_name = head + .name() + .ok_or_else(|| GitError::Internal("HEAD has no name".to_string()))?; + + let ref_name = if head_name.starts_with("refs/") { + head_name.to_string() + } else { + format!("refs/heads/{}", head_name) + }; + + let (ahead, behind) = self + .repo() + .graph_ahead_behind(head_oid, target_oid) + .map_err(|e| GitError::Internal(e.to_string()))?; + + if behind == 0 { + return Ok(CommitOid::from_git2(head_oid)); + } + + if !force && ahead > 0 { + // ahead > 0 && behind > 0 means diverged; ahead == 0 && behind > 0 is valid FF + return Err(GitError::Internal( + "not a fast-forward: branches have diverged".to_string(), + )); + } + + self.repo + .reference_matching( + ref_name.as_str(), + target_oid, + true, + head_oid, + 
"fast-forward", + ) + .map_err(|e| GitError::Internal(e.to_string()))?; + + self.repo + .set_head_detached(target_oid) + .map_err(|e| GitError::Internal(e.to_string()))?; + + self.repo + .checkout_tree(self.repo.find_commit(target_oid)?.as_object(), None) + .map_err(|e| GitError::Internal(e.to_string()))?; + + Ok(CommitOid::from_git2(target_oid)) + } + + pub fn branch_abort_merge(&self) -> GitResult<()> { + let head_oid = self + .repo() + .head() + .ok() + .and_then(|r| r.target()) + .ok_or_else(|| GitError::Internal("HEAD is not attached".to_string()))?; + + let obj = self + .repo() + .find_object(head_oid, None) + .map_err(|e| GitError::Internal(e.to_string()))?; + + self.repo + .reset(&obj, git2::ResetType::Hard, None) + .map_err(|e| GitError::Internal(e.to_string())) + } + + pub fn branch_is_conflicted(&self) -> bool { + self.repo + .index() + .map(|idx| idx.has_conflicts()) + .unwrap_or(false) + } + + pub fn branch_tracking_difference(&self, name: &str) -> GitResult<(usize, usize)> { + let upstream = self.branch_upstream(name)?; + match upstream { + Some(u) => self.branch_ahead_behind(name, &u.name), + None => Ok((0, 0)), + } + } +} diff --git a/libs/git/branch/mod.rs b/libs/git/branch/mod.rs new file mode 100644 index 0000000..80384d2 --- /dev/null +++ b/libs/git/branch/mod.rs @@ -0,0 +1,5 @@ +//! Branch domain — all branch-related operations on a GitDomain. +pub mod merge; +pub mod ops; +pub mod query; +pub mod types; diff --git a/libs/git/branch/ops.rs b/libs/git/branch/ops.rs new file mode 100644 index 0000000..f3d8d23 --- /dev/null +++ b/libs/git/branch/ops.rs @@ -0,0 +1,198 @@ +//! Branch create/delete/rename operations. 
+ +use git2::BranchType; + +use crate::branch::types::BranchInfo; +use crate::commit::types::CommitOid; +use crate::ref_utils::validate_ref_name; +use crate::{GitDomain, GitError, GitResult}; + +impl GitDomain { + pub fn branch_create(&self, name: &str, oid: &CommitOid, force: bool) -> GitResult { + validate_ref_name(name)?; + + let target = oid + .to_oid() + .map_err(|_| GitError::InvalidOid(oid.to_string()))?; + + let commit = self + .repo() + .find_commit(target) + .map_err(|e| GitError::Internal(e.to_string()))?; + + let branch = self.repo.branch(name, &commit, force).map_err(|e| { + if e.code() == git2::ErrorCode::Exists && !force { + GitError::BranchExists(name.to_string()) + } else { + GitError::Internal(e.to_string()) + } + })?; + + let full_name = branch.get().name().unwrap_or("").to_string(); + + Ok(BranchInfo { + name: full_name, + oid: CommitOid::from_git2(target), + is_head: false, + is_remote: false, + is_current: false, + upstream: None, + }) + } + + pub fn branch_create_from_head(&self, name: &str, force: bool) -> GitResult { + let head_oid = self + .repo() + .head() + .ok() + .and_then(|r| r.target()) + .ok_or_else(|| GitError::Internal("HEAD is not attached".to_string()))?; + + self.branch_create(name, &CommitOid::from_git2(head_oid), force) + } + + pub fn branch_delete(&self, name: &str) -> GitResult<()> { + let full_name = if name.starts_with("refs/heads/") { + name.to_string() + } else { + format!("refs/heads/{}", name) + }; + + let mut branch = self + .repo() + .find_branch(&full_name, BranchType::Local) + .map_err(|_e| GitError::RefNotFound(name.to_string()))?; + + branch + .delete() + .map_err(|e| GitError::Internal(e.to_string())) + } + + pub fn branch_delete_remote(&self, name: &str) -> GitResult<()> { + let full_name = format!("refs/remotes/{}", name); + + let mut branch = self + .repo() + .find_branch(&full_name, BranchType::Remote) + .map_err(|_e| GitError::RefNotFound(name.to_string()))?; + + branch + .delete() + .map_err(|e| 
GitError::Internal(e.to_string())) + } + + pub fn branch_rename(&self, old_name: &str, new_name: &str) -> GitResult { + validate_ref_name(new_name)?; + + let old_full = if old_name.starts_with("refs/heads/") { + old_name.to_string() + } else { + format!("refs/heads/{}", old_name) + }; + + let mut branch = self + .repo() + .find_branch(&old_full, BranchType::Local) + .map_err(|_e| GitError::RefNotFound(old_name.to_string()))?; + + let target = branch + .get() + .target() + .ok_or_else(|| GitError::Internal("branch has no target".to_string()))?; + + branch.rename(new_name, false).map_err(|e| { + if e.code() == git2::ErrorCode::Exists { + GitError::BranchExists(new_name.to_string()) + } else { + GitError::Internal(e.to_string()) + } + })?; + + Ok(BranchInfo { + name: format!("refs/heads/{}", new_name), + oid: CommitOid::from_git2(target), + is_head: false, + is_remote: false, + is_current: false, + upstream: None, + }) + } + + pub fn branch_move(&self, name: &str, new_name: &str, force: bool) -> GitResult { + validate_ref_name(new_name)?; + + let full_name = if name.starts_with("refs/heads/") { + name.to_string() + } else { + format!("refs/heads/{}", name) + }; + + let mut branch = self + .repo() + .find_branch(&full_name, BranchType::Local) + .map_err(|_e| GitError::RefNotFound(name.to_string()))?; + + let target = branch + .get() + .target() + .ok_or_else(|| GitError::Internal("branch has no target".to_string()))?; + + let commit = self + .repo() + .find_commit(target) + .map_err(|e| GitError::Internal(e.to_string()))?; + + // Delete the old branch first. If deletion fails, we fail atomically. + branch + .delete() + .map_err(|e| GitError::Internal(e.to_string()))?; + + // Create the new branch pointing to the same commit. 
+ self.repo().branch(new_name, &commit, force).map_err(|e| { + if e.code() == git2::ErrorCode::Exists && !force { + GitError::BranchExists(new_name.to_string()) + } else { + GitError::Internal(e.to_string()) + } + })?; + + Ok(BranchInfo { + name: format!("refs/heads/{}", new_name), + oid: CommitOid::from_git2(target), + is_head: false, + is_remote: false, + is_current: false, + upstream: None, + }) + } + + pub fn branch_set_upstream(&self, name: &str, upstream: Option<&str>) -> GitResult<()> { + let full_name = if name.starts_with("refs/heads/") { + name.to_string() + } else { + format!("refs/heads/{}", name) + }; + + let mut branch = self + .repo() + .find_branch(&full_name, BranchType::Local) + .map_err(|_e| GitError::RefNotFound(name.to_string()))?; + + match upstream { + Some(u) => { + let upstream_name = if u.starts_with("refs/remotes/") || u.contains('/') { + u.to_string() + } else { + format!("refs/remotes/{}", u) + }; + + branch + .set_upstream(Some(&upstream_name)) + .map_err(|e| GitError::Internal(e.to_string())) + } + None => branch + .set_upstream(None) + .map_err(|e| GitError::Internal(e.to_string())), + } + } +} diff --git a/libs/git/branch/query.rs b/libs/git/branch/query.rs new file mode 100644 index 0000000..5c5f49f --- /dev/null +++ b/libs/git/branch/query.rs @@ -0,0 +1,265 @@ +//! Branch querying operations. 
+ +use git2::BranchType; + +use crate::branch::types::{BranchDiff, BranchInfo, BranchSummary}; +use crate::commit::types::CommitOid; +use crate::{GitDomain, GitError, GitResult}; + +impl GitDomain { + pub fn branch_list(&self, remote_only: bool) -> GitResult> { + let branch_type = if remote_only { + BranchType::Remote + } else { + BranchType::Local + }; + + let mut branches = Vec::with_capacity(16); + let head_name = self.repo.head().ok().and_then(|r| { + r.name() + .map(|name| name.strip_prefix("refs/heads/").unwrap_or(name).to_string()) + }); + + for branch_result in self + .repo() + .branches(Some(branch_type)) + .map_err(|e| GitError::Internal(e.to_string()))? + { + let (branch, _) = branch_result.map_err(|e| GitError::Internal(e.to_string()))?; + if let Some(name) = branch.name().ok().flatten() { + let Some(target) = branch.get().target() else { + continue; // skip branches without a target + }; + let name = name.to_string(); + let oid = CommitOid::from_git2(target); + let is_head = head_name.as_ref().map_or(false, |h| h == &name); + let is_current = branch.is_head(); + + branches.push(BranchInfo { + name, + oid, + is_head, + is_remote: remote_only, + is_current, + upstream: None, + }); + } + } + + Ok(branches) + } + + pub fn branch_list_local(&self) -> GitResult> { + self.branch_list(false) + } + + pub fn branch_list_remote(&self) -> GitResult> { + self.branch_list(true) + } + + pub fn branch_list_all(&self) -> GitResult> { + let mut all = self.branch_list(false)?; + let remote = self.branch_list(true)?; + for mut r in remote { + r.is_remote = true; + all.push(r); + } + Ok(all) + } + + pub fn branch_summary(&self) -> GitResult { + let local = self.branch_list(false)?; + let remote = self.branch_list(true)?; + Ok(BranchSummary { + local_count: local.len(), + remote_count: remote.len(), + all_count: local.len() + remote.len(), + }) + } + + pub fn branch_get(&self, name: &str) -> GitResult { + // Determine full ref name and branch type + let full_name = if 
name.starts_with("refs/heads/") { + name.to_string() + } else if name.starts_with("refs/remotes/") { + name.to_string() + } else if name.contains('/') { + // e.g. "origin/main" → remote branch + format!("refs/remotes/{}", name) + } else { + format!("refs/heads/{}", name) + }; + + let branch = self + .repo() + .find_branch(&full_name, git2::BranchType::Local) + .or_else(|_| self.repo.find_branch(&full_name, git2::BranchType::Remote)) + .map_err(|_e| GitError::RefNotFound(name.to_string()))?; + + let target = branch + .get() + .target() + .ok_or_else(|| GitError::Internal("branch has no target".to_string()))?; + let oid = CommitOid::from_git2(target); + let head_name = self + .repo() + .head() + .ok() + .and_then(|r| r.name().map(String::from)); + let branch_name = branch.name().ok().flatten().unwrap_or_default(); + + Ok(BranchInfo { + name: branch_name.to_string(), + oid, + is_head: head_name.as_ref().map_or(false, |h| h == &full_name), + is_remote: full_name.starts_with("refs/remotes/"), + is_current: branch.is_head(), + upstream: branch.upstream().ok().map(|u| { + u.name() + .ok() + .and_then(|n| n) + .unwrap_or_default() + .to_string() + }), + }) + } + + pub fn branch_exists(&self, name: &str) -> bool { + let full_name = if name.starts_with("refs/heads/") || name.starts_with("refs/remotes/") { + name.to_string() + } else if name.contains('/') { + format!("refs/remotes/{}", name) + } else { + format!("refs/heads/{}", name) + }; + + self.repo.find_branch(&full_name, BranchType::Local).is_ok() + || self + .repo() + .find_branch(&full_name, BranchType::Remote) + .is_ok() + } + + pub fn branch_is_head(&self, name: &str) -> GitResult { + let info = self.branch_get(name)?; + Ok(info.is_head) + } + + pub fn branch_current(&self) -> GitResult> { + let head = self + .repo() + .head() + .map_err(|e| GitError::Internal(e.to_string()))?; + + if let Some(name) = head.name() { + let name = name.to_string(); + if name.starts_with("refs/heads/") { + return 
Ok(Some(self.branch_get(&name)?)); + } + } + + Ok(None) + } + + pub fn branch_target(&self, name: &str) -> GitResult> { + let info = self.branch_get(name)?; + if info.oid.0.is_empty() { + Ok(None) + } else { + Ok(Some(info.oid)) + } + } + + pub fn branch_upstream(&self, name: &str) -> GitResult> { + let full_name = if name.starts_with("refs/heads/") { + name.to_string() + } else { + format!("refs/heads/{}", name) + }; + + let branch = self + .repo() + .find_branch(&full_name, BranchType::Local) + .map_err(|_e| GitError::RefNotFound(name.to_string()))?; + + match branch.upstream() { + Ok(up) => { + let up_target = up.get().target().ok_or_else(|| { + GitError::Internal("upstream branch has no target".to_string()) + })?; + Ok(Some(BranchInfo { + name: up + .name() + .ok() + .and_then(|n| n) + .unwrap_or_default() + .to_string(), + oid: CommitOid::from_git2(up_target), + is_head: false, + is_remote: true, + is_current: false, + upstream: None, + })) + } + Err(e) if e.code() == git2::ErrorCode::NotFound => Ok(None), + Err(e) => Err(GitError::Internal(e.to_string())), + } + } + + pub fn branch_upstream_name(&self, name: &str) -> GitResult> { + let upstream = self.branch_upstream(name)?; + Ok(upstream.map(|u| u.name)) + } + + pub fn branch_has_upstream(&self, name: &str) -> GitResult { + let full_name = if name.starts_with("refs/heads/") { + name.to_string() + } else { + format!("refs/heads/{}", name) + }; + + let branch = self + .repo() + .find_branch(&full_name, BranchType::Local) + .map_err(|_e| GitError::RefNotFound(name.to_string()))?; + + Ok(branch.upstream().is_ok()) + } + + pub fn branch_is_detached(&self) -> bool { + self.repo.head().map_or(false, |h| !h.is_branch()) + } + + pub fn branch_diff(&self, local: &str, remote: &str) -> GitResult { + let local_oid = self.branch_target(local)?; + let remote_oid = self.branch_target(remote)?; + + match (local_oid, remote_oid) { + (Some(l), Some(r)) => { + let l_oid = l.to_oid()?; + let r_oid = r.to_oid()?; + + let (ahead, 
behind) = self + .repo() + .graph_ahead_behind(l_oid, r_oid) + .map_err(|e| GitError::Internal(e.to_string()))?; + + Ok(BranchDiff { + ahead, + behind, + diverged: ahead > 0 && behind > 0, + }) + } + _ => Ok(BranchDiff { + ahead: 0, + behind: 0, + diverged: false, + }), + } + } + + pub fn branch_ahead_behind(&self, local: &str, upstream: &str) -> GitResult<(usize, usize)> { + let diff = self.branch_diff(local, upstream)?; + Ok((diff.ahead, diff.behind)) + } +} diff --git a/libs/git/branch/types.rs b/libs/git/branch/types.rs new file mode 100644 index 0000000..2587e1f --- /dev/null +++ b/libs/git/branch/types.rs @@ -0,0 +1,29 @@ +//! Serializable types for the branch domain. + +use serde::{Deserialize, Serialize}; + +use crate::commit::types::CommitOid; + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct BranchInfo { + pub name: String, + pub oid: CommitOid, + pub is_head: bool, + pub is_remote: bool, + pub is_current: bool, + pub upstream: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct BranchSummary { + pub local_count: usize, + pub remote_count: usize, + pub all_count: usize, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct BranchDiff { + pub ahead: usize, + pub behind: usize, + pub diverged: bool, +} diff --git a/libs/git/commit/cherry_pick.rs b/libs/git/commit/cherry_pick.rs new file mode 100644 index 0000000..7302f6d --- /dev/null +++ b/libs/git/commit/cherry_pick.rs @@ -0,0 +1,105 @@ +//! Cherry-pick operations. + +use crate::commit::types::*; +use crate::{GitDomain, GitError, GitResult}; + +impl GitDomain { + pub fn commit_cherry_pick( + &self, + cherrypick_oid: &CommitOid, + author: &CommitSignature, + committer: &CommitSignature, + message: Option<&str>, + mainline: u32, + update_ref: Option<&str>, + ) -> GitResult { + let cherrypick_commit = self + .repo() + .find_commit(cherrypick_oid.to_oid()?) 
+ .map_err(|_e| GitError::ObjectNotFound(cherrypick_oid.to_string()))?; + + let head_oid = self + .repo() + .head() + .ok() + .and_then(|r| r.target()) + .ok_or_else(|| GitError::Internal("HEAD is not attached".to_string()))?; + let our_commit = self + .repo() + .find_commit(head_oid) + .map_err(|e| GitError::Internal(e.to_string()))?; + + let mut index = self + .repo() + .cherrypick_commit(&cherrypick_commit, &our_commit, mainline, None) + .map_err(|e| GitError::Internal(e.to_string()))?; + + let tree_oid = index + .write_tree_to(&*self.repo()) + .map_err(|e| GitError::Internal(e.to_string()))?; + let tree = self + .repo() + .find_tree(tree_oid) + .map_err(|e| GitError::Internal(e.to_string()))?; + + let msg = message.map(String::from).unwrap_or_else(|| { + format!( + "cherry-pick commit {}", + &cherrypick_oid.to_string()[..8.min(cherrypick_oid.to_string().len())] + ) + }); + + let author = self.commit_signature_to_git2(author)?; + let committer = self.commit_signature_to_git2(committer)?; + + let oid = self + .repo() + .commit(update_ref, &author, &committer, &msg, &tree, &[&our_commit]) + .map_err(|e| GitError::Internal(e.to_string()))?; + + Ok(CommitOid::from_git2(oid)) + } + + pub fn commit_cherry_pick_sequence( + &self, + cherrypick_oids: &[CommitOid], + author: &CommitSignature, + committer: &CommitSignature, + update_ref: Option<&str>, + ) -> GitResult { + let mut last_oid: Option = None; + for oid in cherrypick_oids { + last_oid = Some(self.commit_cherry_pick(oid, author, committer, None, 0, update_ref)?); + } + last_oid.ok_or_else(|| GitError::Internal("no commits to cherry-pick".to_string())) + } + + pub fn commit_cherry_pick_abort(&self, reset_type: &str) -> GitResult<()> { + let kind = match reset_type { + "soft" => git2::ResetType::Soft, + "mixed" => git2::ResetType::Mixed, + "hard" => git2::ResetType::Hard, + _ => { + return Err(GitError::Internal(format!( + "unknown reset type: {}", + reset_type + ))); + } + }; + + let head_oid = self + .repo() + 
.head() + .ok() + .and_then(|r| r.target()) + .ok_or_else(|| GitError::Internal("HEAD is not attached".to_string()))?; + let obj = self + .repo() + .find_object(head_oid, None) + .map_err(|e| GitError::Internal(e.to_string()))?; + + self.repo + .reset(&obj, kind, None) + .map_err(|e| GitError::Internal(e.to_string())) + } +} diff --git a/libs/git/commit/create.rs b/libs/git/commit/create.rs new file mode 100644 index 0000000..92c7c01 --- /dev/null +++ b/libs/git/commit/create.rs @@ -0,0 +1,314 @@ +//! Commit creation and modification operations. + +use git2::Signature; + +use crate::commit::types::*; +use crate::{GitDomain, GitError, GitResult}; + +fn parents_to_git2<'a>( + repo: &'a git2::Repository, + parent_ids: &[CommitOid], +) -> GitResult>> { + parent_ids + .iter() + .map(|oid| { + repo.find_commit(oid.to_oid()?) + .map_err(|_e| GitError::ObjectNotFound(oid.to_string())) + }) + .collect() +} + +impl GitDomain { + pub fn commit_default_signature(&self) -> GitResult { + let sig = self + .repo() + .signature() + .map_err(|e| GitError::Internal(e.to_string()))?; + Ok(CommitSignature::from_git2(sig)) + } + + pub fn commit_signature_now(&self, name: &str, email: &str) -> GitResult { + let sig = Signature::now(name, email).map_err(|e| GitError::Internal(e.to_string()))?; + Ok(CommitSignature::from_git2(sig)) + } + + pub fn commit_signature_at( + &self, + name: &str, + email: &str, + time_secs: i64, + offset_minutes: i32, + ) -> GitResult { + let time = git2::Time::new(time_secs, offset_minutes); + let sig = + Signature::new(name, email, &time).map_err(|e| GitError::Internal(e.to_string()))?; + Ok(CommitSignature::from_git2(sig)) + } + + pub fn commit_signature_to_git2(&self, sig: &CommitSignature) -> GitResult> { + let time = git2::Time::new(sig.time_secs, sig.offset_minutes); + Signature::new(&sig.name, &sig.email, &time).map_err(|e| GitError::Internal(e.to_string())) + } + + pub fn commit_create( + &self, + update_ref: Option<&str>, + author: &CommitSignature, + 
committer: &CommitSignature, + message: &str, + tree_id: &CommitOid, + parent_ids: &[CommitOid], + ) -> GitResult { + let author = self.commit_signature_to_git2(author)?; + let committer = self.commit_signature_to_git2(committer)?; + let tree = self + .repo() + .find_tree(tree_id.to_oid()?) + .map_err(|e| GitError::Internal(e.to_string()))?; + let parents = parents_to_git2(&*self.repo(), parent_ids)?; + let parent_refs: Vec<&git2::Commit<'_>> = + parents.iter().map(|p| p as &git2::Commit).collect(); + let oid = self + .repo() + .commit( + update_ref, + &author, + &committer, + message, + &tree, + &parent_refs, + ) + .map_err(|e| GitError::Internal(e.to_string()))?; + Ok(CommitOid::from_git2(oid)) + } + + pub fn commit_create_from_index( + &self, + update_ref: Option<&str>, + author: &CommitSignature, + committer: &CommitSignature, + message: &str, + parent_ids: &[CommitOid], + ) -> GitResult { + let author = self.commit_signature_to_git2(author)?; + let committer = self.commit_signature_to_git2(committer)?; + let mut index = self + .repo() + .index() + .map_err(|e| GitError::Internal(e.to_string()))?; + let tree_oid = index + .write_tree() + .map_err(|e| GitError::Internal(e.to_string()))?; + let tree = self + .repo() + .find_tree(tree_oid) + .map_err(|e| GitError::Internal(e.to_string()))?; + let parents = parents_to_git2(&*self.repo(), parent_ids)?; + let parent_refs: Vec<&git2::Commit<'_>> = + parents.iter().map(|p| p as &git2::Commit).collect(); + let oid = self + .repo() + .commit( + update_ref, + &author, + &committer, + message, + &tree, + &parent_refs, + ) + .map_err(|e| GitError::Internal(e.to_string()))?; + Ok(CommitOid::from_git2(oid)) + } + + pub fn commit_sign( + &self, + author: &CommitSignature, + committer: &CommitSignature, + message: &str, + tree_id: &CommitOid, + parent_ids: &[CommitOid], + gpg_signature: &str, + signature_field: Option<&str>, + ) -> GitResult { + let author = self.commit_signature_to_git2(author)?; + let committer = 
self.commit_signature_to_git2(committer)?; + let tree = self + .repo() + .find_tree(tree_id.to_oid()?) + .map_err(|e| GitError::Internal(e.to_string()))?; + let parents = parents_to_git2(&*self.repo(), parent_ids)?; + let parent_refs: Vec<&git2::Commit<'_>> = + parents.iter().map(|p| p as &git2::Commit).collect(); + let buf = self + .repo() + .commit_create_buffer(&author, &committer, message, &tree, &parent_refs) + .map_err(|e| GitError::Internal(e.to_string()))?; + let buf_str = std::str::from_utf8(&*buf).map_err(|e| GitError::Internal(e.to_string()))?; + let oid = self + .repo() + .commit_signed(buf_str, gpg_signature, signature_field) + .map_err(|e| GitError::Internal(e.to_string()))?; + Ok(CommitOid::from_git2(oid)) + } + + pub fn commit_extract_signature( + &self, + commit_oid: &CommitOid, + signature_field: Option<&str>, + ) -> GitResult> { + match self + .repo() + .extract_signature(&commit_oid.to_oid()?, signature_field) + { + Ok((sig_buf, content_buf)) => Ok(Some(( + String::from_utf8_lossy(&*sig_buf).to_string(), + String::from_utf8_lossy(&*content_buf).to_string(), + ))), + Err(e) => { + if e.code() == git2::ErrorCode::NotFound { + Ok(None) + } else { + Err(GitError::Internal(e.to_string())) + } + } + } + } + + pub fn commit_empty( + &self, + update_ref: Option<&str>, + author: &CommitSignature, + committer: &CommitSignature, + message: &str, + parent_ids: &[CommitOid], + ) -> GitResult { + let author = self.commit_signature_to_git2(author)?; + let committer = self.commit_signature_to_git2(committer)?; + let tree = if let Some(first) = parent_ids.first() { + let commit = self + .repo() + .find_commit(first.to_oid()?) + .map_err(|_e| GitError::ObjectNotFound(first.to_string()))?; + commit + .tree() + .map_err(|e| GitError::Internal(e.to_string()))? 
+ } else { + let mut index = self + .repo() + .index() + .map_err(|e| GitError::Internal(e.to_string()))?; + let tree_oid = index + .write_tree() + .map_err(|e| GitError::Internal(e.to_string()))?; + self.repo() + .find_tree(tree_oid) + .map_err(|e| GitError::Internal(e.to_string()))? + }; + let parents = parents_to_git2(&*self.repo(), parent_ids)?; + let parent_refs: Vec<&git2::Commit<'_>> = + parents.iter().map(|p| p as &git2::Commit).collect(); + let oid = self + .repo() + .commit( + update_ref, + &author, + &committer, + message, + &tree, + &parent_refs, + ) + .map_err(|e| GitError::Internal(e.to_string()))?; + Ok(CommitOid::from_git2(oid)) + } + + pub fn commit_amend( + &self, + commit_oid: &CommitOid, + update_ref: Option<&str>, + author: Option<&CommitSignature>, + committer: Option<&CommitSignature>, + message_encoding: Option<&str>, + message: Option<&str>, + tree_id: Option<&CommitOid>, + ) -> GitResult { + let commit = self + .repo() + .find_commit(commit_oid.to_oid()?) + .map_err(|_e| GitError::ObjectNotFound(commit_oid.to_string()))?; + let author = author + .map(|a| self.commit_signature_to_git2(a)) + .transpose()?; + let committer = committer + .map(|c| self.commit_signature_to_git2(c)) + .transpose()?; + let tree = tree_id + .map(|t| { + self.repo + .find_tree(t.to_oid()?) 
+ .map_err(|e| GitError::Internal(e.to_string())) + }) + .transpose()?; + let oid = commit + .amend( + update_ref, + author.as_ref(), + committer.as_ref(), + message_encoding, + message, + tree.as_ref(), + ) + .map_err(|e| GitError::Internal(e.to_string()))?; + Ok(CommitOid::from_git2(oid)) + } + + pub fn commit_amend_author( + &self, + commit_oid: &CommitOid, + new_author: &CommitSignature, + update_ref: Option<&str>, + ) -> GitResult { + self.commit_amend( + commit_oid, + update_ref, + Some(new_author), + None, + None, + None, + None, + ) + } + + pub fn commit_amend_message( + &self, + commit_oid: &CommitOid, + new_message: &str, + update_ref: Option<&str>, + ) -> GitResult { + self.commit_amend( + commit_oid, + update_ref, + None, + None, + None, + Some(new_message), + None, + ) + } + + pub fn commit_amend_tree( + &self, + commit_oid: &CommitOid, + new_tree_id: &CommitOid, + update_ref: Option<&str>, + ) -> GitResult { + self.commit_amend( + commit_oid, + update_ref, + None, + None, + None, + None, + Some(new_tree_id), + ) + } +} diff --git a/libs/git/commit/graph.rs b/libs/git/commit/graph.rs new file mode 100644 index 0000000..92fa9ed --- /dev/null +++ b/libs/git/commit/graph.rs @@ -0,0 +1,218 @@ +//! Visual ASCII commit graph output. + +use serde::{Deserialize, Serialize}; + +use crate::commit::types::*; +use crate::{GitDomain, GitResult}; + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct CommitGraphLine { + pub oid: CommitOid, + pub graph_chars: String, + pub refs: String, + pub short_message: String, + /// Column index (0-based) where the commit dot is rendered. + /// Used by @gitgraph/react to assign lane color. + pub lane_index: usize, + /// Full commit metadata (author, timestamp, parents, etc.) 
/// One rendered row of the ASCII commit graph.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CommitGraphLine {
    pub oid: CommitOid,
    pub graph_chars: String,
    pub refs: String,
    pub short_message: String,
    /// Column index (0-based) where the commit dot is rendered.
    /// Used by @gitgraph/react to assign lane color.
    pub lane_index: usize,
    /// Full commit metadata (author, timestamp, parents, etc.)
    pub meta: CommitMeta,
}

/// The whole rendered graph plus the widest merge encountered.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CommitGraph {
    pub lines: Vec<CommitGraphLine>,
    pub max_parents: usize,
}

/// Mutable lane-allocation state threaded through the walk.
#[derive(Default)]
struct GraphState {
    // Which commit (if any) each column is currently waiting for.
    column_to_commit: Vec<Option<CommitOid>>,
    // The commit that most recently occupied each column.
    column_prev_commit: Vec<Option<CommitOid>>,
    // Count of columns currently holding a commit.
    active_columns: usize,
}

/// Options for `commit_graph` (builder-style setters below).
pub struct CommitGraphOptions {
    pub rev: Option<String>,
    pub limit: usize,
    pub first_parent_only: bool,
    pub show_refs: bool,
}

impl Default for CommitGraphOptions {
    fn default() -> Self {
        Self {
            rev: None,
            limit: 0,
            first_parent_only: false,
            show_refs: true,
        }
    }
}

impl CommitGraphOptions {
    /// Start the walk from this revision instead of HEAD.
    pub fn rev(mut self, rev: &str) -> Self {
        self.rev = Some(rev.to_string());
        self
    }

    /// Cap the number of commits walked (0 = unlimited).
    pub fn limit(mut self, n: usize) -> Self {
        self.limit = n;
        self
    }

    /// Follow only each commit's first parent.
    pub fn first_parent_only(mut self) -> Self {
        self.first_parent_only = true;
        self
    }

    /// Skip looking up branch/tag decorations per commit.
    pub fn no_refs(mut self) -> Self {
        self.show_refs = false;
        self
    }
}

impl GitDomain {
    /// Build an ASCII commit graph.
    ///
    /// Walks history via `commit_walk` (topological + time order), then
    /// assigns each commit a lane and renders its row.
    pub fn commit_graph(&self, opts: CommitGraphOptions) -> GitResult<CommitGraph> {
        let commits = self.commit_walk(crate::commit::traverse::CommitWalkOptions {
            rev: opts.rev.map(String::from),
            sort: CommitSort(git2::Sort::TOPOLOGICAL.bits() | git2::Sort::TIME.bits()),
            limit: opts.limit,
            first_parent_only: opts.first_parent_only,
        })?;

        let mut state = GraphState::default();
        let mut lines = Vec::with_capacity(commits.len());
        let mut max_parents = 0;

        for commit in commits {
            // Track the widest merge seen, for callers sizing the display.
            max_parents = max_parents.max(commit.parent_ids.len());
            let line = self.build_graph_line(&commit, &mut state, opts.show_refs)?;
            lines.push(line);
        }

        Ok(CommitGraph { lines, max_parents })
    }

    /// Convenience wrapper: default options with an optional rev and limit.
    pub fn commit_graph_simple(&self, rev: Option<&str>, limit: usize) -> GitResult<CommitGraph> {
        let opts = CommitGraphOptions::default().limit(limit);
        let opts = match rev {
            Some(r) => opts.rev(r),
            None => opts,
        };
        self.commit_graph(opts)
    }

    /// Render one graph row (lane chars + decorations) for a commit.
    fn build_graph_line(
        &self,
        commit: &CommitMeta,
        state: &mut GraphState,
        show_refs: bool,
    ) -> GitResult<CommitGraphLine> {
        let oid = commit.oid.clone();
        let short_message = commit.summary.clone();

        let refs = if show_refs {
            self.get_commit_refs_string(&oid)?
        } else {
            String::new()
        };

        let (graph_chars, lane_index) = self.render_graph_chars(commit, state);

        Ok(CommitGraphLine {
            oid,
            graph_chars,
            refs,
            short_message,
            lane_index,
            meta: commit.clone(),
        })
    }

    /// Comma-joined branch/tag names pointing at `oid`, with the
    /// `refs/heads/` / `refs/tags/` prefixes stripped.
    fn get_commit_refs_string(&self, oid: &CommitOid) -> GitResult<String> {
        let refs = self.commit_refs(oid)?;
        let parts: Vec<String> = refs
            .iter()
            .map(|r| {
                if r.is_tag {
                    r.name.trim_start_matches("refs/tags/").to_string()
                } else {
                    r.name.trim_start_matches("refs/heads/").to_string()
                }
            })
            .collect();
        Ok(parts.join(", "))
    }

    /// Render the lane characters for one commit; returns the string and
    /// the 0-based column of the commit's dot.
    fn render_graph_chars(&self, commit: &CommitMeta, state: &mut GraphState) -> (String, usize) {
        let current_oid = &commit.oid;
        let num_parents = commit.parent_ids.len();

        let mut result = String::new();
        // Which existing columns are waiting for this commit.
        let active: Vec<bool> = state
            .column_to_commit
            .iter()
            .map(|col_oid| col_oid.as_ref().map_or(false, |col| col == current_oid))
            .collect();

        // Reuse the column already tracking this commit, or open a new one.
        let current_col = if let Some(pos) = active.iter().position(|a| *a) {
            pos
        } else {
            // NOTE(review): when the very first column is allocated,
            // active_columns is forced to 1 and then incremented again
            // below, leaving it at 2 for a single live column — confirm
            // this is the intended count.
            if state.active_columns == 0 {
                state.active_columns = 1;
            }
            let col = state.column_to_commit.len();
            state.column_to_commit.push(Some(current_oid.clone()));
            state.column_prev_commit.push(None);
            state.active_columns += 1;
            col
        };

        // One cell per column: commit dot, a passing lane, or blank.
        // NOTE(review): blank-cell width may have been two spaces in the
        // original; the patch text collapsed whitespace — verify alignment.
        for i in 0..state.column_to_commit.len() {
            if i == current_col {
                result.push_str("* ");
            } else if state.column_to_commit[i].is_some() {
                result.push_str("| ");
            } else {
                result.push_str(" ");
            }
        }

        // Merge commits get a diagonal hint when free columns exist.
        if num_parents > 1 {
            let available = state
                .column_to_commit
                .len()
                .saturating_sub(state.active_columns);
            if available > 0 {
                result.push_str(&format!("/{}", " ".repeat(available * 2 - 1)));
            }
        }

        // Close every column that was waiting for this commit.
        for col in 0..state.column_to_commit.len() {
            if state.column_to_commit[col].as_ref() == Some(current_oid) {
                state.column_to_commit[col] = None;
                state.active_columns = state.active_columns.saturating_sub(1);
            }
        }

        // Hand each parent a column: reuse freed columns first, else append.
        for (i, parent) in commit.parent_ids.iter().enumerate() {
            if i == 0 && state.active_columns == 0 {
                state.column_to_commit[0] = Some(parent.clone());
                state.column_prev_commit[0] = Some(current_oid.clone());
                state.active_columns = 1;
            } else {
                if let Some(idx) = state.column_to_commit.iter().position(|c| c.is_none()) {
                    state.column_to_commit[idx] = Some(parent.clone());
                    state.column_prev_commit[idx] = Some(current_oid.clone());
                    state.active_columns += 1;
                } else {
                    state.column_to_commit.push(Some(parent.clone()));
                    state.column_prev_commit.push(Some(current_oid.clone()));
                    state.active_columns += 1;
                }
            }
        }

        (result, current_col)
    }
}
+ .unwrap_or("") + .to_string(); + refs.push(CommitRefInfo { + name, + target: oid.clone(), + is_remote: false, + is_tag: false, + }); + } + } + } + + if let Ok(walk) = self.repo.references() { + for ref_result in walk { + if let Ok(r) = ref_result { + if let Some(name) = r.name() { + if name.starts_with("refs/tags/") { + if let Some(target) = r.target() { + if target == oid.to_oid()? { + refs.push(CommitRefInfo { + name: name.to_string(), + target: oid.clone(), + is_remote: false, + is_tag: true, + }); + } + } + } + } + } + } + } + + Ok(refs) + } + + pub fn commit_branches(&self, oid: &CommitOid) -> GitResult> { + let refs = self.commit_refs(oid)?; + Ok(refs + .into_iter() + .filter(|r| !r.is_remote && !r.is_tag) + .map(|r| r.name) + .collect()) + } + + pub fn commit_tags(&self, oid: &CommitOid) -> GitResult> { + let refs = self.commit_refs(oid)?; + Ok(refs + .into_iter() + .filter(|r| r.is_tag) + .map(|r| r.name) + .collect()) + } + + pub fn commit_is_tip(&self, oid: &CommitOid) -> GitResult { + let refs = self.commit_refs(oid)?; + Ok(!refs.is_empty()) + } + + pub fn commit_is_default_tip(&self, oid: &CommitOid) -> GitResult { + let default = self.repo.head().ok().and_then(|r| r.target()); + Ok(default == Some(oid.to_oid()?)) + } + + pub fn commit_reflog( + &self, + oid: &CommitOid, + refname: Option<&str>, + ) -> GitResult> { + let refname = match refname { + Some(name) => name.to_string(), + None => self + .repo() + .head() + .ok() + .and_then(|r| r.name().map(|n| n.to_owned())) + .ok_or_else(|| GitError::Internal("HEAD has no name".to_string()))?, + }; + + let reflog = self + .repo() + .reflog(&refname) + .map_err(|e| GitError::Internal(e.to_string()))?; + + let mut entries = Vec::new(); + let oid_val = oid.to_oid()?; + for entry in reflog.iter() { + if entry.id_new() == oid_val || entry.id_old() == oid_val { + let sig = entry.committer(); + entries.push(CommitReflogEntry { + oid_new: CommitOid::from_git2(entry.id_new()), + oid_old: 
CommitOid::from_git2(entry.id_old()), + committer_name: sig.name().unwrap_or("").to_string(), + committer_email: sig.email().unwrap_or("").to_string(), + time_secs: sig.when().seconds(), + message: entry.message().map(String::from), + ref_name: refname.clone(), + }); + } + } + + Ok(entries) + } + + pub fn commit_ref_count(&self, oid: &CommitOid) -> GitResult { + let refs = self.commit_refs(oid)?; + Ok(refs.len()) + } + + /// Returns all refs (branches + tags) grouped by commit OID. + pub fn refs_grouped(&self) -> GitResult, Vec)>> { + let mut map: HashMap, Vec)> = HashMap::new(); + + for branch_result in self + .repo() + .branches(Some(git2::BranchType::Local)) + .map_err(|e| GitError::Internal(e.to_string()))? + { + let (branch, _) = branch_result.map_err(|e| GitError::Internal(e.to_string()))?; + if let Some(target) = branch.get().target() { + let oid_str = target.to_string(); + let name = branch + .name() + .map_err(|e| GitError::Internal(e.to_string()))? + .unwrap_or("") + .to_string(); + let entry = map + .entry(oid_str) + .or_insert_with(|| (Vec::new(), Vec::new())); + entry.0.push(name); + } + } + + if let Ok(walk) = self.repo.references() { + for ref_result in walk { + if let Ok(r) = ref_result { + if let Some(name) = r.name() { + if name.starts_with("refs/tags/") { + if let Some(target) = r.target() { + let oid_str = target.to_string(); + let full_name = name.to_string(); + let entry = map + .entry(oid_str) + .or_insert_with(|| (Vec::new(), Vec::new())); + entry.1.push(full_name); + } + } + } + } + } + } + + Ok(map) + } + + /// Returns all reflog entries for a given ref (defaults to HEAD). 
+ pub fn reflog_entries(&self, refname: Option<&str>) -> GitResult> { + let refname = match refname { + Some(name) => name.to_string(), + None => self + .repo() + .head() + .ok() + .and_then(|r| r.name().map(|n| n.to_owned())) + .ok_or_else(|| GitError::Internal("HEAD has no name".to_string()))?, + }; + + let reflog = self + .repo() + .reflog(&refname) + .map_err(|e| GitError::Internal(e.to_string()))?; + + let mut entries = Vec::new(); + for entry in reflog.iter() { + let sig = entry.committer(); + entries.push(CommitReflogEntry { + oid_new: CommitOid::from_git2(entry.id_new()), + oid_old: CommitOid::from_git2(entry.id_old()), + committer_name: sig.name().unwrap_or("").to_string(), + committer_email: sig.email().unwrap_or("").to_string(), + time_secs: sig.when().seconds(), + message: entry.message().map(String::from), + ref_name: refname.clone(), + }); + } + + Ok(entries) + } +} diff --git a/libs/git/commit/mod.rs b/libs/git/commit/mod.rs new file mode 100644 index 0000000..42e1f1f --- /dev/null +++ b/libs/git/commit/mod.rs @@ -0,0 +1,10 @@ +//! Commit domain — all commit-related operations on a GitDomain. +pub mod cherry_pick; +pub mod create; +pub mod graph; +pub mod meta; +pub mod query; +pub mod rebase; +pub mod revert; +pub mod traverse; +pub mod types; diff --git a/libs/git/commit/query.rs b/libs/git/commit/query.rs new file mode 100644 index 0000000..26c56f7 --- /dev/null +++ b/libs/git/commit/query.rs @@ -0,0 +1,231 @@ +//! Commit querying operations. + +use crate::commit::types::*; +use crate::{GitDomain, GitError, GitResult}; + +impl GitDomain { + pub fn commit_get(&self, oid: &CommitOid) -> GitResult { + let commit = self + .repo() + .find_commit(oid.to_oid()?) 
+ .map_err(|_e| GitError::ObjectNotFound(oid.to_string()))?; + Ok(CommitMeta::from_git2(&commit)) + } + + pub fn commit_get_prefix(&self, prefix: &str) -> GitResult { + let commit = self + .repo() + .find_commit_by_prefix(prefix) + .map_err(|_e| GitError::InvalidOid(format!("prefix: {}", prefix)))?; + Ok(CommitMeta::from_git2(&commit)) + } + + pub fn commit_exists(&self, oid: &CommitOid) -> bool { + match oid.to_oid() { + Ok(oid) => self.repo.find_commit(oid).is_ok(), + Err(_) => false, + } + } + + pub fn commit_is_commit(&self, oid: &CommitOid) -> bool { + match oid.to_oid() { + Ok(oid) => match self.repo.find_object(oid, None) { + Ok(obj) => obj.kind() == Some(git2::ObjectType::Commit), + Err(_) => false, + }, + Err(_) => false, + } + } + + pub fn commit_message(&self, oid: &CommitOid) -> GitResult { + let commit = self + .repo() + .find_commit(oid.to_oid()?) + .map_err(|_e| GitError::ObjectNotFound(oid.to_string()))?; + Ok(commit.message().unwrap_or("").to_string()) + } + + pub fn commit_summary(&self, oid: &CommitOid) -> GitResult { + let commit = self + .repo() + .find_commit(oid.to_oid()?) + .map_err(|_e| GitError::ObjectNotFound(oid.to_string()))?; + Ok(commit.summary().unwrap_or("").to_string()) + } + + pub fn commit_short_id(&self, oid: &CommitOid) -> GitResult { + let _ = self + .repo() + .find_commit(oid.to_oid()?) + .map_err(|_e| GitError::ObjectNotFound(oid.to_string()))?; + let len = oid.0.len(); + if len < 7 { + return Err(GitError::InvalidOid(oid.to_string())); + } + Ok(oid.0[..7].to_string()) + } + + pub fn commit_author(&self, oid: &CommitOid) -> GitResult { + let commit = self + .repo() + .find_commit(oid.to_oid()?) + .map_err(|_e| GitError::ObjectNotFound(oid.to_string()))?; + Ok(CommitSignature::from_git2(commit.author())) + } + + pub fn commit_committer(&self, oid: &CommitOid) -> GitResult { + let commit = self + .repo() + .find_commit(oid.to_oid()?) 
+ .map_err(|_e| GitError::ObjectNotFound(oid.to_string()))?; + Ok(CommitSignature::from_git2(commit.committer())) + } + + pub fn commit_time(&self, oid: &CommitOid) -> GitResult { + let commit = self + .repo() + .find_commit(oid.to_oid()?) + .map_err(|_e| GitError::ObjectNotFound(oid.to_string()))?; + Ok(commit.time().seconds()) + } + + pub fn commit_time_offset(&self, oid: &CommitOid) -> GitResult { + let commit = self + .repo() + .find_commit(oid.to_oid()?) + .map_err(|_e| GitError::ObjectNotFound(oid.to_string()))?; + Ok(commit.time().offset_minutes()) + } + + pub fn commit_encoding(&self, oid: &CommitOid) -> GitResult> { + let commit = self + .repo() + .find_commit(oid.to_oid()?) + .map_err(|_e| GitError::ObjectNotFound(oid.to_string()))?; + Ok(commit.message_encoding().map(String::from)) + } + + pub fn commit_tree_id(&self, oid: &CommitOid) -> GitResult { + let commit = self + .repo() + .find_commit(oid.to_oid()?) + .map_err(|_e| GitError::ObjectNotFound(oid.to_string()))?; + Ok(CommitOid::from_git2(commit.tree_id())) + } + + pub fn commit_parent_count(&self, oid: &CommitOid) -> GitResult { + let commit = self + .repo() + .find_commit(oid.to_oid()?) + .map_err(|_e| GitError::ObjectNotFound(oid.to_string()))?; + Ok(commit.parent_count()) + } + + pub fn commit_parent_ids(&self, oid: &CommitOid) -> GitResult> { + let commit = self + .repo() + .find_commit(oid.to_oid()?) + .map_err(|_e| GitError::ObjectNotFound(oid.to_string()))?; + Ok(commit.parent_ids().map(CommitOid::from_git2).collect()) + } + + pub fn commit_parent(&self, oid: &CommitOid, index: usize) -> GitResult { + let commit = self + .repo() + .find_commit(oid.to_oid()?) + .map_err(|_e| GitError::ObjectNotFound(oid.to_string()))?; + let parent = commit + .parent(index) + .map_err(|e| GitError::Internal(e.to_string()))?; + Ok(CommitMeta::from_git2(&parent)) + } + + pub fn commit_first_parent(&self, oid: &CommitOid) -> GitResult> { + let commit = self + .repo() + .find_commit(oid.to_oid()?) 
+ .map_err(|_e| GitError::ObjectNotFound(oid.to_string()))?; + if commit.parent_count() > 0 { + let parent = commit + .parent(0) + .map_err(|e| GitError::Internal(e.to_string()))?; + Ok(Some(CommitMeta::from_git2(&parent))) + } else { + Ok(None) + } + } + + pub fn commit_is_merge(&self, oid: &CommitOid) -> GitResult { + let commit = self + .repo() + .find_commit(oid.to_oid()?) + .map_err(|_e| GitError::ObjectNotFound(oid.to_string()))?; + Ok(commit.parent_count() > 1) + } + + pub fn commit_log( + &self, + rev: Option<&str>, + offset: usize, + limit: usize, + ) -> GitResult> { + let mut revwalk = self + .repo() + .revwalk() + .map_err(|e| GitError::Internal(e.to_string()))?; + + if let Some(r) = rev { + if r.contains("..") { + revwalk + .push_range(r) + .map_err(|e| GitError::Internal(e.to_string()))?; + } else { + revwalk + .push_ref(r) + .map_err(|e| GitError::Internal(e.to_string()))?; + } + } else { + revwalk + .push_head() + .map_err(|e| GitError::Internal(e.to_string()))?; + } + + revwalk + .set_sorting(git2::Sort::TOPOLOGICAL | git2::Sort::TIME) + .map_err(|e| GitError::Internal(e.to_string()))?; + + let mut commits = Vec::new(); + let target = offset.saturating_add(limit); + for oid_result in revwalk { + let oid = oid_result.map_err(|e| GitError::Internal(e.to_string()))?; + if target > 0 && commits.len() >= target { + break; + } + if let Ok(commit) = self.repo.find_commit(oid) { + commits.push(CommitMeta::from_git2(&commit)); + } + } + + Ok(commits.into_iter().skip(offset).take(limit).collect()) + } + + pub fn commit_range(&self, from: &str, to: &str) -> GitResult> { + let range = format!("{}..{}", from, to); + self.commit_log(Some(&range), 0, 0) + } + + pub fn commit_count(&self, from: Option<&str>, to: Option<&str>) -> GitResult { + let rev = match (from, to) { + (Some(f), Some(t)) => Some(format!("{}..{}", f, t)), + (Some(f), None) => Some(f.to_string()), + (None, Some(t)) => Some(t.to_string()), + (None, None) => None, + }; + let commits = 
self.commit_log(rev.as_deref(), 0, 0)?; + Ok(commits.len()) + } + + pub fn commit_total(&self, rev: Option<&str>) -> GitResult { + self.commit_count(None, rev) + } +} diff --git a/libs/git/commit/rebase.rs b/libs/git/commit/rebase.rs new file mode 100644 index 0000000..49a94cc --- /dev/null +++ b/libs/git/commit/rebase.rs @@ -0,0 +1,136 @@ +//! Rebase operations. +//! +//! Manual rebase implementation since git2's `RebaseSession` has type inference +//! issues with `Option<&[AnnotatedCommit]>` in Rust. The approach: +//! 1. Walk all commits from `base_oid` (exclusive) to `source_oid` (inclusive). +//! 2. For each commit, apply its tree diff onto the current HEAD. +//! 3. Create a new commit with the same message on top of the current HEAD. + +use crate::commit::types::CommitOid; +use crate::{GitDomain, GitError, GitResult}; + +impl GitDomain { + /// Rebase the commits on `source_oid` onto `base_oid`. + pub fn rebase_commits( + &self, + base_oid: &CommitOid, + source_oid: &CommitOid, + ) -> GitResult { + let base = base_oid + .to_oid() + .map_err(|_| GitError::InvalidOid(base_oid.to_string()))?; + let source = source_oid + .to_oid() + .map_err(|_| GitError::InvalidOid(source_oid.to_string()))?; + + // Collect all commits from base (exclusive) to source (inclusive). + let mut revwalk = self + .repo() + .revwalk() + .map_err(|e| GitError::Internal(e.to_string()))?; + revwalk + .push(source) + .map_err(|e| GitError::Internal(e.to_string()))?; + revwalk + .hide(base) + .map_err(|e| GitError::Internal(e.to_string()))?; + + let mut commits: Vec = Vec::new(); + for oid_result in revwalk { + let oid = oid_result.map_err(|e| GitError::Internal(e.to_string()))?; + let commit = self + .repo() + .find_commit(oid) + .map_err(|e| GitError::Internal(e.to_string()))?; + commits.push(commit); + } + + if commits.is_empty() { + return Err(GitError::Internal("No commits to rebase".to_string())); + } + + // Sort oldest-first (topological). 
Use OID as tiebreaker for commits with identical timestamps. + commits.sort_by(|a, b| { + let time_cmp = a.time().seconds().cmp(&b.time().seconds()); + if time_cmp != std::cmp::Ordering::Equal { + time_cmp + } else { + // Same timestamp: use OID for deterministic ordering. + a.id().cmp(&b.id()) + } + }); + + let sig = self + .repo() + .signature() + .map_err(|e| GitError::Internal(e.to_string()))?; + + // Start with the base commit's tree. + let base_commit = self + .repo() + .find_commit(base) + .map_err(|e| GitError::Internal(e.to_string()))?; + let mut current_tree = base_commit + .tree() + .map_err(|e| GitError::Internal(e.to_string()))?; + + let mut last_oid = base; + + for commit in &commits { + let parent_tree = ¤t_tree; + let commit_tree = commit + .tree() + .map_err(|e| GitError::Internal(e.to_string()))?; + + // Diff the parent tree with this commit's tree. + let diff = self + .repo() + .diff_tree_to_tree(Some(parent_tree), Some(&commit_tree), None) + .map_err(|e| GitError::Internal(e.to_string()))?; + + // Apply the diff to parent_tree, producing a new tree. + let mut new_index = self + .repo() + .apply_to_tree(parent_tree, &diff, None) + .map_err(|e| GitError::Internal(e.to_string()))?; + + let new_tree_oid = new_index + .write_tree() + .map_err(|e| GitError::Internal(e.to_string()))?; + + current_tree = self + .repo() + .find_tree(new_tree_oid) + .map_err(|e| GitError::Internal(e.to_string()))?; + + // Find the parent commit for the rebased commit. + let parent_commit = self + .repo() + .find_commit(last_oid) + .map_err(|e| GitError::Internal(e.to_string()))?; + + // Create the rebased commit on top of the current base. 
+ let new_oid = self + .repo() + .commit( + Some("HEAD"), + &sig, + &sig, + commit.message().unwrap_or(""), + ¤t_tree, + &[&parent_commit], + ) + .map_err(|e| GitError::Internal(e.to_string()))?; + + last_oid = new_oid; + } + + Ok(CommitOid::from_git2(last_oid)) + } + + pub fn rebase_abort(&self) -> GitResult<()> { + // git2 rebase sessions are not persistent across process exits. + // The caller resets HEAD to the original position. + Ok(()) + } +} diff --git a/libs/git/commit/revert.rs b/libs/git/commit/revert.rs new file mode 100644 index 0000000..28a7e7a --- /dev/null +++ b/libs/git/commit/revert.rs @@ -0,0 +1,156 @@ +use crate::commit::types::*; +use crate::{GitDomain, GitError, GitResult}; + +impl GitDomain { + pub fn commit_revert( + &self, + revert_oid: &CommitOid, + author: &CommitSignature, + committer: &CommitSignature, + message: Option<&str>, + mainline: u32, + update_ref: Option<&str>, + ) -> GitResult { + let revert_commit = self + .repo() + .find_commit(revert_oid.to_oid()?) 
+ .map_err(|_e| GitError::ObjectNotFound(revert_oid.to_string()))?; + + let head_oid = self + .repo() + .head() + .ok() + .and_then(|r| r.target()) + .ok_or_else(|| GitError::Internal("HEAD is not attached".to_string()))?; + let our_commit = self + .repo() + .find_commit(head_oid) + .map_err(|e| GitError::Internal(e.to_string()))?; + + let mut index = self + .repo() + .revert_commit(&revert_commit, &our_commit, mainline, None) + .map_err(|e| GitError::Internal(e.to_string()))?; + + let tree_oid = index + .write_tree_to(&*self.repo()) + .map_err(|e| GitError::Internal(e.to_string()))?; + let tree = self + .repo() + .find_tree(tree_oid) + .map_err(|e| GitError::Internal(e.to_string()))?; + + let original_summary = revert_commit.summary().unwrap_or(""); + let msg: String = match message { + Some(m) => m.to_string(), + None => format!("Revert \"{}\"", original_summary), + }; + + let author = self.commit_signature_to_git2(author)?; + let committer = self.commit_signature_to_git2(committer)?; + + // When mainline > 0, revert creates a merge commit with two parents: + // (mainline_parent, our_commit). Otherwise single parent (our_commit). + let oid = if mainline > 0 { + let parent_count = revert_commit.parent_count(); + let idx = (mainline - 1) as usize; + if idx >= parent_count { + return Err(GitError::Internal(format!( + "mainline parent index {} out of range (commit has {} parents)", + idx, parent_count + ))); + } + let mainline_parent = revert_commit + .parent(idx) + .map_err(|e| GitError::Internal(e.to_string()))?; + self.repo() + .commit( + update_ref, + &author, + &committer, + &msg, + &tree, + &[&mainline_parent, &our_commit], + ) + .map_err(|e| GitError::Internal(e.to_string()))? + } else { + self.repo() + .commit(update_ref, &author, &committer, &msg, &tree, &[&our_commit]) + .map_err(|e| GitError::Internal(e.to_string()))? 
+ }; + + Ok(CommitOid::from_git2(oid)) + } + + pub fn commit_revert_would_conflict( + &self, + revert_oid: &CommitOid, + mainline: u32, + ) -> GitResult { + let revert_commit = self + .repo() + .find_commit(revert_oid.to_oid()?) + .map_err(|_e| GitError::ObjectNotFound(revert_oid.to_string()))?; + let head_oid = self + .repo() + .head() + .ok() + .and_then(|r| r.target()) + .ok_or_else(|| GitError::Internal("HEAD is not attached".to_string()))?; + let our_commit = self + .repo() + .find_commit(head_oid) + .map_err(|e| GitError::Internal(e.to_string()))?; + + match self + .repo() + .revert_commit(&revert_commit, &our_commit, mainline, None) + { + Ok(index) => { + let has_conflicts = (0..index.len()).any(|i| { + index + .get(i) + .map(|e| (e.flags >> 12) & 0x3 > 0) + .unwrap_or(false) + }); + Ok(has_conflicts) + } + Err(e) => { + if e.code() == git2::ErrorCode::Conflict { + Ok(true) + } else { + Err(GitError::Internal(e.to_string())) + } + } + } + } + + pub fn commit_revert_abort(&self, reset_type: &str) -> GitResult<()> { + let kind = match reset_type { + "soft" => git2::ResetType::Soft, + "mixed" => git2::ResetType::Mixed, + "hard" => git2::ResetType::Hard, + _ => { + return Err(GitError::Internal(format!( + "unknown reset type: {}", + reset_type + ))); + } + }; + + let head_oid = self + .repo() + .head() + .ok() + .and_then(|r| r.target()) + .ok_or_else(|| GitError::Internal("HEAD is not attached".to_string()))?; + let obj = self + .repo() + .find_object(head_oid, None) + .map_err(|e| GitError::Internal(e.to_string()))?; + + self.repo() + .reset(&obj, kind, None) + .map_err(|e| GitError::Internal(e.to_string())) + } +} diff --git a/libs/git/commit/traverse.rs b/libs/git/commit/traverse.rs new file mode 100644 index 0000000..0507bac --- /dev/null +++ b/libs/git/commit/traverse.rs @@ -0,0 +1,218 @@ +//! Commit traversal and iteration. 
+ +use crate::commit::types::*; +use crate::{GitDomain, GitError, GitResult}; + +#[derive(Debug, Clone)] +pub struct CommitWalkOptions { + pub rev: Option, + pub sort: CommitSort, + pub limit: usize, + pub first_parent_only: bool, +} + +impl CommitWalkOptions { + pub fn new() -> Self { + Self { + rev: None, + sort: CommitSort(git2::Sort::TOPOLOGICAL.bits() | git2::Sort::TIME.bits()), + limit: 0, + first_parent_only: false, + } + } + + pub fn rev(mut self, rev: &str) -> Self { + self.rev = Some(rev.to_string()); + self + } + + pub fn topological(mut self) -> Self { + self.sort = CommitSort(git2::Sort::TOPOLOGICAL.bits()); + self + } + + pub fn time_order(mut self) -> Self { + self.sort = CommitSort(git2::Sort::TIME.bits()); + self + } + + pub fn reverse(mut self) -> Self { + self.sort = CommitSort(self.sort.0 | git2::Sort::REVERSE.bits()); + self + } + + pub fn limit(mut self, n: usize) -> Self { + self.limit = n; + self + } + + pub fn first_parent(mut self) -> Self { + self.first_parent_only = true; + self + } +} + +impl Default for CommitWalkOptions { + fn default() -> Self { + Self::new() + } +} + +impl GitDomain { + pub fn commit_walk(&self, opts: CommitWalkOptions) -> GitResult> { + let mut revwalk = self + .repo() + .revwalk() + .map_err(|e| GitError::Internal(e.to_string()))?; + + revwalk + .set_sorting(opts.sort.to_git2()) + .map_err(|e| GitError::Internal(e.to_string()))?; + + if let Some(ref r) = opts.rev { + if r.contains("..") { + revwalk + .push_range(r) + .map_err(|e| GitError::Internal(e.to_string()))?; + } else { + revwalk + .push_ref(r) + .map_err(|e| GitError::Internal(e.to_string()))?; + } + } else { + revwalk + .push_head() + .map_err(|e| GitError::Internal(e.to_string()))?; + } + + let mut commits = Vec::new(); + + if opts.first_parent_only { + let mut prev_oid: Option = None; + + for oid_result in revwalk { + let oid = oid_result.map_err(|e| GitError::Internal(e.to_string()))?; + + if let Some(prev) = prev_oid { + if let Ok(commit) = 
self.repo.find_commit(oid) { + if commit.parent_ids().next() == Some(prev) { + if limit_check(&commits, opts.limit) { + break; + } + commits.push(CommitMeta::from_git2(&commit)); + prev_oid = Some(oid); + } + } + } else { + if let Ok(commit) = self.repo.find_commit(oid) { + if limit_check(&commits, opts.limit) { + break; + } + commits.push(CommitMeta::from_git2(&commit)); + prev_oid = Some(oid); + } + } + } + } else { + for oid_result in revwalk { + let oid = oid_result.map_err(|e| GitError::Internal(e.to_string()))?; + if limit_check(&commits, opts.limit) { + break; + } + if let Ok(commit) = self.repo.find_commit(oid) { + commits.push(CommitMeta::from_git2(&commit)); + } + } + } + + Ok(commits) + } + + pub fn commit_topo_walk(&self, rev: Option<&str>, limit: usize) -> GitResult> { + self.commit_walk(CommitWalkOptions { + rev: rev.map(String::from), + sort: CommitSort(git2::Sort::TOPOLOGICAL.bits() | git2::Sort::TIME.bits()), + limit, + first_parent_only: false, + }) + } + + pub fn commit_reverse_walk( + &self, + rev: Option<&str>, + limit: usize, + ) -> GitResult> { + self.commit_walk(CommitWalkOptions { + rev: rev.map(String::from), + sort: CommitSort(git2::Sort::TIME.bits() | git2::Sort::REVERSE.bits()), + limit, + first_parent_only: false, + }) + } + + pub fn commit_mainline(&self, rev: Option<&str>, limit: usize) -> GitResult> { + self.commit_walk(CommitWalkOptions { + rev: rev.map(String::from), + sort: CommitSort(git2::Sort::TOPOLOGICAL.bits() | git2::Sort::TIME.bits()), + limit, + first_parent_only: true, + }) + } + + pub fn commit_ancestors(&self, oid: &CommitOid, limit: usize) -> GitResult> { + let mut revwalk = self + .repo() + .revwalk() + .map_err(|e| GitError::Internal(e.to_string()))?; + revwalk + .set_sorting(git2::Sort::TOPOLOGICAL | git2::Sort::TIME) + .map_err(|e| GitError::Internal(e.to_string()))?; + revwalk + .push(oid.to_oid()?) 
+ .map_err(|e| GitError::Internal(e.to_string()))?; + + let mut commits = Vec::new(); + for oid_result in revwalk { + let oid = oid_result.map_err(|e| GitError::Internal(e.to_string()))?; + if limit > 0 && commits.len() >= limit { + break; + } + if let Ok(commit) = self.repo.find_commit(oid) { + commits.push(CommitMeta::from_git2(&commit)); + } + } + Ok(commits) + } + + pub fn commit_descendants(&self, oid: &CommitOid, limit: usize) -> GitResult> { + let range = format!("{}..", oid); + self.commit_walk(CommitWalkOptions { + rev: Some(range), + sort: CommitSort(git2::Sort::TOPOLOGICAL.bits() | git2::Sort::TIME.bits()), + limit, + first_parent_only: false, + }) + } + + pub fn resolve_rev(&self, rev: &str) -> GitResult { + if let Ok(oid) = git2::Oid::from_str(rev) { + return Ok(CommitOid::from_git2(oid)); + } + + if let Ok(reference) = self.repo.find_reference(rev) { + if let Some(target) = reference.target() { + return Ok(CommitOid::from_git2(target)); + } + } + + if let Ok(commit) = self.repo.revparse_single(rev) { + return Ok(CommitOid::from_git2(commit.id())); + } + + Err(GitError::InvalidOid(format!("cannot resolve: {}", rev))) + } +} + +fn limit_check(commits: &[CommitMeta], limit: usize) -> bool { + limit > 0 && commits.len() >= limit +} diff --git a/libs/git/commit/types.rs b/libs/git/commit/types.rs new file mode 100644 index 0000000..d400f11 --- /dev/null +++ b/libs/git/commit/types.rs @@ -0,0 +1,163 @@ +//! Serializable types for the commit domain. 
+ +use crate::GitError; +use serde::{Deserialize, Serialize}; + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, Hash)] +pub struct CommitOid(pub String); + +impl CommitOid { + pub fn new(hex: &str) -> Self { + Self(hex.to_lowercase()) + } + + pub fn from_oid(oid: git2::Oid) -> Self { + Self(oid.to_string()) + } + + pub fn as_str(&self) -> &str { + &self.0 + } + + pub fn to_oid(&self) -> Result { + git2::Oid::from_str(&self.0).map_err(|_| GitError::InvalidOid(self.0.clone())) + } + + pub fn from_git2(oid: git2::Oid) -> Self { + Self(oid.to_string()) + } +} + +impl std::fmt::Display for CommitOid { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", self.0) + } +} + +impl AsRef for CommitOid { + fn as_ref(&self) -> &str { + &self.0 + } +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct CommitSignature { + pub name: String, + pub email: String, + pub time_secs: i64, + pub offset_minutes: i32, +} + +impl CommitSignature { + pub fn from_git2(sig: git2::Signature<'_>) -> Self { + Self { + name: sig.name().unwrap_or("").to_string(), + email: sig.email().unwrap_or("").to_string(), + time_secs: sig.when().seconds(), + offset_minutes: sig.when().offset_minutes(), + } + } +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct CommitMeta { + pub oid: CommitOid, + pub message: String, + pub summary: String, + pub author: CommitSignature, + pub committer: CommitSignature, + pub tree_id: CommitOid, + pub parent_ids: Vec, + pub encoding: Option, +} + +impl CommitMeta { + pub fn from_git2(commit: &git2::Commit<'_>) -> Self { + Self { + oid: CommitOid::from_git2(commit.id()), + message: commit.message().unwrap_or("").to_string(), + summary: commit.summary().unwrap_or("").to_string(), + author: CommitSignature::from_git2(commit.author()), + committer: CommitSignature::from_git2(commit.committer()), + tree_id: CommitOid::from_git2(commit.tree_id()), + parent_ids: 
commit.parent_ids().map(CommitOid::from_git2).collect(), + encoding: commit.message_encoding().map(String::from), + } + } +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct CommitDiffStats { + pub oid: String, + pub files_changed: usize, + pub insertions: usize, + pub deletions: usize, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct CommitDiffFile { + pub path: Option, + pub status: String, + pub is_binary: bool, + pub size: u64, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct CommitDiffHunk { + pub old_start: u32, + pub old_lines: u32, + pub new_start: u32, + pub new_lines: u32, + pub header: String, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct CommitBlameHunk { + pub commit_oid: CommitOid, + pub final_start_line: u32, + pub final_lines: u32, + pub orig_start_line: u32, + pub orig_lines: u32, + pub boundary: bool, + pub orig_path: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct CommitBlameLine { + pub commit_oid: CommitOid, + pub line_no: u32, + pub content: String, + pub orig_path: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct CommitRefInfo { + pub name: String, + pub target: CommitOid, + pub is_remote: bool, + pub is_tag: bool, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct CommitReflogEntry { + pub oid_new: CommitOid, + pub oid_old: CommitOid, + pub committer_name: String, + pub committer_email: String, + pub time_secs: i64, + pub message: Option, + pub ref_name: String, +} + +#[derive(Debug, Clone, Copy, Serialize, Deserialize, Default)] +pub struct CommitSort(pub u32); + +impl CommitSort { + pub const TOPOLOGICAL: Self = Self(git2::Sort::TOPOLOGICAL.bits()); + pub const TIME: Self = Self(git2::Sort::TIME.bits()); + pub const REVERSE: Self = Self(git2::Sort::REVERSE.bits()); + + pub fn to_git2(self) -> git2::Sort { + git2::Sort::from_bits(self.0).unwrap_or(git2::Sort::NONE) + } +} diff --git a/libs/git/config/mod.rs 
b/libs/git/config/mod.rs new file mode 100644 index 0000000..d761bf0 --- /dev/null +++ b/libs/git/config/mod.rs @@ -0,0 +1,3 @@ +//! Config domain — repository and global config read/write operations. +pub mod ops; +pub mod types; diff --git a/libs/git/config/ops.rs b/libs/git/config/ops.rs new file mode 100644 index 0000000..2cf2f0d --- /dev/null +++ b/libs/git/config/ops.rs @@ -0,0 +1,93 @@ +//! Config operations. + +use std::cell::RefCell; +use std::collections::HashMap; +use std::rc::Rc; + +use crate::config::types::{ConfigEntry, ConfigSnapshot}; +use crate::{GitDomain, GitError, GitResult}; + +impl GitDomain { + /// Open the repository-level config. + pub fn config(&self) -> GitResult { + let cfg = self + .repo() + .config() + .map_err(|e| GitError::ConfigError(e.to_string()))?; + Ok(GitConfig { + inner: Rc::new(RefCell::new(cfg)), + }) + } + + pub fn config_get(&self, key: &str) -> GitResult> { + let cfg = self.config()?; + cfg.get_str(key) + .map(Some) + .map_err(|e| GitError::ConfigError(e.to_string())) + } + + pub fn config_set(&self, key: &str, value: &str) -> GitResult<()> { + let cfg = self.config()?; + cfg.set(key, value) + .map_err(|e| GitError::ConfigError(e.to_string())) + } + + pub fn config_delete(&self, key: &str) -> GitResult<()> { + let cfg = self.config()?; + cfg.delete(key) + .map_err(|e| GitError::ConfigError(e.to_string())) + } + + /// List all config entries. Optionally filter by key prefix (e.g. "user"). + pub fn config_entries(&self, prefix: Option<&str>) -> GitResult { + let cfg = self.config()?; + let mut entries = Vec::new(); + + let glob = prefix.filter(|p| !p.is_empty()); + + let binding = cfg.inner.borrow(); + let _ = binding + .entries(glob) + .map_err(|e: git2::Error| GitError::ConfigError(e.to_string()))? 
+ .for_each(|entry| { + let name = entry.name().unwrap_or("").to_string(); + let value = entry.value().unwrap_or("").to_string(); + entries.push(ConfigEntry { name, value }); + }); + + Ok(ConfigSnapshot { entries }) + } + + pub fn config_has(&self, key: &str) -> GitResult { + let cfg = self.config()?; + Ok(cfg.get_str(key).is_ok()) + } + + pub fn config_get_family(&self, prefix: &str) -> GitResult> { + let snapshot = self.config_entries(Some(prefix))?; + Ok(snapshot + .entries + .into_iter() + .map(|e| (e.name, e.value)) + .collect()) + } +} + +/// A wrapper around git2::Config providing a cleaner API. +pub struct GitConfig { + inner: Rc>, +} + +impl GitConfig { + fn get_str(&self, key: &str) -> Result { + self.inner.borrow().get_str(key).map(String::from) + } + + fn set(&self, key: &str, value: &str) -> Result<(), git2::Error> { + self.inner.borrow_mut().set_str(key, value) + } + + fn delete(&self, key: &str) -> Result<(), git2::Error> { + self.inner.borrow_mut().remove(key) + } +} diff --git a/libs/git/config/types.rs b/libs/git/config/types.rs new file mode 100644 index 0000000..ed1e846 --- /dev/null +++ b/libs/git/config/types.rs @@ -0,0 +1,16 @@ +//! Serializable types for the config domain. + +use serde::{Deserialize, Serialize}; + +/// A single config entry. +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ConfigEntry { + pub name: String, + pub value: String, +} + +/// A snapshot of config entries matching a query. +#[derive(Debug, Clone, Serialize, Deserialize, Default)] +pub struct ConfigSnapshot { + pub entries: Vec, +} diff --git a/libs/git/description/mod.rs b/libs/git/description/mod.rs new file mode 100644 index 0000000..4a69a0c --- /dev/null +++ b/libs/git/description/mod.rs @@ -0,0 +1,39 @@ +//! Description domain — repository description file read/write (used by GitWeb). + +use std::path::PathBuf; + +use crate::{GitDomain, GitError, GitResult}; + +impl GitDomain { + /// Path to the description file. 
+ fn description_path(&self) -> PathBuf { + PathBuf::from(self.repo().path()).join("description") + } + + /// Read the repository description. + /// Returns "Unnamed repository" if the file does not exist. + pub fn description_get(&self) -> GitResult { + let path = self.description_path(); + if !path.exists() { + return Ok("Unnamed repository".to_string()); + } + let content = + std::fs::read_to_string(&path).map_err(|e| GitError::IoError(e.to_string()))?; + Ok(content.trim().to_string()) + } + + /// Write the repository description. + pub fn description_set(&self, description: &str) -> GitResult<()> { + let path = self.description_path(); + std::fs::write(&path, description.trim()).map_err(|e| GitError::IoError(e.to_string())) + } + + pub fn description_exists(&self) -> bool { + self.description_path().exists() + } + + /// Reset description to the default "Unnamed repository". + pub fn description_reset(&self) -> GitResult<()> { + self.description_set("Unnamed repository") + } +} diff --git a/libs/git/diff/mod.rs b/libs/git/diff/mod.rs new file mode 100644 index 0000000..e742ef8 --- /dev/null +++ b/libs/git/diff/mod.rs @@ -0,0 +1,3 @@ +//! Diff domain — all diff-related operations on a GitDomain. +pub mod ops; +pub mod types; diff --git a/libs/git/diff/ops.rs b/libs/git/diff/ops.rs new file mode 100644 index 0000000..a950cff --- /dev/null +++ b/libs/git/diff/ops.rs @@ -0,0 +1,510 @@ +//! Diff operations. 
+ +use std::cell::RefCell; + +use crate::commit::types::CommitOid; +use crate::diff::types::{ + DiffDelta, DiffHunk, DiffLine, DiffOptions, DiffResult, DiffStats, SideBySideChangeType, + SideBySideDiffResult, SideBySideFile, SideBySideLine, +}; +use crate::{GitDomain, GitError, GitResult}; + +impl GitDomain { + pub fn diff_tree_to_tree( + &self, + old_tree: Option<&CommitOid>, + new_tree: Option<&CommitOid>, + opts: Option, + ) -> GitResult { + let old_tree = match old_tree { + Some(oid) => { + let o = oid + .to_oid() + .map_err(|_| GitError::InvalidOid(oid.to_string()))?; + Some( + self.repo() + .find_tree(o) + .map_err(|e| GitError::Internal(e.to_string()))?, + ) + } + None => None, + }; + let new_tree = match new_tree { + Some(oid) => { + let o = oid + .to_oid() + .map_err(|_| GitError::InvalidOid(oid.to_string()))?; + Some( + self.repo() + .find_tree(o) + .map_err(|e| GitError::Internal(e.to_string()))?, + ) + } + None => None, + }; + + let mut git_opts = opts + .map(|o| o.to_git2()) + .unwrap_or_else(git2::DiffOptions::new); + + // git2 requires at least one tree to be Some + let diff = match (old_tree.as_ref(), new_tree.as_ref()) { + (Some(old), Some(new)) => { + self.repo() + .diff_tree_to_tree(Some(old), Some(new), Some(&mut git_opts)) + } + (Some(old), None) => { + self.repo() + .diff_tree_to_tree(Some(old), None, Some(&mut git_opts)) + } + (None, Some(new)) => { + self.repo() + .diff_tree_to_tree(None, Some(new), Some(&mut git_opts)) + } + (None, None) => { + return Err(GitError::Internal( + "Both old_tree and new_tree are None".into(), + )); + } + } + .map_err(|e| GitError::Internal(e.to_string()))?; + + build_diff_result(&diff) + } + + pub fn diff_commit_to_workdir( + &self, + commit: &CommitOid, + opts: Option, + ) -> GitResult { + let oid = commit + .to_oid() + .map_err(|_| GitError::InvalidOid(commit.to_string()))?; + + let commit = self + .repo() + .find_commit(oid) + .map_err(|e| GitError::Internal(e.to_string()))?; + let tree = self + .repo() + 
.find_tree(commit.tree_id()) + .map_err(|e| GitError::Internal(e.to_string()))?; + + let mut git_opts = opts + .map(|o| o.to_git2()) + .unwrap_or_else(git2::DiffOptions::new); + + let diff = self + .repo() + .diff_tree_to_workdir(Some(&tree), Some(&mut git_opts)) + .map_err(|e| GitError::Internal(e.to_string()))?; + + build_diff_result(&diff) + } + + pub fn diff_commit_to_index( + &self, + commit: &CommitOid, + opts: Option, + ) -> GitResult { + let oid = commit + .to_oid() + .map_err(|_| GitError::InvalidOid(commit.to_string()))?; + + let commit = self + .repo() + .find_commit(oid) + .map_err(|e| GitError::Internal(e.to_string()))?; + let tree = self + .repo() + .find_tree(commit.tree_id()) + .map_err(|e| GitError::Internal(e.to_string()))?; + + // Get the index as a tree for comparison. + let mut index = self + .repo() + .index() + .map_err(|e| GitError::Internal(e.to_string()))?; + let index_tree_oid = index + .write_tree() + .map_err(|e| GitError::Internal(e.to_string()))?; + let index_tree = self + .repo() + .find_tree(index_tree_oid) + .map_err(|e| GitError::Internal(e.to_string()))?; + + let mut git_opts = opts + .map(|o| o.to_git2()) + .unwrap_or_else(git2::DiffOptions::new); + + let diff = self + .repo() + .diff_tree_to_tree(Some(&tree), Some(&index_tree), Some(&mut git_opts)) + .map_err(|e| GitError::Internal(e.to_string()))?; + + build_diff_result(&diff) + } + + pub fn diff_workdir_to_index(&self, opts: Option) -> GitResult { + let mut git_opts = opts + .map(|o| o.to_git2()) + .unwrap_or_else(git2::DiffOptions::new); + + let diff = self + .repo() + .diff_tree_to_workdir(None, Some(&mut git_opts)) + .map_err(|e| GitError::Internal(e.to_string()))?; + + build_diff_result(&diff) + } + + pub fn diff_index_to_tree( + &self, + tree: &CommitOid, + opts: Option, + ) -> GitResult { + let oid = tree + .to_oid() + .map_err(|_| GitError::InvalidOid(tree.to_string()))?; + + let tree = self + .repo() + .find_tree(oid) + .map_err(|e| 
GitError::Internal(e.to_string()))?; + + let mut git_opts = opts + .map(|o| o.to_git2()) + .unwrap_or_else(git2::DiffOptions::new); + + let diff = self + .repo() + .diff_tree_to_tree(Some(&tree), None, Some(&mut git_opts)) + .map_err(|e| GitError::Internal(e.to_string()))?; + + build_diff_result(&diff) + } + + pub fn diff_stats(&self, old_tree: &CommitOid, new_tree: &CommitOid) -> GitResult { + let old_oid = old_tree + .to_oid() + .map_err(|_| GitError::InvalidOid(old_tree.to_string()))?; + let new_oid = new_tree + .to_oid() + .map_err(|_| GitError::InvalidOid(new_tree.to_string()))?; + + let old_tree = self + .repo() + .find_tree(old_oid) + .map_err(|e| GitError::Internal(e.to_string()))?; + let new_tree = self + .repo() + .find_tree(new_oid) + .map_err(|e| GitError::Internal(e.to_string()))?; + + let diff = self + .repo() + .diff_tree_to_tree(Some(&old_tree), Some(&new_tree), None) + .map_err(|e| GitError::Internal(e.to_string()))?; + + let stats = diff + .stats() + .map_err(|e| GitError::Internal(e.to_string()))?; + + Ok(DiffStats::from_git2(&stats)) + } + + pub fn diff_patch_id(&self, old_tree: &CommitOid, new_tree: &CommitOid) -> GitResult { + let old_oid = old_tree + .to_oid() + .map_err(|_| GitError::InvalidOid(old_tree.to_string()))?; + let new_oid = new_tree + .to_oid() + .map_err(|_| GitError::InvalidOid(new_tree.to_string()))?; + + let old_tree = self + .repo() + .find_tree(old_oid) + .map_err(|e| GitError::Internal(e.to_string()))?; + let new_tree = self + .repo() + .find_tree(new_oid) + .map_err(|e| GitError::Internal(e.to_string()))?; + + let diff = self + .repo() + .diff_tree_to_tree(Some(&old_tree), Some(&new_tree), None) + .map_err(|e| GitError::Internal(e.to_string()))?; + + let patch_id = diff + .patchid(None) + .map_err(|e| GitError::Internal(e.to_string()))?; + + Ok(patch_id.to_string()) + } +} + +fn build_diff_result(diff: &git2::Diff<'_>) -> GitResult { + let stats = diff + .stats() + .map_err(|e| GitError::Internal(e.to_string()))?; + + let 
delta_count = diff.deltas().len(); + let deltas: RefCell> = RefCell::new(Vec::with_capacity(delta_count)); + let delta_hunks: RefCell> = RefCell::new(Vec::new()); + let delta_lines: RefCell> = RefCell::new(Vec::new()); + let counter: RefCell = RefCell::new(0); + + let mut file_cb = |_delta: git2::DiffDelta<'_>, _progress: f32| -> bool { + let count = *counter.borrow(); + if count > 0 { + let prev_idx = count - 1; + let hunks = delta_hunks.take(); + let lines = delta_lines.take(); + if let Some(prev_delta) = diff.get_delta(prev_idx) { + deltas + .borrow_mut() + .push(DiffDelta::from_git2(&prev_delta, hunks, lines)); + } + } + *counter.borrow_mut() = count + 1; + true + }; + + let mut hunk_cb = |_delta: git2::DiffDelta<'_>, hunk: git2::DiffHunk<'_>| -> bool { + delta_hunks.borrow_mut().push(DiffHunk::from_git2(&hunk)); + true + }; + + let mut line_cb = |_delta: git2::DiffDelta<'_>, + _hunk: Option>, + line: git2::DiffLine<'_>| + -> bool { + delta_lines.borrow_mut().push(DiffLine::from_git2(&line)); + true + }; + + diff.foreach(&mut file_cb, None, Some(&mut hunk_cb), Some(&mut line_cb)) + .map_err(|e| GitError::Internal(e.to_string()))?; + { + let count = *counter.borrow(); + if count > 0 { + let last_idx = count - 1; + let hunks = delta_hunks.take(); + let lines = delta_lines.take(); + if let Some(last_delta) = diff.get_delta(last_idx) { + deltas + .borrow_mut() + .push(DiffDelta::from_git2(&last_delta, hunks, lines)); + } + } + } + + Ok(DiffResult { + stats: DiffStats::from_git2(&stats), + deltas: deltas.into_inner(), + }) +} + +/// The algorithm walks the unified diff line-by-line and produces rows where: +/// - **Added** lines appear only on the right side. +/// - **Deleted** lines appear only on the left side. +/// - When a deletion is immediately followed by an addition they are rendered as a +/// **Modified** pair (two separate rows). +/// - **Context** lines appear on both sides with the same content. 
+/// - **Empty** filler rows are inserted so that left and right line numbers stay aligned. +pub fn diff_to_side_by_side(diff: &DiffResult) -> SideBySideDiffResult { + let mut files: Vec = Vec::with_capacity(diff.deltas.len()); + let mut total_additions = 0usize; + let mut total_deletions = 0usize; + + for delta in &diff.deltas { + let (path, is_binary, is_rename) = + if delta.status == crate::diff::types::DiffDeltaStatus::Renamed { + ( + delta + .new_file + .path + .clone() + .or_else(|| delta.old_file.path.clone()) + .unwrap_or_default(), + delta.new_file.is_binary || delta.old_file.is_binary, + true, + ) + } else { + ( + delta + .new_file + .path + .clone() + .or_else(|| delta.old_file.path.clone()) + .unwrap_or_default(), + delta.new_file.is_binary, + false, + ) + }; + + if is_binary { + files.push(SideBySideFile { + path, + additions: 0, + deletions: 0, + is_binary: true, + is_rename, + lines: vec![], + }); + continue; + } + + let lines = build_side_by_side_lines(&delta.lines); + let additions = lines + .iter() + .filter(|l| matches!(l.change_type, SideBySideChangeType::Added)) + .count(); + let deletions = lines + .iter() + .filter(|l| matches!(l.change_type, SideBySideChangeType::Removed)) + .count(); + + total_additions += additions; + total_deletions += deletions; + + files.push(SideBySideFile { + path, + additions, + deletions, + is_binary: false, + is_rename, + lines, + }); + } + + SideBySideDiffResult { + files, + total_additions, + total_deletions, + } +} + +fn build_side_by_side_lines(unified: &[DiffLine]) -> Vec { + let mut result: Vec = Vec::with_capacity(unified.len() * 2); + let mut i = 0; + + while i < unified.len() { + let line = &unified[i]; + + match line.origin { + '+' => { + // Collect a run of consecutive additions. + let mut added_lines: Vec<&DiffLine> = vec![line]; + while i + 1 < unified.len() && unified[i + 1].origin == '+' { + i += 1; + added_lines.push(&unified[i]); + } + + // Peek at the previous deletion run to pair with additions. 
+ let mut deleted_lines: Vec<&DiffLine> = vec![]; + // Backtrack to find deletions right before this run. + let Some(j_base) = i.checked_sub(added_lines.len()) else { + // Additions at start of diff — no preceding deletions to pair with. + // Emit all as unpaired Added below. + for k in 0..added_lines.len() { + let add = added_lines[k]; + result.push(SideBySideLine { + left_line_no: None, + right_line_no: add.new_lineno, + left_content: String::new(), + right_content: add.content.clone(), + change_type: SideBySideChangeType::Added, + }); + } + i += 1; + continue; + }; + let mut j = j_base; + while j > 0 && unified[j].origin == '-' { + j -= 1; + } + // Collect deletions from j+1 up to (but not including) the first addition. + let del_start = if j > 0 { j + 1 } else { 0 }; + for k in del_start..i { + if unified[k].origin == '-' { + deleted_lines.push(&unified[k]); + } + } + + // If we have paired deletions, emit them as Modified pairs. + let pairs = deleted_lines.len().min(added_lines.len()); + for k in 0..pairs { + let del = deleted_lines[k]; + let add = added_lines[k]; + result.push(SideBySideLine { + left_line_no: del.old_lineno, + right_line_no: add.new_lineno, + left_content: del.content.clone(), + right_content: add.content.clone(), + change_type: SideBySideChangeType::Modified, + }); + } + + // Remaining unpaired additions. + for k in pairs..added_lines.len() { + let add = added_lines[k]; + result.push(SideBySideLine { + left_line_no: None, + right_line_no: add.new_lineno, + left_content: String::new(), + right_content: add.content.clone(), + change_type: SideBySideChangeType::Added, + }); + } + + // Remaining unpaired deletions (only possible if deletions > additions). 
+ for k in pairs..deleted_lines.len() { + let del = deleted_lines[k]; + result.push(SideBySideLine { + left_line_no: del.old_lineno, + right_line_no: None, + left_content: del.content.clone(), + right_content: String::new(), + change_type: SideBySideChangeType::Removed, + }); + } + } + '-' => { + // Collect a run of consecutive deletions. + let mut deleted_lines: Vec<&DiffLine> = vec![line]; + while i + 1 < unified.len() && unified[i + 1].origin == '-' { + i += 1; + deleted_lines.push(&unified[i]); + } + + // Emit each deletion (unless already paired above). + // We defer pairing to the addition-handling block above, + // so here we just emit unpaired deletions. + for del in &deleted_lines { + result.push(SideBySideLine { + left_line_no: del.old_lineno, + right_line_no: None, + left_content: del.content.clone(), + right_content: String::new(), + change_type: SideBySideChangeType::Removed, + }); + } + } + _ => { + // Context line — appears on both sides. + result.push(SideBySideLine { + left_line_no: line.old_lineno, + right_line_no: line.new_lineno, + left_content: line.content.clone(), + right_content: line.content.clone(), + change_type: SideBySideChangeType::Unchanged, + }); + } + } + + i += 1; + } + + result +} diff --git a/libs/git/diff/types.rs b/libs/git/diff/types.rs new file mode 100644 index 0000000..6460472 --- /dev/null +++ b/libs/git/diff/types.rs @@ -0,0 +1,313 @@ +//! Serializable types for the diff domain. 
+ +use serde::{Deserialize, Serialize}; + +use crate::commit::types::CommitOid; + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] +#[serde(rename_all = "lowercase")] +pub enum DiffDeltaStatus { + Unmodified, + Added, + Deleted, + Modified, + Renamed, + Copied, + Ignored, + Untracked, + Typechange, + Unreadable, + Conflicted, +} + +impl DiffDeltaStatus { + pub fn from_git2(status: git2::Delta) -> Self { + match status { + git2::Delta::Unmodified => Self::Unmodified, + git2::Delta::Added => Self::Added, + git2::Delta::Deleted => Self::Deleted, + git2::Delta::Modified => Self::Modified, + git2::Delta::Renamed => Self::Renamed, + git2::Delta::Copied => Self::Copied, + git2::Delta::Ignored => Self::Ignored, + git2::Delta::Untracked => Self::Untracked, + git2::Delta::Typechange => Self::Typechange, + git2::Delta::Unreadable => Self::Unreadable, + git2::Delta::Conflicted => Self::Conflicted, + } + } +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct DiffFile { + pub oid: Option, + pub path: Option, + pub size: u64, + pub is_binary: bool, +} + +impl DiffFile { + pub fn from_git2(file: &git2::DiffFile<'_>) -> Self { + Self { + oid: if file.is_valid_id() { + Some(CommitOid::from_git2(file.id())) + } else { + None + }, + path: file.path().map(|p| p.to_string_lossy().to_string()), + size: file.size(), + is_binary: file.is_binary(), + } + } +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct DiffHunk { + pub old_start: u32, + pub old_lines: u32, + pub new_start: u32, + pub new_lines: u32, + pub header: String, +} + +impl DiffHunk { + pub fn from_git2(hunk: &git2::DiffHunk<'_>) -> Self { + Self { + old_start: hunk.old_start(), + old_lines: hunk.old_lines(), + new_start: hunk.new_start(), + new_lines: hunk.new_lines(), + header: String::from_utf8_lossy(hunk.header()).to_string(), + } + } +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct DiffLine { + pub content: String, + pub origin: char, + pub old_lineno: 
Option, + pub new_lineno: Option, + pub num_lines: u32, + pub content_offset: i64, +} + +impl DiffLine { + pub fn from_git2(line: &git2::DiffLine<'_>) -> Self { + Self { + content: String::from_utf8_lossy(line.content()).to_string(), + origin: line.origin(), + old_lineno: line.old_lineno(), + new_lineno: line.new_lineno(), + num_lines: line.num_lines(), + content_offset: line.content_offset(), + } + } +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct DiffDelta { + pub status: DiffDeltaStatus, + pub old_file: DiffFile, + pub new_file: DiffFile, + pub nfiles: u16, + pub hunks: Vec, + pub lines: Vec, +} + +impl DiffDelta { + pub fn from_git2( + delta: &git2::DiffDelta<'_>, + hunks: Vec, + lines: Vec, + ) -> Self { + Self { + status: DiffDeltaStatus::from_git2(delta.status()), + old_file: DiffFile::from_git2(&delta.old_file()), + new_file: DiffFile::from_git2(&delta.new_file()), + nfiles: delta.nfiles(), + hunks, + lines, + } + } +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct DiffStats { + pub files_changed: usize, + pub insertions: usize, + pub deletions: usize, +} + +impl DiffStats { + pub fn from_git2(stats: &git2::DiffStats) -> Self { + Self { + files_changed: stats.files_changed(), + insertions: stats.insertions(), + deletions: stats.deletions(), + } + } +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct DiffResult { + pub stats: DiffStats, + pub deltas: Vec, +} + +// --------------------------------------------------------------------------- +// Side-by-side diff types +// --------------------------------------------------------------------------- + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] +#[serde(rename_all = "lowercase")] +pub enum SideBySideChangeType { + /// Both sides show the same context line. + Unchanged, + /// Line was added (only on the right / new side). + Added, + /// Line was removed (only on the left / old side). 
+ Removed, + /// A modified region — left shows old line, right shows new line. + Modified, + /// Empty row used to align paired add/remove lines. + Empty, +} + +impl SideBySideChangeType {} + +/// One row in the side-by-side output. +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct SideBySideLine { + /// Line number in the old / left file. `None` for added-only rows. + pub left_line_no: Option, + /// Line number in the new / right file. `None` for deleted-only rows. + pub right_line_no: Option, + /// Content displayed on the left side. + pub left_content: String, + /// Content displayed on the right side. + pub right_content: String, + /// How this row should be rendered. + pub change_type: SideBySideChangeType, +} + +/// A single file in a side-by-side diff. +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct SideBySideFile { + /// Path of the file (prefer the new path for renames). + pub path: String, + /// Number of additions in this file. + pub additions: usize, + /// Number of deletions in this file. + pub deletions: usize, + /// Whether this file is binary. + pub is_binary: bool, + /// Whether this file was renamed. + pub is_rename: bool, + /// All rows in the side-by-side view. + pub lines: Vec, +} + +/// The complete side-by-side diff result for all files. 
+#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct SideBySideDiffResult { + pub files: Vec, + pub total_additions: usize, + pub total_deletions: usize, +} + +#[derive(Debug, Clone, Default)] +pub struct DiffOptions { + context_lines: u32, + pathspec: Vec, + include_untracked: bool, + include_ignored: bool, + include_unmodified: bool, + ignore_whitespace: bool, + force_text: bool, + skip_binary_check: bool, + reverse: bool, +} + +impl DiffOptions { + pub fn new() -> Self { + Self::default() + } + + pub fn context_lines(mut self, lines: u32) -> Self { + self.context_lines = lines; + self + } + + pub fn pathspec(mut self, path: &str) -> Self { + self.pathspec.push(path.to_string()); + self + } + + pub fn include_untracked(mut self) -> Self { + self.include_untracked = true; + self + } + + pub fn include_ignored(mut self) -> Self { + self.include_ignored = true; + self + } + + pub fn include_unmodified(mut self) -> Self { + self.include_unmodified = true; + self + } + + pub fn ignore_whitespace(mut self) -> Self { + self.ignore_whitespace = true; + self + } + + pub fn force_text(mut self) -> Self { + self.force_text = true; + self + } + + pub fn skip_binary_check(mut self) -> Self { + self.skip_binary_check = true; + self + } + + pub fn reverse(mut self) -> Self { + self.reverse = true; + self + } + + pub fn to_git2(&self) -> git2::DiffOptions { + let mut opts = git2::DiffOptions::new(); + if self.context_lines != 3 { + opts.context_lines(self.context_lines); + } + for p in &self.pathspec { + opts.pathspec(p); + } + if self.include_untracked { + opts.include_untracked(true); + } + if self.include_ignored { + opts.include_ignored(true); + } + if self.include_unmodified { + opts.include_unmodified(true); + } + if self.ignore_whitespace { + opts.ignore_whitespace(true); + } + if self.force_text { + opts.force_text(true); + } + if self.skip_binary_check { + opts.skip_binary_check(true); + } + if self.reverse { + opts.reverse(true); + } + opts + } +} diff --git 
a/libs/git/domain.rs b/libs/git/domain.rs
new file mode 100644
index 0000000..a8cb941
--- /dev/null
+++ b/libs/git/domain.rs
@@ -0,0 +1,58 @@
use std::path::Path;
use std::sync::Arc;

use crate::GitError;
use git2::Repository;
use models::repos::repo;

/// Cheaply-clonable handle around an opened `git2::Repository`.
#[derive(Clone)]
pub struct GitDomain {
    pub(crate) repo: Arc<Repository>,
}

// SAFETY claim (original): git2's Repository uses internal locking; mutable
// access is additionally gated through Arc::get_mut in repo_mut(), and all
// mutation happens on a single blocking thread per sync cycle.
//
// NOTE(review): upstream git2 deliberately leaves `Repository` `!Send`/`!Sync`;
// the "internal locking" premise should be verified against the libgit2
// threading docs before relying on shared access from multiple threads.
#[allow(unsafe_code)]
unsafe impl Send for GitDomain {}
#[allow(unsafe_code)]
unsafe impl Sync for GitDomain {}

impl GitDomain {
    /// Opens the repository recorded in a `repo` database row.
    pub fn from_model(model: repo::Model) -> crate::GitResult<Self> {
        let repo =
            Repository::open(model.storage_path).map_err(|e| GitError::Internal(e.to_string()))?;
        Ok(Self {
            repo: Arc::new(repo),
        })
    }

    /// Opens an existing repository at `path` (bare or with a workdir).
    pub fn open<P: AsRef<Path>>(path: P) -> crate::GitResult<Self> {
        let repo = Repository::open(path).map_err(|e| GitError::Internal(e.to_string()))?;
        Ok(Self {
            repo: Arc::new(repo),
        })
    }

    /// NOTE(review): despite the name, this opens a *bare* repository
    /// (`Repository::open_bare`). Confirm whether `Repository::open` was
    /// intended here; behavior is kept as-is.
    pub fn open_workdir<P: AsRef<Path>>(path: P) -> crate::GitResult<Self> {
        let repo = Repository::open_bare(path).map_err(|e| GitError::Internal(e.to_string()))?;
        Ok(Self {
            repo: Arc::new(repo),
        })
    }

    /// Creates a new bare repository at `path`.
    pub fn init_bare<P: AsRef<Path>>(path: P) -> crate::GitResult<Self> {
        let repo = Repository::init_bare(path).map_err(|e| GitError::Internal(e.to_string()))?;
        Ok(Self {
            repo: Arc::new(repo),
        })
    }

    /// Shared read access to the underlying repository.
    pub fn repo(&self) -> &Repository {
        &self.repo
    }

    /// Exclusive access; fails if any other clone of this handle is alive.
    pub fn repo_mut(&mut self) -> crate::GitResult<&mut Repository> {
        Arc::get_mut(&mut self.repo)
            .ok_or_else(|| GitError::Internal("GitDomain requires exclusive access".to_string()))
    }
}
diff --git a/libs/git/error.rs b/libs/git/error.rs
new file mode 100644
index 0000000..0a1d9bb
--- /dev/null
+++ b/libs/git/error.rs
@@ -0,0 +1,98 @@
//! Git domain error types.
+ +use std::fmt; + +/// Result type alias for Git operations. +pub type GitResult = Result; + +/// Git domain errors. +#[derive(Debug, Clone)] +pub enum GitError { + /// Repository is not found or not accessible. + NotFound(String), + /// Object not found. + ObjectNotFound(String), + /// Reference not found. + RefNotFound(String), + /// Invalid reference name. + InvalidRefName(String), + /// Invalid object id. + InvalidOid(String), + /// Branch already exists. + BranchExists(String), + /// Tag already exists. + TagExists(String), + /// Branch is protected. + BranchProtected(String), + /// Merge conflict. + MergeConflict(String), + /// Hook execution failed. + HookFailed(String), + /// LFS operation failed. + LfsError(String), + /// Config error. + ConfigError(String), + /// I/O error. + IoError(String), + /// Authentication failed. + AuthFailed(String), + /// Permission denied. + PermissionDenied(String), + /// Locked resource. + Locked(String), + /// Internal error. + Internal(String), +} + +impl fmt::Display for GitError { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + GitError::NotFound(s) => write!(f, "not found: {}", s), + GitError::ObjectNotFound(s) => write!(f, "object not found: {}", s), + GitError::RefNotFound(s) => write!(f, "ref not found: {}", s), + GitError::InvalidRefName(s) => write!(f, "invalid ref name: {}", s), + GitError::InvalidOid(s) => write!(f, "invalid oid: {}", s), + GitError::BranchExists(s) => write!(f, "branch already exists: {}", s), + GitError::TagExists(s) => write!(f, "tag already exists: {}", s), + GitError::BranchProtected(s) => write!(f, "branch is protected: {}", s), + GitError::MergeConflict(s) => write!(f, "merge conflict: {}", s), + GitError::HookFailed(s) => write!(f, "hook failed: {}", s), + GitError::LfsError(s) => write!(f, "lfs error: {}", s), + GitError::ConfigError(s) => write!(f, "config error: {}", s), + GitError::IoError(s) => write!(f, "io error: {}", s), + GitError::AuthFailed(s) => 
write!(f, "auth failed: {}", s), + GitError::PermissionDenied(s) => write!(f, "permission denied: {}", s), + GitError::Locked(s) => write!(f, "locked: {}", s), + GitError::Internal(s) => write!(f, "internal error: {}", s), + } + } +} + +impl std::error::Error for GitError {} + +impl From for GitError { + fn from(e: git2::Error) -> Self { + match e.code() { + git2::ErrorCode::NotFound => GitError::NotFound(e.message().to_string()), + git2::ErrorCode::Exists => GitError::BranchExists(e.message().to_string()), + git2::ErrorCode::InvalidSpec => GitError::InvalidRefName(e.message().to_string()), + git2::ErrorCode::MergeConflict => GitError::MergeConflict(e.message().to_string()), + git2::ErrorCode::Auth => GitError::AuthFailed(e.message().to_string()), + git2::ErrorCode::Invalid => GitError::InvalidOid(e.message().to_string()), + git2::ErrorCode::Locked => GitError::Locked(e.message().to_string()), + _ => GitError::Internal(e.message().to_string()), + } + } +} + +impl From for GitError { + fn from(e: std::io::Error) -> Self { + GitError::IoError(e.to_string()) + } +} + +impl From for GitError { + fn from(e: sea_orm::DbErr) -> Self { + GitError::Internal(format!("db error: {}", e)) + } +} diff --git a/libs/git/hook/event.rs b/libs/git/hook/event.rs new file mode 100644 index 0000000..75d4712 --- /dev/null +++ b/libs/git/hook/event.rs @@ -0,0 +1,7 @@ +use models::RepoId; +use serde::{Deserialize, Serialize}; + +#[derive(Deserialize, Serialize, Clone, Debug, Ord, PartialOrd, PartialEq, Eq)] +pub enum GitHookEvent { + MetaDataSync(RepoId), +} diff --git a/libs/git/hook/mod.rs b/libs/git/hook/mod.rs new file mode 100644 index 0000000..9d1dc9e --- /dev/null +++ b/libs/git/hook/mod.rs @@ -0,0 +1,70 @@ +use config::AppConfig; +use db::cache::AppCache; +use db::database::AppDatabase; +use deadpool_redis::cluster::Pool as RedisPool; +use slog::Logger; +use std::sync::Arc; + +use crate::hook::pool::GitHookPool; + +#[derive(Clone)] +pub struct GitServiceHooks { + pub(crate) db: 
AppDatabase,
    pub(crate) cache: AppCache,
    pub(crate) redis_pool: RedisPool,
    pub(crate) logger: Logger,
    pub(crate) config: AppConfig,
    // NOTE(review): the generic parameter was stripped in extraction; an HTTP
    // client handle is assumed from its use in webhook dispatch — confirm.
    pub(crate) http: Arc<reqwest::Client>,
}

impl GitServiceHooks {
    /// Bundles the shared service dependencies for the hook worker.
    pub fn new(
        db: AppDatabase,
        cache: AppCache,
        redis_pool: RedisPool,
        logger: Logger,
        config: AppConfig,
        http: Arc<reqwest::Client>,
    ) -> Self {
        Self {
            db,
            cache,
            redis_pool,
            logger,
            config,
            http,
        }
    }

    /// Builds the worker pool from env-derived config and runs it until
    /// `cancel` fires. Consumes `self`; returns after a clean shutdown.
    pub async fn run(
        self,
        cancel: tokio_util::sync::CancellationToken,
    ) -> Result<(), crate::GitError> {
        let pool_config = config::hook::PoolConfig::from_env(&self.config);

        let pool = GitHookPool::new(
            pool_config,
            self.db,
            self.cache,
            self.redis_pool,
            self.logger.clone(),
            self.http,
        )
        .await?;

        let pool_arc = Arc::new(pool);

        slog::info!(self.logger, "git hook service started");

        pool_arc.run(cancel).await;

        slog::info!(self.logger, "git hook service stopped");

        Ok(())
    }
}

pub mod event;
pub mod pool;
pub mod sync;
pub mod webhook_dispatch;
diff --git a/libs/git/hook/pool/log.rs b/libs/git/hook/pool/log.rs
new file mode 100644
index 0000000..142ebd6
--- /dev/null
+++ b/libs/git/hook/pool/log.rs
@@ -0,0 +1,103 @@
use deadpool_redis::cluster::Pool;
use serde::Serialize;
use std::sync::Arc;

/// One structured log record published over Redis pub/sub.
#[derive(Debug, Clone, Serialize)]
pub struct TaskLog {
    pub task_id: String,
    pub repo_id: String,
    pub worker_id: String,
    pub level: String,
    pub message: String,
    pub timestamp: chrono::DateTime<chrono::Utc>,
}

/// Publishes task logs to a Redis channel, tagged with this worker's id.
// All fields are `Clone`, so the previous hand-written Clone impl is
// replaced by the derive.
#[derive(Clone)]
pub struct LogStream {
    channel: String,
    worker_id: String,
    pool: Arc<Pool>,
}

impl LogStream {
    pub fn new(channel: String, worker_id: String, pool: Arc<Pool>) -> Self {
        Self {
            channel,
            worker_id,
            pool,
        }
    }

    /// Best-effort publish: serialization, pool, or PUBLISH failures are
    /// reported to stderr and swallowed (logging must never fail a task).
    async fn publish_log(&self, log: TaskLog) {
        let data = match serde_json::to_vec(&log) {
            Ok(d) => d,
            Err(e) => {
                eprintln!("failed to serialize log: {}", e);
                return;
            }
        };

        let redis = match self.pool.get().await {
            Ok(c) => c,
            Err(e) => {
                eprintln!("redis pool get failed: {}", e);
                return;
            }
        };

        let mut conn: deadpool_redis::cluster::Connection = redis;
        if let Err(e) = redis::cmd("PUBLISH")
            .arg(&self.channel)
            .arg(&data)
            .query_async::<()>(&mut conn)
            .await
        {
            eprintln!("Redis PUBLISH failed: {}", e);
        }
    }

    pub async fn info(&self, task_id: &str, repo_id: &str, message: &str) {
        self.publish_log(TaskLog {
            task_id: task_id.to_string(),
            repo_id: repo_id.to_string(),
            worker_id: self.worker_id.clone(),
            level: "info".to_string(),
            message: message.to_string(),
            timestamp: chrono::Utc::now(),
        })
        .await;
    }

    pub async fn error(&self, task_id: &str, repo_id: &str, message: &str) {
        self.publish_log(TaskLog {
            task_id: task_id.to_string(),
            repo_id: repo_id.to_string(),
            worker_id: self.worker_id.clone(),
            level: "error".to_string(),
            message: message.to_string(),
            timestamp: chrono::Utc::now(),
        })
        .await;
    }

    pub async fn warn(&self, task_id: &str, repo_id: &str, message: &str) {
        self.publish_log(TaskLog {
            task_id: task_id.to_string(),
            repo_id: repo_id.to_string(),
            worker_id: self.worker_id.clone(),
            level: "warn".to_string(),
            message: message.to_string(),
            timestamp: chrono::Utc::now(),
        })
        .await;
    }
}
diff --git a/libs/git/hook/pool/metrics.rs b/libs/git/hook/pool/metrics.rs
new file mode 100644
index 0000000..ba58382
--- /dev/null
+++ b/libs/git/hook/pool/metrics.rs
@@ -0,0 +1,42 @@
use std::sync::Arc;
use sysinfo::System;
use tokio::sync::RwLock;

/// Samples global CPU usage to throttle task admission.
pub struct CpuMonitor {
    sys: Arc<RwLock<System>>,
}

impl CpuMonitor {
    pub fn new() -> Self {
        let mut sys = System::new();
        sys.refresh_cpu_all();
        Self {
            sys: Arc::new(RwLock::new(sys)),
        }
    }

    /// Current global CPU usage percentage (refreshes under a write lock).
    pub async fn cpu_usage(&self) -> f32 {
        let mut sys = self.sys.write().await;
        sys.refresh_cpu_all();
        sys.global_cpu_usage()
    }

    pub
async fn can_accept_task(
        &self,
        max_concurrent: usize,
        cpu_threshold: f32,
        running: usize,
    ) -> bool {
        // Concurrency cap is checked first (cheap); only then sample the CPU.
        if running >= max_concurrent {
            return false;
        }
        let cpu = self.cpu_usage().await;
        cpu < cpu_threshold
    }
}

impl Default for CpuMonitor {
    fn default() -> Self {
        Self::new()
    }
}
diff --git a/libs/git/hook/pool/mod.rs b/libs/git/hook/pool/mod.rs
new file mode 100644
index 0000000..82dd8de
--- /dev/null
+++ b/libs/git/hook/pool/mod.rs
@@ -0,0 +1,481 @@
pub mod log;
pub mod metrics;
pub mod redis;
pub mod types;

use db::cache::AppCache;
use db::database::AppDatabase;
use deadpool_redis::cluster::Pool as RedisPool;
use sea_orm::EntityTrait;
use slog::Logger;
use std::sync::Arc;
use std::sync::atomic::{AtomicU64, Ordering};
use tokio::sync::Semaphore;
use tokio::task::{JoinSet, spawn_blocking};
use tokio_util::sync::CancellationToken;

use crate::hook::pool::log::LogStream;
use crate::hook::pool::metrics::CpuMonitor;
use crate::hook::pool::redis::RedisConsumer;
use crate::hook::pool::types::{HookTask, PoolConfig, PoolMetrics, TaskType};
use crate::hook::sync::HookMetaDataSync;

/// Worker pool that consumes hook tasks (sync/fsck/gc) from Redis queues.
pub struct GitHookPool {
    config: PoolConfig,
    db: AppDatabase,
    cache: AppCache,
    logger: Logger,
    cpu_monitor: CpuMonitor,
    consumer: RedisConsumer,
    log_stream: LogStream,
    running_count: Arc<AtomicU64>,
    total_processed: Arc<AtomicU64>,
    total_failed: Arc<AtomicU64>,
    semaphore: Arc<Semaphore>,
    // NOTE(review): generic stripped in extraction; HTTP client assumed.
    http: Arc<reqwest::Client>,
}

impl GitHookPool {
    pub async fn new(
        config: PoolConfig,
        db: AppDatabase,
        cache: AppCache,
        redis_pool: RedisPool,
        logger: Logger,
        http: Arc<reqwest::Client>,
    ) -> Result<Self, crate::GitError> {
        let consumer = RedisConsumer::new(
            redis_pool.clone(),
            config.redis_list_prefix.clone(),
            config.redis_block_timeout_secs,
            logger.clone(),
        );

        let log_stream = LogStream::new(
            config.redis_log_channel.clone(),
            config.worker_id.clone(),
            Arc::new(redis_pool),
        );

        Ok(Self {
            config,
            db,
            cache,
            logger,
            cpu_monitor: CpuMonitor::new(),
            consumer,
            log_stream,
running_count: Arc::new(AtomicU64::new(0)), + total_processed: Arc::new(AtomicU64::new(0)), + total_failed: Arc::new(AtomicU64::new(0)), + semaphore: Arc::new(Semaphore::new(num_cpus::get())), + http, + }) + } + + pub async fn run(self: Arc, cancel: CancellationToken) { + let mut join_set = JoinSet::<()>::new(); + let cancel_clone = cancel.clone(); + + // Task types to poll + let task_types = [TaskType::Sync, TaskType::Fsck, TaskType::Gc]; + + loop { + tokio::select! { + _ = cancel_clone.cancelled() => { + slog::info!(self.logger, "pool received shutdown signal, draining {} tasks", join_set.len()); + while join_set.join_next().await.is_some() {} + slog::info!(self.logger, "pool shutdown complete"); + break; + } + + _ = tokio::time::sleep(tokio::time::Duration::from_millis(100)) => {} + } + + let running = self.running_count.load(Ordering::Relaxed) as usize; + let can_accept = self + .cpu_monitor + .can_accept_task( + self.config.max_concurrent, + self.config.cpu_threshold, + running, + ) + .await; + + if !can_accept { + tokio::time::sleep(tokio::time::Duration::from_millis(500)).await; + continue; + } + + // Poll each task type in round-robin fashion + for task_type in &task_types { + let result = self.consumer.next(&task_type.to_string()).await; + + let (task, task_json) = match result { + Ok(Some(pair)) => pair, + Ok(None) => continue, // timeout, try next queue + Err(e) => { + slog::warn!(self.logger, "failed to dequeue task: {}", e); + tokio::time::sleep(tokio::time::Duration::from_millis(500)).await; + break; + } + }; + + let self_clone = self.clone(); + + // Compute queue/work keys for ACK/NAK + let queue_key = format!( + "{}:{}", + self_clone.config.redis_list_prefix, + task_type.to_string() + ); + let work_key = format!("{}:work", queue_key); + + let permit = match self_clone.semaphore.clone().acquire_owned().await { + Ok(p) => p, + Err(_) => continue, + }; + + let self_clone2 = self.clone(); + self_clone2.running_count.fetch_add(1, Ordering::Relaxed); + 
let logger_clone = self_clone2.logger.clone(); + let counter_clone = self_clone2.running_count.clone(); + join_set.spawn(async move { + let panicked = match spawn_blocking(move || { + std::panic::catch_unwind(std::panic::AssertUnwindSafe(|| { + tokio::runtime::Handle::current().block_on(async { + self_clone2 + .execute_task_body(task, task_json, queue_key, work_key) + .await + }) + })) + }) + .await + { + Ok(Ok(Ok(()))) => false, // spawn_blocking Ok, catch_unwind Ok, body Ok + Ok(Ok(Err(_))) => true, // spawn_blocking Ok, catch_unwind Ok, body Err(()) — never hit + Ok(Err(_)) => true, // spawn_blocking Ok, catch_unwind Err = panic + Err(_) => true, // spawn_blocking Err = thread aborted + }; + drop(permit); + counter_clone.fetch_sub(1, Ordering::Relaxed); + if panicked { + slog::error!(logger_clone, "task panicked";); + } + }); + + // Only process one task per loop iteration to avoid overwhelming the pool + break; + } + } + } + + async fn execute_task_body( + &self, + task: HookTask, + task_json: String, + queue_key: String, + work_key: String, + ) -> Result<(), ()> { + slog::info!(self.logger, "task started"; + "task_id" => &task.id, + "task_type" => %task.task_type, + "repo_id" => &task.repo_id, + "worker_id" => &self.config.worker_id + ); + + self.log_stream + .info( + &task.id, + &task.repo_id, + &format!("task started: {}", task.task_type), + ) + .await; + + let result = match task.task_type { + TaskType::Sync => self.run_sync(&task).await, + TaskType::Fsck => self.run_fsck(&task).await, + TaskType::Gc => self.run_gc(&task).await, + }; + + let consumer = self.consumer.clone(); + match result { + Ok(()) => { + if let Err(e) = consumer.ack(&work_key, &task_json).await { + slog::warn!(self.logger, "failed to ack task: {}", e); + } + self.total_processed.fetch_add(1, Ordering::Relaxed); + self.log_stream + .info(&task.id, &task.repo_id, "task completed") + .await; + } + Err(e) => { + if let Err(e) = consumer.nak(&work_key, &queue_key, &task_json).await { + 
slog::warn!(self.logger, "failed to nak task: {}", e); + } + self.total_failed.fetch_add(1, Ordering::Relaxed); + self.log_stream + .error(&task.id, &task.repo_id, &format!("task failed: {}", e)) + .await; + } + } + + Ok(()) + } + + async fn run_sync(&self, task: &HookTask) -> Result<(), crate::GitError> { + let repo_id = models::Uuid::parse_str(&task.repo_id) + .map_err(|_| crate::GitError::Internal("invalid repo_id uuid".into()))?; + + let repo = models::repos::repo::Entity::find_by_id(repo_id) + .one(self.db.reader()) + .await + .map_err(crate::GitError::from)? + .ok_or_else(|| crate::GitError::NotFound(format!("repo {} not found", repo_id)))?; + + let db_clone = self.db.clone(); + let cache_clone = self.cache.clone(); + let repo_clone = repo.clone(); + let logger_clone = self.logger.clone(); + + // Phase 1: capture before branch/tag tips. + let before_tips: (Vec<(String, String)>, Vec<(String, String)>) = + tokio::task::spawn_blocking({ + let db = db_clone.clone(); + let cache = cache_clone.clone(); + let repo = repo_clone.clone(); + let logger = logger_clone.clone(); + move || { + let sync = HookMetaDataSync::new(db, cache, repo, logger)?; + Ok::<_, crate::GitError>((sync.list_branch_tips(), sync.list_tag_tips())) + } + }) + .await + .map_err(|e| crate::GitError::Internal(format!("spawn_blocking failed: {}", e)))??; + + // Phase 2: run sync (async operation). + let sync_result: Result<(), crate::GitError> = tokio::task::spawn_blocking({ + let db = db_clone.clone(); + let cache = cache_clone.clone(); + let repo = repo_clone.clone(); + let logger = logger_clone.clone(); + move || { + let sync = HookMetaDataSync::new(db, cache, repo, logger)?; + tokio::runtime::Handle::current().block_on(async { sync.sync().await }) + } + }) + .await + .map_err(|e| crate::GitError::Internal(format!("spawn_blocking failed: {}", e)))?; + + sync_result?; + + // Phase 3: capture after branch/tag tips. 
+ let after_tips: (Vec<(String, String)>, Vec<(String, String)>) = + tokio::task::spawn_blocking({ + let db = db_clone.clone(); + let cache = cache_clone.clone(); + let repo = repo_clone.clone(); + let logger = logger_clone.clone(); + move || { + let sync = HookMetaDataSync::new(db, cache, repo, logger)?; + Ok::<_, crate::GitError>((sync.list_branch_tips(), sync.list_tag_tips())) + } + }) + .await + .map_err(|e| crate::GitError::Internal(format!("spawn_blocking failed: {}", e)))??; + + let (before_branch_tips, before_tag_tips) = before_tips; + let (after_branch_tips, after_tag_tips) = after_tips; + + let repo_uuid = repo.id.to_string(); + let repo_name = repo.repo_name.clone(); + let default_branch = repo.default_branch.clone(); + + // Resolve namespace = project.name + let namespace = models::projects::Project::find_by_id(repo.project) + .one(self.db.reader()) + .await + .map_err(|e| crate::GitError::Internal(format!("failed to fetch project: {}", e)))? + .map(|p| p.name) + .unwrap_or_default(); + + let logger = self.logger.clone(); + let http = self.http.clone(); + let db = self.db.clone(); + + // Dispatch branch push webhooks. 
+ for (branch, after_oid) in &after_branch_tips { + let before_oid = before_branch_tips + .iter() + .find(|(n, _)| n == branch) + .map(|(_, o)| o.as_str()); + let changed = before_oid.map(|o| o != after_oid.as_str()).unwrap_or(true); + if changed { + let before_oid = before_oid.map_or("0", |v| v).to_string(); + let after = after_oid.clone(); + let branch_name = branch.clone(); + + slog::info!(logger, "detected push on branch"; "branch" => &branch_name, "before" => &before_oid, "after" => &after); + + let http = http.clone(); + let db = db.clone(); + let logs = logger.clone(); + let ru = repo_uuid.clone(); + let ns = namespace.clone(); + let rn = repo_name.clone(); + let db_branch = default_branch.clone(); + + tokio::spawn(async move { + crate::hook::webhook_dispatch::dispatch_repo_webhooks( + &db, + &http, + &logs, + &ru, + &ns, + &rn, + &db_branch, + "", + "", + crate::hook::webhook_dispatch::WebhookEventKind::Push { + r#ref: format!("refs/heads/{}", branch_name), + before: before_oid, + after, + commits: vec![], + }, + ) + .await; + }); + } + } + + // Dispatch tag push webhooks. 
+ for (tag, after_oid) in &after_tag_tips { + let before_oid = before_tag_tips + .iter() + .find(|(n, _)| n == tag) + .map(|(_, o)| o.as_str()); + let is_new = before_oid.is_none(); + let was_updated = before_oid.map(|o| o != after_oid.as_str()).unwrap_or(false); + if is_new || was_updated { + let before_oid = before_oid.map_or("0", |v| v).to_string(); + let after = after_oid.clone(); + let tag_name = tag.clone(); + + slog::info!(logger, "detected tag push"; "tag" => &tag_name, "before" => &before_oid, "after" => &after); + + let http = http.clone(); + let db = db.clone(); + let logs = logger.clone(); + let ru = repo_uuid.clone(); + let ns = namespace.clone(); + let rn = repo_name.clone(); + let db_branch = default_branch.clone(); + + tokio::spawn(async move { + crate::hook::webhook_dispatch::dispatch_repo_webhooks( + &db, + &http, + &logs, + &ru, + &ns, + &rn, + &db_branch, + "", + "", + crate::hook::webhook_dispatch::WebhookEventKind::TagPush { + r#ref: format!("refs/tags/{}", tag_name), + before: before_oid, + after, + }, + ) + .await; + }); + } + } + + Ok(()) + } + + async fn run_fsck(&self, task: &HookTask) -> Result<(), crate::GitError> { + let repo_id = models::Uuid::parse_str(&task.repo_id) + .map_err(|_| crate::GitError::Internal("invalid repo_id uuid".into()))?; + + let repo = models::repos::repo::Entity::find_by_id(repo_id) + .one(self.db.reader()) + .await + .map_err(crate::GitError::from)? 
+ .ok_or_else(|| crate::GitError::NotFound(format!("repo {} not found", repo_id)))?; + + self.log_stream + .info(&task.id, &task.repo_id, "running fsck") + .await; + + let db_clone = self.db.clone(); + let cache_clone = self.cache.clone(); + let logger_clone = self.logger.clone(); + + tokio::task::spawn_blocking(move || -> Result<(), crate::GitError> { + tokio::runtime::Handle::current().block_on(async move { + let sync = + HookMetaDataSync::new(db_clone.clone(), cache_clone, repo, logger_clone)?; + let mut txn = db_clone.begin().await.map_err(crate::GitError::from)?; + sync.run_fsck_and_rollback_if_corrupt(&mut txn).await + }) + }) + .await + .map_err(|e| crate::GitError::Internal(format!("spawn_blocking failed: {}", e)))??; + + Ok(()) + } + + async fn run_gc(&self, task: &HookTask) -> Result<(), crate::GitError> { + let repo_id = models::Uuid::parse_str(&task.repo_id) + .map_err(|_| crate::GitError::Internal("invalid repo_id uuid".into()))?; + + let repo = models::repos::repo::Entity::find_by_id(repo_id) + .one(self.db.reader()) + .await + .map_err(crate::GitError::from)? 
+ .ok_or_else(|| crate::GitError::NotFound(format!("repo {} not found", repo_id)))?; + + self.log_stream + .info(&task.id, &task.repo_id, "running gc") + .await; + + let db_clone = self.db.clone(); + let cache_clone = self.cache.clone(); + let logger_clone = self.logger.clone(); + + tokio::task::spawn_blocking(move || -> Result<(), crate::GitError> { + tokio::runtime::Handle::current().block_on(async move { + let sync = HookMetaDataSync::new(db_clone, cache_clone, repo, logger_clone)?; + sync.run_gc().await + }) + }) + .await + .map_err(|e| crate::GitError::Internal(format!("spawn_blocking failed: {}", e)))??; + + Ok(()) + } + + pub fn metrics(&self) -> PoolMetrics { + let running = self.running_count.load(Ordering::Relaxed) as usize; + PoolMetrics { + running, + max_concurrent: self.config.max_concurrent, + cpu_usage: 0.0, + total_processed: self.total_processed.load(Ordering::Relaxed), + total_failed: self.total_failed.load(Ordering::Relaxed), + can_accept: running < self.config.max_concurrent, + } + } + + pub fn can_accept_task_sync(&self) -> bool { + let running = self.running_count.load(Ordering::Relaxed) as usize; + running < self.config.max_concurrent + } + + pub fn log_stream(&self) -> &LogStream { + &self.log_stream + } +} diff --git a/libs/git/hook/pool/redis.rs b/libs/git/hook/pool/redis.rs new file mode 100644 index 0000000..95c11e4 --- /dev/null +++ b/libs/git/hook/pool/redis.rs @@ -0,0 +1,165 @@ +use crate::error::GitError; +use crate::hook::pool::types::HookTask; +use deadpool_redis::cluster::Connection as RedisConn; +use slog::Logger; + +/// Redis List consumer using BLMOVE for atomic move-from-queue-to-work pattern. +/// Compatible with Redis Cluster via hash tags in key names. +pub struct RedisConsumer { + pool: deadpool_redis::cluster::Pool, + /// Hash-tag-prefixed key prefix, e.g. "{hook}". 
+ /// Full queue key: "{hook}:{task_type}" + /// Full work key: "{hook}:{task_type}:work" + prefix: String, + block_timeout_secs: u64, + logger: Logger, +} + +impl RedisConsumer { + pub fn new( + pool: deadpool_redis::cluster::Pool, + prefix: String, + block_timeout_secs: u64, + logger: Logger, + ) -> Self { + Self { + pool, + prefix, + block_timeout_secs, + logger, + } + } + + /// Atomically moves a task from the main queue to the work queue using BLMOVE. + /// Blocks up to `block_timeout_secs` waiting for a task. + /// + /// Returns `Some((HookTask, task_json))` where `task_json` is the raw JSON string + /// needed for LREM on ACK. Returns `None` if the blocking timed out. + pub async fn next(&self, task_type: &str) -> Result, GitError> { + let queue_key = format!("{}:{}", self.prefix, task_type); + let work_key = format!("{}:{}:work", self.prefix, task_type); + + let redis = self + .pool + .get() + .await + .map_err(|e| GitError::Internal(format!("redis pool get failed: {}", e)))?; + + let mut conn: RedisConn = redis; + + // BLMOVE source destination timeout + // RIGHT LEFT = BRPOPLPUSH equivalent (pop from right of src, push to left of dst) + let task_json: Option = redis::cmd("BLMOVE") + .arg(&queue_key) + .arg(&work_key) + .arg("RIGHT") + .arg("LEFT") + .arg(self.block_timeout_secs) + .query_async(&mut conn) + .await + .map_err(|e| GitError::Internal(format!("BLMOVE failed: {}", e)))?; + + match task_json { + Some(json) => { + match serde_json::from_str::(&json) { + Ok(task) => { + slog::debug!(self.logger, "task dequeued"; + "task_id" => %task.id, + "task_type" => %task.task_type, + "queue" => %queue_key + ); + Ok(Some((task, json))) + } + Err(e) => { + // Malformed task — remove from work queue and discard + slog::warn!(self.logger, "malformed task JSON, discarding"; + "error" => %e, + "queue" => %work_key + ); + let _ = self.ack_raw(&work_key, &json).await; + Ok(None) + } + } + } + None => { + // Timed out, no task available + Ok(None) + } + } + } + + /// 
Acknowledge a task: remove it from the work queue (LREM). + pub async fn ack(&self, work_key: &str, task_json: &str) -> Result<(), GitError> { + self.ack_raw(work_key, task_json).await + } + + async fn ack_raw(&self, work_key: &str, task_json: &str) -> Result<(), GitError> { + let redis = self + .pool + .get() + .await + .map_err(|e| GitError::Internal(format!("redis pool get failed: {}", e)))?; + + let mut conn: RedisConn = redis; + + let _: i64 = redis::cmd("LREM") + .arg(work_key) + .arg(-1) // remove all occurrences + .arg(task_json) + .query_async(&mut conn) + .await + .map_err(|e| GitError::Internal(format!("LREM failed: {}", e)))?; + + Ok(()) + } + + /// Negative acknowledge (retry): remove from work queue and push back to main queue. + pub async fn nak( + &self, + work_key: &str, + queue_key: &str, + task_json: &str, + ) -> Result<(), GitError> { + // First remove from work queue + self.ack_raw(work_key, task_json).await?; + + // Then push back to main queue for retry + let redis = self + .pool + .get() + .await + .map_err(|e| GitError::Internal(format!("redis pool get failed: {}", e)))?; + + let mut conn: RedisConn = redis; + + let _: i64 = redis::cmd("LPUSH") + .arg(queue_key) + .arg(task_json) + .query_async(&mut conn) + .await + .map_err(|e| GitError::Internal(format!("LPUSH retry failed: {}", e)))?; + + slog::warn!(self.logger, "task nack'd and requeued"; "queue" => %queue_key); + + Ok(()) + } + + pub fn pool(&self) -> &deadpool_redis::cluster::Pool { + &self.pool + } + + pub fn prefix(&self) -> &str { + &self.prefix + } +} + +impl Clone for RedisConsumer { + fn clone(&self) -> Self { + Self { + pool: self.pool.clone(), + prefix: self.prefix.clone(), + block_timeout_secs: self.block_timeout_secs, + logger: self.logger.clone(), + } + } +} diff --git a/libs/git/hook/pool/types.rs b/libs/git/hook/pool/types.rs new file mode 100644 index 0000000..fc07999 --- /dev/null +++ b/libs/git/hook/pool/types.rs @@ -0,0 +1,40 @@ +use serde::{Deserialize, Serialize}; 
+ +pub use config::hook::PoolConfig; + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct HookTask { + pub id: String, + pub repo_id: String, + pub task_type: TaskType, + pub payload: serde_json::Value, + pub created_at: chrono::DateTime, +} + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +#[serde(rename_all = "snake_case")] +pub enum TaskType { + Sync, + Fsck, + Gc, +} + +impl std::fmt::Display for TaskType { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + TaskType::Sync => write!(f, "sync"), + TaskType::Fsck => write!(f, "fsck"), + TaskType::Gc => write!(f, "gc"), + } + } +} + +#[derive(Debug, Clone)] +pub struct PoolMetrics { + pub running: usize, + pub max_concurrent: usize, + pub cpu_usage: f32, + pub total_processed: u64, + pub total_failed: u64, + pub can_accept: bool, +} diff --git a/libs/git/hook/sync/branch.rs b/libs/git/hook/sync/branch.rs new file mode 100644 index 0000000..74221f7 --- /dev/null +++ b/libs/git/hook/sync/branch.rs @@ -0,0 +1,141 @@ +use crate::GitError; +use crate::hook::sync::HookMetaDataSync; +use db::database::AppTransaction; +use models::repos::repo; +use models::repos::repo_branch; +use sea_orm::prelude::Expr; +use sea_orm::*; +use std::collections::HashSet; + +impl HookMetaDataSync { + pub async fn sync_refs(&self, txn: &AppTransaction) -> Result<(), GitError> { + let repo_id = self.repo.id; + let now = chrono::Utc::now(); + + let existing: Vec = repo_branch::Entity::find() + .filter(repo_branch::Column::Repo.eq(repo_id)) + .all(txn) + .await + .map_err(|e| GitError::IoError(format!("failed to query branches: {}", e)))?; + let mut existing_names: HashSet = existing.iter().map(|r| r.name.clone()).collect(); + + let references = self + .domain + .repo() + .references() + .map_err(|e| GitError::Internal(e.to_string()))?; + + // Auto-detect first local branch when default_branch is empty + let mut auto_detected_branch: Option = None; + + for reference in references { + let 
reference = reference.map_err(|e| GitError::Internal(e.to_string()))?; + let name = reference + .name() + .ok_or_else(|| GitError::RefNotFound("unnamed ref".into()))? + .to_string(); + let shorthand = reference.shorthand().unwrap_or("").to_string(); + + let target_oid = match reference.target() { + Some(oid) => oid.to_string(), + None => continue, + }; + + let is_branch = reference.is_branch(); + let is_remote = reference.is_remote(); + + // Detect first local branch if no default is set + if self.repo.default_branch.is_empty() + && is_branch + && !is_remote + && auto_detected_branch.is_none() + { + auto_detected_branch = Some(shorthand.clone()); + } + + let upstream = if is_branch && !is_remote { + reference + .shorthand() + .map(|short| format!("refs/remotes/origin/{}", short)) + } else { + None + }; + + if existing_names.contains(&name) { + existing_names.remove(&name); + repo_branch::Entity::update_many() + .filter(repo_branch::Column::Repo.eq(repo_id)) + .filter(repo_branch::Column::Name.eq(&name)) + .col_expr(repo_branch::Column::Oid, Expr::value(&target_oid)) + .col_expr(repo_branch::Column::Upstream, Expr::value(upstream)) + .col_expr( + repo_branch::Column::Head, + Expr::value(is_branch && shorthand == self.repo.default_branch), + ) + .col_expr(repo_branch::Column::UpdatedAt, Expr::value(now)) + .exec(txn) + .await + .map_err(|e| GitError::IoError(format!("failed to update branch: {}", e)))?; + } else { + let new_branch = repo_branch::ActiveModel { + repo: Set(repo_id), + name: Set(name), + oid: Set(target_oid), + upstream: Set(upstream), + head: Set(is_branch && shorthand == self.repo.default_branch), + created_at: Set(now), + updated_at: Set(now), + ..Default::default() + }; + new_branch + .insert(txn) + .await + .map_err(|e| GitError::IoError(format!("failed to insert branch: {}", e)))?; + } + } + + if !existing_names.is_empty() { + repo_branch::Entity::delete_many() + .filter(repo_branch::Column::Repo.eq(repo_id)) + 
.filter(repo_branch::Column::Name.is_in(existing_names)) + .exec(txn) + .await + .map_err(|e| { + GitError::IoError(format!("failed to delete stale branches: {}", e)) + })?; + } + + // Persist auto-detected default branch and update head flags + if let Some(ref branch_name) = auto_detected_branch { + // 1. Update the repo's default_branch + repo::Entity::update_many() + .filter(repo::Column::Id.eq(repo_id)) + .col_expr( + repo::Column::DefaultBranch, + Expr::value(branch_name.clone()), + ) + .exec(txn) + .await + .map_err(|e| GitError::IoError(format!("failed to set default branch: {}", e)))?; + + // 2. Clear head on all branches + repo_branch::Entity::update_many() + .filter(repo_branch::Column::Repo.eq(repo_id)) + .col_expr(repo_branch::Column::Head, Expr::value(false)) + .exec(txn) + .await + .map_err(|e| GitError::IoError(format!("failed to clear head flags: {}", e)))?; + + // 3. Set head = true for the detected branch (it was inserted above) + repo_branch::Entity::update_many() + .filter(repo_branch::Column::Repo.eq(repo_id)) + .filter(repo_branch::Column::Name.eq(branch_name)) + .col_expr(repo_branch::Column::Head, Expr::value(true)) + .exec(txn) + .await + .map_err(|e| GitError::IoError(format!("failed to set head flag: {}", e)))?; + } + + Ok(()) + } +} diff --git a/libs/git/hook/sync/commit.rs b/libs/git/hook/sync/commit.rs new file mode 100644 index 0000000..ca69188 --- /dev/null +++ b/libs/git/hook/sync/commit.rs @@ -0,0 +1,245 @@ +use crate::GitError; +use crate::hook::sync::HookMetaDataSync; +use db::database::AppTransaction; +use models::repos::RepoCollaborator; +use models::repos::RepoCommit; +use models::repos::repo_collaborator; +use models::repos::repo_commit; +use models::users::user_email; +use sea_orm::*; +use sea_query::OnConflict; +use std::collections::{HashMap, HashSet}; + +impl HookMetaDataSync { + pub async fn sync_commits(&self, txn: &AppTransaction) -> Result<(), GitError> { + let repo_id = self.repo.id; + let repo = self.domain.repo(); 
+ + if repo.is_empty().unwrap_or(true) { + return Ok(()); + } + + let existing_oids: Vec = RepoCommit::find() + .filter(repo_commit::Column::Repo.eq(repo_id)) + .select_only() + .column(repo_commit::Column::Oid) + .into_tuple() + .all(txn) + .await + .map_err(|e| GitError::IoError(format!("failed to query commits: {}", e)))?; + let existing_set: HashSet = existing_oids.into_iter().collect(); + + let branch_names = self.list_branch_names(); + + let mut new_oid_list: Vec<(git2::Oid, String)> = Vec::new(); + for ref_name in &branch_names { + let mut revwalk = repo + .revwalk() + .map_err(|e| GitError::Internal(e.to_string()))?; + revwalk + .push_ref(ref_name) + .map_err(|e| GitError::Internal(e.to_string()))?; + revwalk + .set_sorting(git2::Sort::TOPOLOGICAL | git2::Sort::TIME) + .map_err(|e| GitError::Internal(e.to_string()))?; + for oid_result in revwalk { + let oid = oid_result.map_err(|e| GitError::Internal(e.to_string()))?; + let oid_str = oid.to_string(); + if !existing_set.contains(&oid_str) + && !new_oid_list.iter().any(|(_, s)| s == &oid_str) + { + new_oid_list.push((oid, oid_str)); + } + } + } + + if new_oid_list.is_empty() { + return Ok(()); + } + + let mut author_emails: Vec = Vec::with_capacity(new_oid_list.len()); + let mut committer_emails: Vec = Vec::with_capacity(new_oid_list.len()); + let mut commits_data: Vec = Vec::with_capacity(new_oid_list.len()); + + for (oid, oid_str) in &new_oid_list { + let commit = repo + .find_commit(*oid) + .map_err(|e| GitError::Internal(e.to_string()))?; + let author = commit.author(); + let committer = commit.committer(); + + let a_email = author.email().unwrap_or("").to_string(); + let c_email = committer.email().unwrap_or("").to_string(); + + author_emails.push(a_email.clone()); + committer_emails.push(c_email.clone()); + + commits_data.push(CommitData { + oid: oid_str.clone(), + author_name: author.name().unwrap_or("").to_string(), + author_email: a_email, + committer_name: committer.name().unwrap_or("").to_string(), 
+ committer_email: c_email, + message: commit.message().unwrap_or("").to_string(), + parent_ids: commit.parent_ids().map(|p| p.to_string()).collect(), + }); + } + + let user_map = self + .resolve_user_ids(&author_emails, &committer_emails, txn) + .await?; + + let all_emails: Vec<&str> = author_emails + .iter() + .chain(committer_emails.iter()) + .map(|s| s.as_str()) + .collect(); + self.ensure_collaborators(&all_emails, &user_map, txn) + .await?; + + let now = chrono::Utc::now(); + let mut batch = Vec::with_capacity(100); + + for data in commits_data { + let author_uid = user_map.get(&data.author_email).copied(); + let committer_uid = user_map.get(&data.committer_email).copied(); + + batch.push(repo_commit::ActiveModel { + repo: Set(repo_id), + oid: Set(data.oid), + author_name: Set(data.author_name), + author_email: Set(data.author_email), + author: Set(author_uid), + commiter_name: Set(data.committer_name), + commiter_email: Set(data.committer_email), + commiter: Set(committer_uid), + message: Set(data.message), + parent: Set(serde_json::json!(data.parent_ids)), + created_at: Set(now), + ..Default::default() + }); + + if batch.len() >= 100 { + RepoCommit::insert_many(std::mem::take(&mut batch)) + .exec(txn) + .await + .map_err(|e| GitError::IoError(format!("failed to insert commits: {}", e)))?; + } + } + + if !batch.is_empty() { + RepoCommit::insert_many(batch) + .exec(txn) + .await + .map_err(|e| GitError::IoError(format!("failed to insert commits: {}", e)))?; + } + + Ok(()) + } + + async fn resolve_user_ids( + &self, + author_emails: &[String], + committer_emails: &[String], + txn: &AppTransaction, + ) -> Result, GitError> { + let mut emails: Vec<&str> = + Vec::with_capacity(author_emails.len() + committer_emails.len()); + for e in author_emails { + emails.push(e.as_str()); + } + for e in committer_emails { + emails.push(e.as_str()); + } + + let rows: Vec<(String, models::UserId)> = user_email::Entity::find() + .filter(user_email::Column::Email.is_in(emails)) + 
.select_only() + .column(user_email::Column::Email) + .column(user_email::Column::User) + .into_tuple() + .all(txn) + .await + .map_err(|e| GitError::IoError(format!("failed to query user emails: {}", e)))?; + + let mut map = HashMap::new(); + for (email, uid) in rows { + map.insert(email, uid); + } + Ok(map) + } + + async fn ensure_collaborators( + &self, + emails: &[&str], + user_map: &HashMap, + txn: &AppTransaction, + ) -> Result<(), GitError> { + let repo_id = self.repo.id; + + let existing: Vec<(models::UserId,)> = RepoCollaborator::find() + .filter(repo_collaborator::Column::Repo.eq(repo_id)) + .select_only() + .column(repo_collaborator::Column::User) + .into_tuple() + .all(txn) + .await + .map_err(|e| GitError::IoError(format!("failed to query collaborators: {}", e)))?; + let existing_set: HashSet = + existing.into_iter().map(|(uid,)| uid).collect(); + + let now = chrono::Utc::now(); + + for &email in emails { + if let Some(&uid) = user_map.get(email) { + if !existing_set.contains(&uid) { + let new_collab = repo_collaborator::ActiveModel { + repo: Set(repo_id), + user: Set(uid), + scope: Set("read".to_string()), + created_at: Set(now), + ..Default::default() + }; + // Use ON CONFLICT DO NOTHING so concurrent syncs don't collide. 
+ let _ = RepoCollaborator::insert(new_collab) + .on_conflict( + OnConflict::columns([ + repo_collaborator::Column::Repo, + repo_collaborator::Column::User, + ]) + .do_nothing() + .to_owned(), + ) + .exec(txn) + .await; + } + } + } + + Ok(()) + } + + fn list_branch_names(&self) -> Vec { + let mut names = Vec::new(); + if let Ok(refs) = self.domain.repo().references() { + for r in refs.flatten() { + if r.is_branch() && !r.is_remote() { + if let Some(name) = r.name() { + names.push(name.to_string()); + } + } + } + } + names + } +} + +struct CommitData { + oid: String, + author_name: String, + author_email: String, + committer_name: String, + committer_email: String, + message: String, + parent_ids: Vec, +} diff --git a/libs/git/hook/sync/fsck.rs b/libs/git/hook/sync/fsck.rs new file mode 100644 index 0000000..d6def1d --- /dev/null +++ b/libs/git/hook/sync/fsck.rs @@ -0,0 +1,140 @@ +use crate::GitError; +use crate::hook::sync::HookMetaDataSync; +use db::database::AppTransaction; +use models::system::notify; +use sea_orm::*; +use std::collections::HashMap; +use std::process::Command; + +impl HookMetaDataSync { + pub async fn run_fsck_and_rollback_if_corrupt( + &self, + txn: &AppTransaction, + ) -> Result<(), GitError> { + let snapshot = self.snapshot_refs(); + let storage_path = self.repo.storage_path.clone(); + let logger = self.logger.clone(); + + let fsck_errors = tokio::task::spawn_blocking(move || { + let output = Command::new("git") + .arg("-C") + .arg(&storage_path) + .arg("fsck") + .arg("--full") + .output() + .map_err(|e| GitError::IoError(format!("git fsck failed: {}", e)))?; + + if !output.status.success() { + let stderr = String::from_utf8_lossy(&output.stderr).to_string(); + let stdout = String::from_utf8_lossy(&output.stdout).to_string(); + slog::warn!( + logger, + "git fsck failed with code {:?}. 
stdout: {}, stderr: {}", + output.status.code(), + stdout, + stderr + ); + return Ok(Some(format!("{}\n{}", stdout, stderr))); + } + Ok::, GitError>(None) + }) + .await + .map_err(|e| GitError::Internal(format!("spawn_blocking join error: {}", e)))??; + + if let Some(errors) = fsck_errors { + self.rollback_refs(&snapshot).await; + + let notification = notify::ActiveModel { + user: Set(self.repo.created_by), + title: Set(format!( + "Repository sync rollback triggered: {}", + self.repo.repo_name + )), + description: Set(Some("Repository integrity check failed".to_string())), + content: Set(format!( + "Repository {} sync failed and has been rolled back.\nError: {}", + self.repo.repo_name, errors + )), + kind: Set(1), + created_at: Set(chrono::Utc::now()), + ..Default::default() + }; + + notify::Entity::insert(notification) + .exec(txn) + .await + .map_err(|e| GitError::IoError(format!("failed to insert notification: {}", e)))?; + + return Err(GitError::Internal(format!( + "repository corruption detected: {}", + errors + ))); + } + + Ok(()) + } + + fn snapshot_refs(&self) -> HashMap { + let mut snapshot = HashMap::new(); + let repo = self.domain.repo(); + + if let Ok(refs) = repo.references() { + for r in refs.flatten() { + let name = match r.name() { + Some(n) => n.to_string(), + None => continue, + }; + let oid = match r.target() { + Some(o) => o.to_string(), + None => continue, + }; + if name.starts_with("refs/heads/") || name.starts_with("refs/tags/") { + snapshot.insert(name, oid); + } + } + } + snapshot + } + + async fn rollback_refs(&self, snapshot: &HashMap) { + let storage_path = self.repo.storage_path.clone(); + let logger = self.logger.clone(); + let refs: Vec<(String, String)> = snapshot + .iter() + .map(|(k, v)| (k.clone(), v.clone())) + .collect(); + + let _ = tokio::task::spawn_blocking(move || { + for (ref_name, oid) in &refs { + let status = Command::new("git") + .arg("-C") + .arg(&storage_path) + .arg("update-ref") + .arg("-m") + .arg("rollback: 
integrity check failed") + .arg(ref_name) + .arg(oid) + // no <old-oid> guard: rollback must overwrite unconditionally + .status(); + + match status { + Ok(s) if s.success() => { + slog::info!(logger, "rolled back ref {} to {}", ref_name, oid); + } + Ok(s) => { + slog::error!( + logger, + "failed to rollback ref {}: git exited with {:?}", + ref_name, + s.code() + ); + } + Err(e) => { + slog::error!(logger, "failed to rollback ref {}: {}", ref_name, e); + } + } + } + }) + .await; + } +} diff --git a/libs/git/hook/sync/gc.rs b/libs/git/hook/sync/gc.rs new file mode 100644 index 0000000..963bd8f --- /dev/null +++ b/libs/git/hook/sync/gc.rs @@ -0,0 +1,29 @@ +use crate::GitError; +use crate::hook::sync::HookMetaDataSync; +use std::process::Command; + +impl HookMetaDataSync { + pub async fn run_gc(&self) -> Result<(), GitError> { + let storage_path = self.repo.storage_path.clone(); + let logger = self.logger.clone(); + + tokio::task::spawn_blocking(move || { + let status = Command::new("git") + .arg("-C") + .arg(&storage_path) + .arg("gc") + .arg("--auto") + .arg("--quiet") + .status() + .map_err(|e| GitError::IoError(format!("git gc failed: {}", e)))?; + + if !status.success() { + slog::warn!(logger, "git gc exited with {:?}", status.code()); + } + + Ok::<(), GitError>(()) + }) + .await + .map_err(|e| GitError::Internal(format!("spawn_blocking join error: {}", e)))? 
+ } +} diff --git a/libs/git/hook/sync/lfs.rs b/libs/git/hook/sync/lfs.rs new file mode 100644 index 0000000..796af51 --- /dev/null +++ b/libs/git/hook/sync/lfs.rs @@ -0,0 +1,89 @@ +use crate::GitError; +use crate::hook::sync::HookMetaDataSync; +use db::database::AppTransaction; +use models::repos::repo_lfs_object; +use sea_orm::*; +use std::collections::HashSet; + +impl HookMetaDataSync { + pub async fn sync_lfs_objects(&self, txn: &AppTransaction) -> Result<(), GitError> { + let repo_id = self.repo.id; + + let existing: Vec = repo_lfs_object::Entity::find() + .filter(repo_lfs_object::Column::Repo.eq(repo_id)) + .all(txn) + .await + .map_err(|e| GitError::IoError(format!("failed to query lfs objects: {}", e)))?; + + let mut existing_oids: HashSet = existing.into_iter().map(|o| o.oid).collect(); + + let local_oids = self.domain.lfs_object_list()?; + let now = chrono::Utc::now(); + + let mut new_objects = Vec::new(); + + for oid in local_oids { + let oid_str = oid.as_str().to_string(); + + if existing_oids.contains(&oid_str) { + existing_oids.remove(&oid_str); + continue; + } + + let path = match self.domain.lfs_object_path(&oid) { + Ok(p) => p, + Err(e) => { + slog::warn!( + self.logger, + "invalid LFS OID in local objects directory: {}", + e + ); + continue; + } + }; + let size = if let Ok(meta) = std::fs::metadata(&path) { + meta.len() as i64 + } else { + continue; + }; + + let storage_path = path.to_string_lossy().to_string(); + + new_objects.push(repo_lfs_object::ActiveModel { + repo: Set(repo_id), + oid: Set(oid_str), + size: Set(size), + storage_path: Set(storage_path), + uploaded_by: Set(None), + uploaded_at: Set(now), + ..Default::default() + }); + } + + if !new_objects.is_empty() { + // Insert in batches + for chunk in new_objects.chunks(100) { + repo_lfs_object::Entity::insert_many(chunk.to_vec()) + .exec(txn) + .await + .map_err(|e| { + GitError::IoError(format!("failed to insert lfs objects: {}", e)) + })?; + } + } + + // Remove objects that no longer 
exist on disk + if !existing_oids.is_empty() { + repo_lfs_object::Entity::delete_many() + .filter(repo_lfs_object::Column::Repo.eq(repo_id)) + .filter(repo_lfs_object::Column::Oid.is_in(existing_oids)) + .exec(txn) + .await + .map_err(|e| { + GitError::IoError(format!("failed to delete stale lfs objects: {}", e)) + })?; + } + + Ok(()) + } +} diff --git a/libs/git/hook/sync/lock.rs b/libs/git/hook/sync/lock.rs new file mode 100644 index 0000000..d9bb081 --- /dev/null +++ b/libs/git/hook/sync/lock.rs @@ -0,0 +1,63 @@ +use crate::GitError; +use crate::hook::sync::HookMetaDataSync; + +impl HookMetaDataSync { + const LOCK_TTL_SECS: u64 = 60; + + pub async fn acquire_lock(&self) -> Result { + let lock_key = format!("git:repo:lock:{}", self.repo.id); + let lock_value = format!("{}:{}", uuid::Uuid::new_v4(), std::process::id()); + + let mut conn = self + .cache + .conn() + .await + .map_err(|e| GitError::IoError(format!("failed to get redis connection: {}", e)))?; + + let result: bool = redis::cmd("SET") + .arg(&lock_key) + .arg(&lock_value) + .arg("NX") + .arg("EX") + .arg(Self::LOCK_TTL_SECS) + .query_async(&mut conn) + .await + .map_err(|e| GitError::IoError(format!("failed to acquire lock: {}", e)))?; + + if result { + Ok(lock_value) + } else { + Err(GitError::Locked(format!( + "repository {} is locked by another process", + self.repo.id + ))) + } + } + + pub async fn release_lock(&self, lock_value: &str) -> Result<(), GitError> { + let lock_key = format!("git:repo:lock:{}", self.repo.id); + + let mut conn = self + .cache + .conn() + .await + .map_err(|e| GitError::IoError(format!("failed to get redis connection: {}", e)))?; + + let script = r#" + if redis.call("get", KEYS[1]) == ARGV[1] then + return redis.call("del", KEYS[1]) + else + return 0 + end + "#; + + let _: i32 = redis::Script::new(script) + .key(&lock_key) + .arg(lock_value) + .invoke_async(&mut conn) + .await + .map_err(|e| GitError::IoError(format!("failed to release lock: {}", e)))?; + + Ok(()) + } +} 
diff --git a/libs/git/hook/sync/mod.rs b/libs/git/hook/sync/mod.rs new file mode 100644 index 0000000..af840e4 --- /dev/null +++ b/libs/git/hook/sync/mod.rs @@ -0,0 +1,364 @@ +pub mod branch; +pub mod commit; +pub mod fsck; +pub mod gc; +pub mod lfs; +pub mod lock; +pub mod status; +pub mod tag; + +use db::cache::AppCache; +use db::database::AppDatabase; +use models::projects::project_skill::{Column as SkillCol, Entity as SkillEntity}; +use models::projects::project_skill::ActiveModel as SkillActiveModel; +use models::repos::repo::Model as RepoModel; +use models::RepoId; +use models::ActiveModelTrait; +use sea_orm::{ColumnTrait, EntityTrait, QueryFilter, Set}; +use slog::Logger; +use std::collections::HashMap; +use std::path::Path; + +use crate::GitDomain; + +// ── Skill discovery (local, no service crate dependency) ──────────────────────── + +use sha1::Digest; + +/// Recursively scan `base` for files named `SKILL.md`. +/// The skill slug is `{short_repo_id}/{parent_dir_name}` to ensure uniqueness across repos. +/// Populates `commit_sha` (current HEAD) and `blob_hash` for each discovered file. 
+fn scan_skills_from_dir( + base: &Path, + repo_id: &RepoId, + commit_sha: &str, +) -> Result, std::io::Error> { + let repo_id_prefix = &repo_id.to_string()[..8]; + let mut discovered = Vec::new(); + let mut stack = vec![base.to_path_buf()]; + + while let Some(dir) = stack.pop() { + let entries = match std::fs::read_dir(&dir) { + Ok(e) => e, + Err(_) => continue, + }; + for entry in entries.flatten() { + let path = entry.path(); + if path.is_dir() { + stack.push(path); + } else if path.file_name().and_then(|n| n.to_str()) == Some("SKILL.md") { + if let Some(dir_name) = path.parent() + .and_then(|p| p.file_name()) + .and_then(|n| n.to_str()) + .filter(|s| !s.starts_with('.')) + { + let slug = format!("{}/{}", repo_id_prefix, dir_name); + if let Ok(raw) = std::fs::read(&path) { + let blob_hash = git_blob_hash(&raw); + let mut skill = parse_skill_content(&slug, &raw); + skill.commit_sha = Some(commit_sha.to_string()); + skill.blob_hash = Some(blob_hash); + discovered.push(skill); + } + } + } + } + } + Ok(discovered) +} + +/// Compute the git blob SHA-1 hash of `content`. +/// Format: "blob {len}\0{data}" +fn git_blob_hash(content: &[u8]) -> String { + let size = content.len(); + let header = format!("blob {}\0", size); + let mut hasher = sha1::Sha1::new(); + hasher.update(header.as_bytes()); + hasher.update(content); + hex::encode(hasher.finalize()) +} + +/// Parse a SKILL.md file (raw bytes) to extract name, description, content, and frontmatter metadata. 
+fn parse_skill_content(slug: &str, raw: &[u8]) -> DiscoveredSkill { + let content = String::from_utf8_lossy(raw); + let (frontmatter, body) = extract_frontmatter(&content); + let metadata: serde_json::Value = frontmatter + .and_then(|fm| serde_json::from_str(fm).ok()) + .unwrap_or_default(); + + let name = metadata + .get("name") + .and_then(|v| v.as_str()) + .map(String::from) + .unwrap_or_else(|| slug.replace('-', " ").replace('_', " ")); + + let description = metadata + .get("description") + .and_then(|v| v.as_str()) + .map(String::from); + + DiscoveredSkill { + slug: slug.to_string(), + name, + description, + content: body.trim().to_string(), + metadata, + commit_sha: None, + blob_hash: None, + } +} + +/// A skill discovered in a repository. +struct DiscoveredSkill { + slug: String, + name: String, + description: Option, + content: String, + metadata: serde_json::Value, + commit_sha: Option, + blob_hash: Option, +} + +fn extract_frontmatter(raw: &str) -> (Option<&str>, &str) { + let trimmed = raw.trim_start(); + if !trimmed.starts_with("---") { + return (None, trimmed); + } + if let Some(end) = trimmed[3..].find("---") { + let fm = &trimmed[3..end + 3]; + let rest = trimmed[3 + end + 3..].trim_start(); + (Some(fm), rest) + } else { + (None, trimmed) + } +} + +#[derive(Clone)] +pub struct HookMetaDataSync { + pub db: AppDatabase, + pub cache: AppCache, + pub repo: RepoModel, + pub domain: GitDomain, + pub logger: Logger, +} + +impl HookMetaDataSync { + pub fn new( + db: AppDatabase, + cache: AppCache, + repo: RepoModel, + logger: Logger, + ) -> Result { + let domain = GitDomain::from_model(repo.clone())?; + Ok(Self { + db, + cache, + repo, + domain, + logger, + }) + } + + pub async fn sync(&self) -> Result<(), crate::GitError> { + let lock_value = self.acquire_lock().await?; + + let res = self.sync_internal().await; + + if let Err(ref e) = res { + slog::error!(self.logger, "sync failed: {}", e); + } + + if let Err(release_err) = 
self.release_lock(&lock_value).await { + slog::error!(self.logger, "failed to release lock: {}", release_err); + } + + res + } + + async fn sync_internal(&self) -> Result<(), crate::GitError> { + let mut txn = + self.db.begin().await.map_err(|e| { + crate::GitError::IoError(format!("failed to begin transaction: {}", e)) + })?; + + self.sync_refs(&mut txn).await?; + self.sync_commits(&mut txn).await?; + self.sync_tags(&mut txn).await?; + self.sync_lfs_objects(&mut txn).await?; + + self.run_fsck_and_rollback_if_corrupt(&mut txn).await?; + + txn.commit().await.map_err(|e| { + crate::GitError::IoError(format!("failed to commit transaction: {}", e)) + })?; + + self.run_gc().await?; + + self.sync_skills().await; + + Ok(()) + } + + /// Returns a list of (branch_name, oid) for all local branches. + pub fn list_branch_tips(&self) -> Vec<(String, String)> { + let repo = self.domain.repo(); + let mut tips = Vec::new(); + if let Ok(refs) = repo.references() { + for ref_result in refs { + if let Ok(r) = ref_result { + if r.is_branch() && !r.is_remote() { + if let Some(name) = r.name() { + // name is like "refs/heads/main" -> extract "main" + let branch = name.strip_prefix("refs/heads/").unwrap_or(name); + if let Some(target) = r.target() { + tips.push((branch.to_string(), target.to_string())); + } + } + } + } + } + } + tips + } + + /// Returns a list of (tag_name, oid) for all tags. + pub fn list_tag_tips(&self) -> Vec<(String, String)> { + let repo = self.domain.repo(); + let mut tips = Vec::new(); + if let Ok(refs) = repo.references() { + for ref_result in refs { + if let Ok(r) = ref_result { + if r.is_tag() { + if let Some(name) = r.name() { + // name is like "refs/tags/v1.0" -> extract "v1.0" + let tag = name.strip_prefix("refs/tags/").unwrap_or(name); + if let Some(target) = r.target() { + tips.push((tag.to_string(), target.to_string())); + } + } + } + } + } + } + tips + } + + /// Scan the repository for `SKILL.md` files and sync skills to the project. 
+ /// + /// This is a best-effort operation — failures are logged but do not fail the sync. + pub async fn sync_skills(&self) { + let project_uid = self.repo.project; + + let repo_root = match self.domain.repo().workdir() { + Some(p) => p, + None => return, + }; + + // Get current HEAD commit SHA for attribution + let commit_sha = self.domain.repo().head().ok() + .and_then(|h| h.target()) + .map(|oid| oid.to_string()) + .unwrap_or_default(); + + // Discover skills from the filesystem + let discovered = match scan_skills_from_dir(repo_root, &self.repo.id, &commit_sha) { + Ok(d) => d, + Err(e) => { + slog::warn!(self.logger, "failed to scan skills directory: {}", e); + return; + } + }; + + if discovered.is_empty() { + return; + } + + let now = chrono::Utc::now(); + let mut created = 0i64; + let mut updated = 0i64; + let mut removed = 0i64; + + // Collect existing repo-sourced skills for this repo + let existing: Vec<_> = match SkillEntity::find() + .filter(SkillCol::ProjectUuid.eq(project_uid)) + .filter(SkillCol::Source.eq("repo")) + .filter(SkillCol::RepoId.eq(self.repo.id)) + .all(&self.db) + .await + { + Ok(e) => e, + Err(e) => { + slog::warn!(self.logger, "failed to query existing skills: {}", e); + return; + } + }; + + let existing_by_slug: HashMap<_, _> = existing + .into_iter() + .map(|s| (s.slug.clone(), s)) + .collect(); + + let mut seen_slugs = std::collections::HashSet::new(); + + for skill in discovered { + seen_slugs.insert(skill.slug.clone()); + let json_meta = serde_json::to_value(&skill.metadata).unwrap_or_default(); + + if let Some(existing_skill) = existing_by_slug.get(&skill.slug) { + if existing_skill.content != skill.content + || existing_skill.metadata != json_meta + || existing_skill.commit_sha.as_ref() != skill.commit_sha.as_ref() + || existing_skill.blob_hash.as_ref() != skill.blob_hash.as_ref() + { + let mut active: SkillActiveModel = existing_skill.clone().into(); + active.content = Set(skill.content); + active.metadata = Set(json_meta); + 
active.commit_sha = Set(skill.commit_sha); + active.blob_hash = Set(skill.blob_hash); + active.updated_at = Set(now); + if active.update(&self.db).await.is_ok() { + updated += 1; + } + } + } else { + let active = SkillActiveModel { + id: Set(0), + project_uuid: Set(project_uid), + slug: Set(skill.slug.clone()), + name: Set(skill.name), + description: Set(skill.description), + source: Set("repo".to_string()), + repo_id: Set(Some(self.repo.id)), + commit_sha: Set(skill.commit_sha), + blob_hash: Set(skill.blob_hash), + content: Set(skill.content), + metadata: Set(json_meta), + enabled: Set(true), + created_by: Set(None), + created_at: Set(now), + updated_at: Set(now), + }; + if SkillEntity::insert(active).exec(&self.db).await.is_ok() { + created += 1; + } + } + } + + // Remove skills no longer in the repo + for (slug, old_skill) in existing_by_slug { + if !seen_slugs.contains(&slug) { + if SkillEntity::delete_by_id(old_skill.id).exec(&self.db).await.is_ok() { + removed += 1; + } + } + } + + if created > 0 || updated > 0 || removed > 0 { + slog::info!( + self.logger, + "skills synced: created={}, updated={}, removed={}", + created, updated, removed + ); + } + } +} diff --git a/libs/git/hook/sync/remote.rs b/libs/git/hook/sync/remote.rs new file mode 100644 index 0000000..e69de29 diff --git a/libs/git/hook/sync/status.rs b/libs/git/hook/sync/status.rs new file mode 100644 index 0000000..ce6f708 --- /dev/null +++ b/libs/git/hook/sync/status.rs @@ -0,0 +1,98 @@ +use crate::GitError; +use crate::hook::sync::HookMetaDataSync; + +#[derive(Debug, Clone)] +pub enum SyncStatus { + Pending, + Processing, + Success, + Failed(String), +} + +impl std::fmt::Display for SyncStatus { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + SyncStatus::Pending => write!(f, "pending"), + SyncStatus::Processing => write!(f, "processing"), + SyncStatus::Success => write!(f, "success"), + SyncStatus::Failed(_) => write!(f, "failed"), + } + } +} + +impl 
HookMetaDataSync { + const STATUS_TTL_SECS: u64 = 86400; + + pub async fn update_sync_status(&self, status: SyncStatus) -> Result<(), GitError> { + let key = format!("git:repo:sync_status:{}", self.repo.id); + let status_str = status.to_string(); + + let mut conn = self + .cache + .conn() + .await + .map_err(|e| GitError::IoError(format!("failed to get redis connection: {}", e)))?; + + let _: () = redis::cmd("SETEX") + .arg(&key) + .arg(Self::STATUS_TTL_SECS) + .arg(&status_str) + .query_async(&mut conn) + .await + .map_err(|e| GitError::IoError(format!("failed to set sync status: {}", e)))?; + + if let SyncStatus::Failed(ref error_msg) = status { + let error_key = format!("git:repo:sync_error:{}", self.repo.id); + let _: () = redis::cmd("SETEX") + .arg(&error_key) + .arg(Self::STATUS_TTL_SECS) + .arg(error_msg) + .query_async(&mut conn) + .await + .map_err(|e| GitError::IoError(format!("failed to set sync error: {}", e)))?; + } + + Ok(()) + } + + pub async fn get_sync_status(&self) -> Result, GitError> { + let key = format!("git:repo:sync_status:{}", self.repo.id); + let error_key = format!("git:repo:sync_error:{}", self.repo.id); + + let mut conn = self + .cache + .conn() + .await + .map_err(|e| GitError::IoError(format!("failed to get redis connection: {}", e)))?; + + let status_str: Option = + redis::cmd("GET") + .arg(&key) + .query_async(&mut conn) + .await + .map_err(|e| GitError::IoError(format!("failed to get sync status: {}", e)))?; + + match status_str { + Some(status) => { + let error_msg: Option = redis::cmd("GET") + .arg(&error_key) + .query_async(&mut conn) + .await + .map_err(|e| GitError::IoError(format!("failed to get sync error: {}", e)))?; + + let sync_status = match status.as_str() { + "pending" => SyncStatus::Pending, + "processing" => SyncStatus::Processing, + "success" => SyncStatus::Success, + "failed" => { + SyncStatus::Failed(error_msg.unwrap_or_else(|| "Unknown error".to_string())) + } + _ => SyncStatus::Pending, + }; + + 
Ok(Some(sync_status)) + } + None => Ok(None), + } + } +} diff --git a/libs/git/hook/sync/tag.rs b/libs/git/hook/sync/tag.rs new file mode 100644 index 0000000..bf92cec --- /dev/null +++ b/libs/git/hook/sync/tag.rs @@ -0,0 +1,99 @@ +use crate::GitError; +use crate::hook::sync::HookMetaDataSync; +use db::database::AppTransaction; +use models::repos::repo_tag; +use sea_orm::prelude::Expr; +use sea_orm::*; +use std::collections::HashSet; + +impl HookMetaDataSync { + pub async fn sync_tags(&self, txn: &AppTransaction) -> Result<(), GitError> { + let repo_id = self.repo.id; + let repo = self.domain.repo(); + + let existing: Vec = repo_tag::Entity::find() + .filter(repo_tag::Column::Repo.eq(repo_id)) + .all(txn) + .await + .map_err(|e| GitError::IoError(format!("failed to query tags: {}", e)))?; + let mut existing_names: HashSet = existing.iter().map(|t| t.name.clone()).collect(); + + let tag_names = repo + .tag_names(None) + .map_err(|e| GitError::Internal(e.to_string()))?; + + for tag_name in tag_names.iter().flatten() { + let full_ref = format!("refs/tags/{}", tag_name); + let reference = match repo.find_reference(&full_ref) { + Ok(r) => r, + Err(_) => continue, + }; + + let target_oid = match reference.target() { + Some(oid) => oid.to_string(), + None => continue, + }; + + let (description, tagger_name, tagger_email) = if reference.is_tag() { + if let Ok(tag) = reference.peel_to_tag() { + let description = tag.message().map(|s| s.to_string()); + if let Some(tagger) = tag.tagger() { + ( + description, + tagger.name().unwrap_or("").to_string(), + tagger.email().unwrap_or("").to_string(), + ) + } else { + (description, String::new(), String::new()) + } + } else { + (None, String::new(), String::new()) + } + } else { + (None, String::new(), String::new()) + }; + + if existing_names.contains(tag_name) { + existing_names.remove(tag_name); + repo_tag::Entity::update_many() + .filter(repo_tag::Column::Repo.eq(repo_id)) + .filter(repo_tag::Column::Name.eq(tag_name)) + 
.col_expr(repo_tag::Column::Oid, Expr::value(&target_oid)) + .col_expr(repo_tag::Column::Description, Expr::value(description)) + .col_expr(repo_tag::Column::TaggerName, Expr::value(&tagger_name)) + .col_expr(repo_tag::Column::TaggerEmail, Expr::value(&tagger_email)) + .exec(txn) + .await + .map_err(|e| GitError::IoError(format!("failed to update tag: {}", e)))?; + } else { + let new_tag = repo_tag::ActiveModel { + repo: Set(repo_id), + name: Set(tag_name.to_string()), + oid: Set(target_oid), + color: Set(None), + description: Set(description), + created_at: Set(chrono::Utc::now()), + tagger_name: Set(tagger_name), + tagger_email: Set(tagger_email), + tagger: Set(None), + ..Default::default() + }; + new_tag + .insert(txn) + .await + .map_err(|e| GitError::IoError(format!("failed to insert tag: {}", e)))?; + } + } + + if !existing_names.is_empty() { + repo_tag::Entity::delete_many() + .filter(repo_tag::Column::Repo.eq(repo_id)) + .filter(repo_tag::Column::Name.is_in(existing_names)) + .exec(txn) + .await + .map_err(|e| GitError::IoError(format!("failed to delete stale tags: {}", e)))?; + } + + Ok(()) + } +} diff --git a/libs/git/hook/webhook_dispatch.rs b/libs/git/hook/webhook_dispatch.rs new file mode 100644 index 0000000..53dca25 --- /dev/null +++ b/libs/git/hook/webhook_dispatch.rs @@ -0,0 +1,410 @@ +use db::database::AppDatabase; +use serde::Deserialize; +use sha2::{Digest, Sha256}; +use std::time::Duration; +use tokio::time::timeout; + +/// Compute HMAC-SHA256 of `body` with `secret`, returning "sha256=" or None if secret is empty. 
+pub fn sign_payload(body: &[u8], secret: &str) -> Option { + if secret.is_empty() { + return None; + } + + // HMAC-SHA256: inner = SHA256(k XOR ipad || text), outer = SHA256(k XOR opad || inner) + const IPAD: u8 = 0x36; + const OPAD: u8 = 0x5c; + const BLOCK_SIZE: usize = 64; // SHA256 block size + + // Pad or hash key to 64 bytes + let key = if secret.len() > BLOCK_SIZE { + Sha256::digest(secret.as_bytes()).to_vec() + } else { + secret.as_bytes().to_vec() + }; + let mut key_block = vec![0u8; BLOCK_SIZE]; + key_block[..key.len()].copy_from_slice(&key); + + // k_ipad = key_block XOR ipad, k_opad = key_block XOR opad + let mut k_ipad = [0u8; BLOCK_SIZE]; + let mut k_opad = [0u8; BLOCK_SIZE]; + for i in 0..BLOCK_SIZE { + k_ipad[i] = key_block[i] ^ IPAD; + k_opad[i] = key_block[i] ^ OPAD; + } + + // inner = SHA256(k_ipad || body) + let mut inner_hasher = Sha256::new(); + inner_hasher.update(&k_ipad); + inner_hasher.update(body); + let inner = inner_hasher.finalize(); + + // outer = SHA256(k_opad || inner) + let mut outer_hasher = Sha256::new(); + outer_hasher.update(&k_opad); + outer_hasher.update(inner); + let result = outer_hasher.finalize(); + + Some(format!( + "sha256={}", + result + .iter() + .map(|b| format!("{:02x}", b)) + .collect::() + )) +} + +#[derive(Debug, Clone, Default, Deserialize)] +pub struct WebhookEvents { + pub push: bool, + pub tag_push: bool, + pub pull_request: bool, + pub issue_comment: bool, + pub release: bool, +} + +impl From for WebhookEvents { + fn from(v: serde_json::Value) -> Self { + Self { + push: v.get("push").and_then(|v| v.as_bool()).unwrap_or(false), + tag_push: v.get("tag_push").and_then(|v| v.as_bool()).unwrap_or(false), + pull_request: v + .get("pull_request") + .and_then(|v| v.as_bool()) + .unwrap_or(false), + issue_comment: v + .get("issue_comment") + .and_then(|v| v.as_bool()) + .unwrap_or(false), + release: v.get("release").and_then(|v| v.as_bool()).unwrap_or(false), + } + } +} + +#[derive(Debug, serde::Serialize)] +pub 
struct PushPayload { + #[serde(rename = "ref")] + pub r#ref: String, + pub before: String, + pub after: String, + pub repository: RepositoryPayload, + pub pusher: PusherPayload, + #[serde(skip_serializing_if = "Vec::is_empty")] + pub commits: Vec, +} + +#[derive(Debug, serde::Serialize)] +pub struct TagPushPayload { + #[serde(rename = "ref")] + pub r#ref: String, + pub before: String, + pub after: String, + pub repository: RepositoryPayload, + pub pusher: PusherPayload, +} + +#[derive(Debug, serde::Serialize)] +pub struct RepositoryPayload { + pub id: String, + pub name: String, + pub full_name: String, + pub namespace: String, + pub default_branch: String, +} + +#[derive(Debug, serde::Serialize)] +pub struct PusherPayload { + pub name: String, + pub email: String, +} + +#[derive(Debug, serde::Serialize)] +pub struct CommitPayload { + pub id: String, + pub message: String, + pub author: AuthorPayload, +} + +#[derive(Debug, serde::Serialize)] +pub struct AuthorPayload { + pub name: String, + pub email: String, +} + +#[derive(Debug)] +pub enum DispatchError { + Timeout, + ConnectionFailed, + RequestFailed(String), + HttpError(u16), +} + +impl std::fmt::Display for DispatchError { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + DispatchError::Timeout => write!(f, "timeout"), + DispatchError::ConnectionFailed => write!(f, "connection failed"), + DispatchError::RequestFailed(s) => write!(f, "request failed: {}", s), + DispatchError::HttpError(code) => write!(f, "http error: {}", code), + } + } +} + +pub async fn deliver( + client: &reqwest::Client, + url: &str, + secret: Option<&str>, + content_type: &str, + body: &[u8], +) -> Result<(), DispatchError> { + let mut req = client + .post(url) + .header("Content-Type", content_type) + .header("User-Agent", "Code-Git-Hook/1.0") + .timeout(Duration::from_secs(10)) + .body(body.to_vec()); + + if let Some(secret) = secret { + if let Some(sig) = sign_payload(body, secret) { + req = 
req.header("X-Hub-Signature-256", sig); + } + } + + let resp = req.send().await.map_err(|e| { + if e.is_timeout() { + DispatchError::Timeout + } else if e.is_connect() { + DispatchError::ConnectionFailed + } else { + DispatchError::RequestFailed(e.to_string()) + } + })?; + + if resp.status().is_success() { + Ok(()) + } else { + Err(DispatchError::HttpError(resp.status().as_u16())) + } +} + +pub struct CommitDispatch { + pub id: String, + pub message: String, + pub author_name: String, + pub author_email: String, +} + +pub enum WebhookEventKind { + Push { + r#ref: String, + before: String, + after: String, + commits: Vec, + }, + TagPush { + r#ref: String, + before: String, + after: String, + }, +} + +/// Dispatch webhooks for a repository after a push or tag event. +/// Queries active webhooks from the DB and sends HTTP POST requests. +pub async fn dispatch_repo_webhooks( + db: &AppDatabase, + http: &reqwest::Client, + logs: &slog::Logger, + repo_uuid: &str, + namespace: &str, + repo_name: &str, + default_branch: &str, + pusher_name: &str, + pusher_email: &str, + event: WebhookEventKind, +) { + use models::repos::repo_webhook::{Column as RwCol, Entity as RepoWebhookEntity}; + use models::{ColumnTrait, EntityTrait, QueryFilter, Uuid}; + + let webhooks: Vec<::Model> = match RepoWebhookEntity::find() + .filter(RwCol::Repo.eq(Uuid::parse_str(repo_uuid).ok())) + .all(db.reader()) + .await + { + Ok(ws) => ws, + Err(e) => { + slog::error!(logs, "failed to query webhooks: {}", e; "repo" => repo_uuid); + return; + } + }; + + if webhooks.is_empty() { + return; + } + + for webhook in webhooks { + let event_config: WebhookEvents = + serde_json::from_value(webhook.event.clone()).unwrap_or_default(); + let content_type = webhook + .event + .get("content_type") + .and_then(|v: &serde_json::Value| v.as_str()) + .unwrap_or("application/json"); + let url = webhook.url.as_deref().unwrap_or(""); + + if url.is_empty() { + continue; + } + + let secret = webhook.secret_key.as_deref(); + + 
match &event { + WebhookEventKind::Push { + r#ref, + before, + after, + commits, + } => { + if !event_config.push { + continue; + } + let payload = PushPayload { + r#ref: r#ref.clone(), + before: before.clone(), + after: after.clone(), + repository: RepositoryPayload { + id: repo_uuid.to_owned(), + name: repo_name.to_owned(), + full_name: format!("{}/{}", namespace, repo_name), + namespace: namespace.to_owned(), + default_branch: default_branch.to_owned(), + }, + pusher: PusherPayload { + name: pusher_name.to_owned(), + email: pusher_email.to_owned(), + }, + commits: commits + .iter() + .map(|c| CommitPayload { + id: c.id.clone(), + message: c.message.clone(), + author: AuthorPayload { + name: c.author_name.clone(), + email: c.author_email.clone(), + }, + }) + .collect(), + }; + + let body = match serde_json::to_vec(&payload) { + Ok(b) => b, + Err(e) => { + slog::error!(logs, "failed to serialize push payload"; "error" => e.to_string()); + continue; + } + }; + + let webhook_id = webhook.id; + match timeout( + Duration::from_secs(10), + deliver(http, url, secret, content_type, &body), + ) + .await + { + Ok(Ok(())) => { + slog::info!(logs, "push webhook delivered"; "webhook_id" => webhook_id, "url" => url); + let _ = touch_webhook(db, webhook_id, true, logs).await; + } + Ok(Err(e)) => { + slog::warn!(logs, "push webhook delivery failed"; "error" => e.to_string(), "webhook_id" => webhook_id, "url" => url); + let _ = touch_webhook(db, webhook_id, false, logs).await; + } + Err(_) => { + slog::warn!(logs, "push webhook timed out"; "webhook_id" => webhook_id, "url" => url); + let _ = touch_webhook(db, webhook_id, false, logs).await; + } + } + } + WebhookEventKind::TagPush { + r#ref, + before, + after, + } => { + if !event_config.tag_push { + continue; + } + let payload = TagPushPayload { + r#ref: r#ref.clone(), + before: before.clone(), + after: after.clone(), + repository: RepositoryPayload { + id: repo_uuid.to_owned(), + name: repo_name.to_owned(), + full_name: 
format!("{}/{}", namespace, repo_name), + namespace: namespace.to_owned(), + default_branch: default_branch.to_owned(), + }, + pusher: PusherPayload { + name: pusher_name.to_owned(), + email: pusher_email.to_owned(), + }, + }; + + let body = match serde_json::to_vec(&payload) { + Ok(b) => b, + Err(e) => { + slog::error!(logs, "failed to serialize tag payload"; "error" => e.to_string()); + continue; + } + }; + + let webhook_id = webhook.id; + match timeout( + Duration::from_secs(10), + deliver(http, url, secret, content_type, &body), + ) + .await + { + Ok(Ok(())) => { + slog::info!(logs, "tag webhook delivered"; "webhook_id" => webhook_id, "url" => url); + let _ = touch_webhook(db, webhook_id, true, logs).await; + } + Ok(Err(e)) => { + slog::warn!(logs, "tag webhook delivery failed"; "error" => e.to_string(), "webhook_id" => webhook_id, "url" => url); + let _ = touch_webhook(db, webhook_id, false, logs).await; + } + Err(_) => { + slog::warn!(logs, "tag webhook timed out"; "webhook_id" => webhook_id, "url" => url); + let _ = touch_webhook(db, webhook_id, false, logs).await; + } + } + } + } + } +} + +async fn touch_webhook(db: &AppDatabase, webhook_id: i64, success: bool, logs: &slog::Logger) { + use models::repos::repo_webhook::{Column as RwCol, Entity as RepoWebhookEntity}; + use models::{ColumnTrait, EntityTrait, QueryFilter}; + use sea_orm::prelude::Expr; + + let result: Result = if success { + RepoWebhookEntity::update_many() + .filter(RwCol::Id.eq(webhook_id)) + .col_expr( + RwCol::LastDeliveredAt, + Expr::value(Some(chrono::Utc::now())), + ) + .col_expr(RwCol::TouchCount, Expr::col(RwCol::TouchCount).add(1)) /* increment the delivery counter instead of resetting it to 1 */ + .exec(db.writer()) + .await + } else { + RepoWebhookEntity::update_many() + .filter(RwCol::Id.eq(webhook_id)) + .col_expr(RwCol::TouchCount, Expr::col(RwCol::TouchCount).add(1)) /* increment the delivery counter instead of resetting it to 1 */ + .exec(db.writer()) + .await + }; + + if let Err(e) = result { + slog::warn!(logs, "failed to update webhook touch"; "error" => e.to_string()); + } +} diff --git a/libs/git/http/auth.rs new 
file mode 100644 index 0000000..aa592f7 --- /dev/null +++ b/libs/git/http/auth.rs @@ -0,0 +1,66 @@ +use crate::http::utils::{extract_basic_credentials, hash_access_key}; +use crate::ssh::authz::SshAuthService; +use actix_web::{Error, HttpRequest}; +use db::database::AppDatabase; +use models::repos::repo; +use models::users::{user, user_token}; +use sea_orm::sqlx::types::chrono; +use sea_orm::*; + +pub async fn verify_access_token( + db: &AppDatabase, + username: &str, + access_key: &str, +) -> Result { + let user = user::Entity::find() + .filter(user::Column::Username.eq(username)) + .one(db.reader()) + .await + .map_err(|_| actix_web::error::ErrorUnauthorized("Invalid username or access key"))? + .ok_or_else(|| actix_web::error::ErrorUnauthorized("Invalid username or access key"))?; + + let token_hash = hash_access_key(access_key); + + let token = user_token::Entity::find() + .filter(user_token::Column::User.eq(user.uid)) + .filter(user_token::Column::TokenHash.eq(token_hash)) + .filter(user_token::Column::IsRevoked.eq(false)) + .one(db.reader()) + .await + .map_err(|_| actix_web::error::ErrorUnauthorized("Invalid username or access key"))? 
+ .ok_or_else(|| actix_web::error::ErrorUnauthorized("Invalid username or access key"))?; + + if let Some(expires_at) = token.expires_at { + if expires_at < chrono::Utc::now() { + return Err(actix_web::error::ErrorUnauthorized( + "Access key has expired", + )); + } + } + + Ok(user) +} + +pub async fn authorize_repo_access( + req: &HttpRequest, + db: &AppDatabase, + repo: &repo::Model, + is_write: bool, +) -> Result<(), Error> { + if !is_write && !repo.is_private { + return Ok(()); + } + + let (username, access_key) = extract_basic_credentials(req)?; + let user = verify_access_token(db, &username, &access_key).await?; + let authz = SshAuthService::new(db.clone(), slog::Logger::root(slog::Discard, slog::o!())); + + let can_access = authz.check_repo_permission(&user, repo, is_write).await; + if !can_access { + return Err(actix_web::error::ErrorForbidden( + "No permission for repository", + )); + } + + Ok(()) +} diff --git a/libs/git/http/handler.rs b/libs/git/http/handler.rs new file mode 100644 index 0000000..ad0bcdd --- /dev/null +++ b/libs/git/http/handler.rs @@ -0,0 +1,321 @@ +use actix_web::{Error, HttpResponse, web}; +use async_stream::stream; +use futures_util::Stream; +use futures_util::StreamExt; +use models::repos::{repo, repo_branch_protect}; +use sea_orm::*; +use std::path::PathBuf; +use std::pin::Pin; +use std::time::Duration; +use tokio::io::AsyncWriteExt; + +use db::database::AppDatabase; + +type ByteStream = Pin, std::io::Error>>>>; + +const PRE_PACK_LIMIT: usize = 1_048_576; +const GIT_OPERATION_TIMEOUT: Duration = Duration::from_secs(30); + +pub fn is_valid_oid(oid: &str) -> bool { + oid.len() == 40 && oid.chars().all(|c| c.is_ascii_hexdigit()) +} + +pub struct GitHttpHandler { + storage_path: PathBuf, + repo: repo::Model, + db: AppDatabase, +} + +impl GitHttpHandler { + pub fn new(storage_path: PathBuf, repo: repo::Model, db: AppDatabase) -> Self { + Self { + storage_path, + repo, + db, + } + } + + pub async fn upload_pack(&self, payload: 
web::Payload) -> Result { + self.handle_git_rpc("upload-pack", payload).await + } + + pub async fn receive_pack(&self, payload: web::Payload) -> Result { + self.handle_git_rpc("receive-pack", payload).await + } + + pub async fn info_refs(&self, service: &str) -> Result { + let git_cmd = match service { + "git-upload-pack" => "upload-pack", + "git-receive-pack" => "receive-pack", + _ => { + return Ok(HttpResponse::BadRequest().body("Invalid service")); + } + }; + + let output = tokio::process::Command::new("git") + .arg(git_cmd) + .arg("--stateless-rpc") + .arg("--advertise-refs") + .arg(&self.storage_path) + .output() + .await + .map_err(|e| { + actix_web::error::ErrorInternalServerError(format!("Failed to execute git: {}", e)) + })?; + + if !output.status.success() { + let stderr = String::from_utf8_lossy(&output.stderr); + return Ok( + HttpResponse::InternalServerError().body(format!("Git command failed: {}", stderr)) + ); + } + + let mut response_body = Vec::new(); + let header = format!("# service={}\n", service); + write_pkt_line(&mut response_body, header.as_bytes()); + write_flush_pkt(&mut response_body); + response_body.extend_from_slice(&output.stdout); + + Ok(HttpResponse::Ok() + .content_type(format!("application/x-{}-advertisement", service)) + .insert_header(("Cache-Control", "no-cache")) + .body(response_body)) + } + + async fn handle_git_rpc( + &self, + service: &str, + mut payload: web::Payload, + ) -> Result { + let mut child = tokio::process::Command::new("git") + .arg(service) + .arg("--stateless-rpc") + .arg(&self.storage_path) + .stdin(std::process::Stdio::piped()) + .stdout(std::process::Stdio::piped()) + .stderr(std::process::Stdio::piped()) + .kill_on_drop(true) + .spawn() + .map_err(|e| { + actix_web::error::ErrorInternalServerError(format!("Failed to spawn git: {}", e)) + })?; + + let stream = stream! 
{ + while let Some(chunk) = payload.next().await { + match chunk { + Ok(bytes) => { yield Ok(bytes.to_vec()); } + Err(e) => { yield Err(std::io::Error::new(std::io::ErrorKind::Other, e.to_string())); } + } + } + }; + let mut stream: ByteStream = Box::pin(stream); + + if service == "receive-pack" { + let branch_protects = repo_branch_protect::Entity::find() + .filter(repo_branch_protect::Column::Repo.eq(self.repo.id)) + .all(self.db.reader()) + .await + .map_err(|e| actix_web::error::ErrorInternalServerError(e.to_string()))?; + + const PACK_SIG: &[u8] = b"PACK"; + let mut pre_pack: Vec = Vec::with_capacity(65536); + + while let Some(chunk) = stream.next().await { + let bytes = match chunk { + Ok(b) => b, + Err(e) => return Err(Error::from(e)), + }; + + // Reject oversized pre-PACK data to prevent memory exhaustion + if pre_pack.len() + bytes.len() > PRE_PACK_LIMIT { + return Ok(HttpResponse::PayloadTooLarge() + .insert_header(("Content-Type", "text/plain")) + .body(format!( + "Ref negotiation exceeds {} byte limit", + PRE_PACK_LIMIT + ))); + } + + if let Some(pos) = bytes.windows(4).position(|w| w == PACK_SIG) { + pre_pack.extend_from_slice(&bytes[..pos]); + + if let Err(msg) = check_branch_protection(&branch_protects, &pre_pack) { + return Ok(HttpResponse::Forbidden() + .insert_header(("Content-Type", "text/plain")) + .body(msg)); + } + + let remaining: ByteStream = Box::pin(stream! 
{ + yield Ok(bytes[pos..].to_vec()); + while let Some(chunk) = stream.next().await { + yield chunk; + } + }); + stream = remaining; + break; + } else { + pre_pack.extend_from_slice(&bytes); + } + } + } + + if let Some(mut stdin) = child.stdin.take() { + let write_task = actix_web::rt::spawn(async move { + while let Some(chunk) = stream.next().await { + match chunk { + Ok(bytes) => { + if let Err(e) = stdin.write_all(&bytes).await { + return Err(e); + } + } + Err(e) => { + return Err(std::io::Error::new(std::io::ErrorKind::Other, e)); + } + } + } + drop(stdin); + Ok::<_, std::io::Error>(()) + }); + + let write_result = tokio::time::timeout(GIT_OPERATION_TIMEOUT, write_task) + .await + .map_err(|_| actix_web::error::ErrorInternalServerError("Git stdin write timeout"))? + .map_err(|e| { + actix_web::error::ErrorInternalServerError(format!("Write error: {}", e)) + })?; + + if let Err(e) = write_result { + return Err(actix_web::error::ErrorInternalServerError(format!( + "Failed to write to git: {}", + e + ))); + } + } + + let output = tokio::time::timeout(GIT_OPERATION_TIMEOUT, child.wait_with_output()) + .await + .map_err(|_| actix_web::error::ErrorInternalServerError("Git operation timeout"))? 
+ .map_err(|e| { + actix_web::error::ErrorInternalServerError(format!("Git wait failed: {}", e)) + })?; + + if !output.status.success() { + let stderr = String::from_utf8_lossy(&output.stderr); + return Ok(HttpResponse::InternalServerError() + .insert_header(("Content-Type", "text/plain")) + .body(format!("Git command failed: {}", stderr))); + } + + Ok(HttpResponse::Ok() + .content_type(format!("application/x-git-{}-result", service)) + .insert_header(("Cache-Control", "no-cache")) + .body(output.stdout)) + } +} + +fn write_pkt_line(buf: &mut Vec, data: &[u8]) { + let len = data.len() + 4; + buf.extend_from_slice(format!("{:04x}", len).as_bytes()); + buf.extend_from_slice(data); +} + +fn write_flush_pkt(buf: &mut Vec) { + buf.extend_from_slice(b"0000"); +} + +#[derive(Debug)] +struct RefUpdate { + old_oid: Option, + new_oid: Option, + name: String, +} + +fn check_branch_protection( + branch_protects: &[repo_branch_protect::Model], + pre_pack: &[u8], +) -> Result<(), String> { + let refs = parse_ref_updates(pre_pack)?; + for r#ref in &refs { + for protection in branch_protects { + if r#ref.name.starts_with(&protection.branch) { + // Check deletion (new_oid is all zeros / 40 zeros) + if r#ref.new_oid.as_deref() == Some("0000000000000000000000000000000000000000") { + if protection.forbid_deletion { + return Err(format!( + "Deletion of protected branch '{}' is forbidden", + r#ref.name + )); + } + continue; + } + + // Check tag push + if r#ref.name.starts_with("refs/tags/") { + if protection.forbid_tag_push { + return Err(format!( + "Tag push to protected branch '{}' is forbidden", + r#ref.name + )); + } + continue; + } + + // Check force push: old != new AND old is non-zero (non-fast-forward update) + if let (Some(old_oid), Some(new_oid)) = + (r#ref.old_oid.as_deref(), r#ref.new_oid.as_deref()) + { + let is_new_branch = old_oid == "0000000000000000000000000000000000000000"; + if !is_new_branch + && old_oid != new_oid + && r#ref.name.starts_with("refs/heads/") + && 
protection.forbid_force_push + { + return Err(format!( + "Force push to protected branch '{}' is forbidden", + r#ref.name + )); + } + } + + // Check push + if protection.forbid_push { + return Err(format!( + "Push to protected branch '{}' is forbidden", + r#ref.name + )); + } + } + } + } + Ok(()) +} + +fn parse_ref_updates(buffer: &[u8]) -> Result, String> { + let text = String::from_utf8_lossy(buffer); + let mut refs = Vec::new(); + + for line in text.lines() { + let line = line.trim(); + if line.is_empty() || line.starts_with('#') || line.starts_with("PACK") { + continue; + } + // Each command is a pkt-line: "<4-hex-len><old-oid> <new-oid> <ref>[\0<capabilities>]" + let mut parts = line.split_whitespace(); + let old_oid = parts.next().map(|s| { /* strip the fused 4-hex pkt-line length prefix, else deletion/force-push detection never matches */ if s.len() == 44 && s[..4].bytes().all(|b| b.is_ascii_hexdigit()) { s[4..].to_string() } else { s.to_string() } }); + let new_oid = parts.next().map(|s| s.to_string()); + let name = parts + .next() + .unwrap_or("") + .split('\0') /* the first command appends capabilities after a NUL; keep only the ref name */ + .next() + .unwrap_or("") + .to_string(); + if !name.is_empty() { + refs.push(RefUpdate { + old_oid, + new_oid, + name, + }); + } + } + + Ok(refs) +} diff --git a/libs/git/http/lfs.rs new file mode 100644 index 0000000..2c2d939 --- /dev/null +++ b/libs/git/http/lfs.rs @@ -0,0 +1,458 @@ +use crate::error::GitError; +use crate::http::handler::is_valid_oid; +use actix_web::{HttpResponse, web}; +use base64::Engine; +use base64::engine::general_purpose::STANDARD; +use db::database::AppDatabase; +use models::repos::{repo, repo_lfs_lock, repo_lfs_object}; +use sea_orm::sqlx::types::chrono; +use sea_orm::*; +use serde::{Deserialize, Serialize}; +use std::collections::HashMap; +use std::path::PathBuf; + +const LFS_AUTH_TOKEN_EXPIRY: u64 = 3600; + +#[derive(Deserialize, Serialize)] +pub struct BatchRequest { + pub operation: String, + pub objects: Vec, + #[serde(skip_serializing_if = "Option::is_none")] + pub transfers: Option>, + #[serde(skip_serializing_if = "Option::is_none")] + pub r#ref: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub hash_algo: Option, +} + +#[derive(Deserialize, Serialize)] +pub struct LfsRef { + pub name: String, +} + 
+#[derive(Deserialize, Serialize, Clone)] +pub struct LfsObjectReq { + pub oid: String, + pub size: i64, +} + +#[derive(Serialize)] +pub struct BatchResponse { + pub transfer: String, + pub objects: Vec, + #[serde(skip_serializing_if = "Option::is_none")] + pub hash_algo: Option, +} + +#[derive(Serialize)] +pub struct LfsObjectResponse { + pub oid: String, + pub size: i64, + #[serde(skip_serializing_if = "Option::is_none")] + pub authenticated: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub actions: Option>, + #[serde(skip_serializing_if = "Option::is_none")] + pub error: Option, +} + +#[derive(Serialize)] +pub struct LfsAction { + pub href: String, + #[serde(skip_serializing_if = "Option::is_none")] + pub header: Option>, + #[serde(skip_serializing_if = "Option::is_none")] + pub expires_in: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub expires_at: Option, +} + +#[derive(Serialize)] +pub struct LfsError { + pub code: i32, + pub message: String, +} + +#[derive(Deserialize)] +pub struct CreateLockRequest { + pub oid: String, +} + +#[derive(Serialize)] +pub struct LockResponse { + pub path: String, + pub locked_by: uuid::Uuid, + pub locked_at: String, + #[serde(skip_serializing_if = "Option::is_none")] + pub unlocked_at: Option, +} + +pub struct LfsHandler { + pub storage_path: PathBuf, + pub model: repo::Model, + pub db: AppDatabase, +} + +impl LfsHandler { + pub fn new(storage_path: PathBuf, model: repo::Model, db: AppDatabase) -> Self { + Self { + storage_path, + model, + db, + } + } + + fn get_lfs_storage_path(&self) -> PathBuf { + self.storage_path.join(".lfs") + } + + fn get_object_path(&self, oid: &str) -> PathBuf { + let prefix = &oid[..2]; + self.get_lfs_storage_path() + .join("objects") + .join(prefix) + .join(oid) + } + + pub async fn batch( + &self, + req: BatchRequest, + base_url: &str, + ) -> Result { + let operation = req.operation.as_str(); + + if operation != "upload" && operation != "download" { + return 
Err(GitError::InvalidOid(format!( + "Invalid operation: {}", + operation + ))); + } + + let oids: Vec<&str> = req.objects.iter().map(|o| o.oid.as_str()).collect(); + + // Single batch query for all OIDs + let existing: Vec = repo_lfs_object::Entity::find() + .filter(repo_lfs_object::Column::Oid.is_in(oids.clone())) + .filter(repo_lfs_object::Column::Repo.eq(self.model.id)) + .all(self.db.reader()) + .await + .map_err(|e| GitError::Internal(e.to_string()))?; + + let existing_map: HashMap<&str, &repo_lfs_object::Model> = + existing.iter().map(|m| (m.oid.as_str(), m)).collect(); + + let mut response_objects = Vec::with_capacity(req.objects.len()); + + for obj in req.objects { + let existing = existing_map.get(obj.oid.as_str()); + + let mut actions = HashMap::new(); + + match operation { + "upload" => { + if existing.is_none() { + let upload_url = format!( + "{}/{}/{}.git/info/lfs/objects/{}", + base_url, self.model.project, self.model.repo_name, obj.oid + ); + + let mut headers = HashMap::new(); + headers.insert("authorization".to_string(), "Bearer token".to_string()); + + actions.insert( + "upload".to_string(), + LfsAction { + href: upload_url, + header: Some(headers), + expires_in: Some(LFS_AUTH_TOKEN_EXPIRY as i64), + expires_at: None, + }, + ); + } + } + "download" => match existing { + Some(_) => { + let download_url = format!( + "{}/{}/{}.git/info/lfs/objects/{}", + base_url, self.model.project, self.model.repo_name, obj.oid + ); + + let mut headers = HashMap::new(); + headers.insert("authorization".to_string(), "Bearer token".to_string()); + + actions.insert( + "download".to_string(), + LfsAction { + href: download_url, + header: Some(headers), + expires_in: Some(LFS_AUTH_TOKEN_EXPIRY as i64), + expires_at: None, + }, + ); + } + None => { + response_objects.push(LfsObjectResponse { + oid: obj.oid, + size: obj.size, + authenticated: None, + actions: None, + error: Some(LfsError { + code: 404, + message: "Object does not exist".to_string(), + }), + }); + 
continue; + } + }, + _ => {} + } + + response_objects.push(LfsObjectResponse { + oid: obj.oid, + size: obj.size, + authenticated: Some(true), + actions: if actions.is_empty() { + None + } else { + Some(actions) + }, + error: None, + }); + } + + Ok(BatchResponse { + transfer: "basic".to_string(), + objects: response_objects, + hash_algo: req.hash_algo, + }) + } + + pub async fn upload_object( + &self, + oid: &str, + payload: web::Payload, + _auth_token: &str, + ) -> Result { + if !(oid.len() == 64 && oid.chars().all(|c| c.is_ascii_hexdigit())) { /* git-lfs OIDs are 64-char hex SHA-256; is_valid_oid checks 40-char SHA-1 and would reject every valid LFS OID */ + return Err(GitError::InvalidOid(format!("Invalid OID format: {}", oid))); + } + + let object_path = self.get_object_path(oid); + if let Some(parent) = object_path.parent() { + tokio::fs::create_dir_all(parent) + .await + .map_err(|e| GitError::Internal(format!("Failed to create directory: {}", e)))?; + } + + let temp_path = object_path.with_extension("tmp"); + let mut file = tokio::fs::File::create(&temp_path) + .await + .map_err(|e| GitError::Internal(format!("Failed to create temp file: {}", e)))?; + + use futures_util::stream::StreamExt; + use sha2::Digest; + use tokio::io::AsyncWriteExt; + + let mut payload = payload; + let mut size = 0i64; + let mut hasher = sha2::Sha256::new(); + + while let Some(chunk) = payload.next().await { + let chunk = chunk.map_err(|e| GitError::Internal(format!("Payload error: {}", e)))?; + size += chunk.len() as i64; + hasher.update(&chunk); + if let Err(e) = file.write_all(&chunk).await { + let _ = tokio::fs::remove_file(&temp_path).await; + return Err(GitError::Internal(format!("Failed to write file: {}", e))); + } + } + + file.flush() + .await + .map_err(|e| GitError::Internal(format!("Failed to flush file: {}", e)))?; + drop(file); + + let hash_bytes = hasher.finalize(); + let calculated_oid = hash_bytes.iter().map(|b| format!("{:02x}", b)).collect::<String>(); /* lowercase hex, not base64: base64 never equals the client's hex OID, so every upload failed the integrity check */ + + if calculated_oid != oid { + let _ = tokio::fs::remove_file(&temp_path).await; + return Err(GitError::InvalidOid(format!( + "OID mismatch: expected {}, got {}", + oid, calculated_oid + ))); + } + + 
if let Err(e) = tokio::fs::rename(&temp_path, &object_path).await { + let _ = tokio::fs::remove_file(&temp_path).await; + return Err(GitError::Internal(format!("Failed to move file: {}", e))); + } + + let now = chrono::Utc::now(); + let new_object = repo_lfs_object::ActiveModel { + id: Set(0i64), + oid: Set(oid.to_string()), + repo: Set(self.model.id), + size: Set(size), + storage_path: Set(object_path.to_string_lossy().to_string()), + uploaded_by: Set(None), + uploaded_at: Set(now), + }; + + new_object + .insert(self.db.writer()) + .await + .map_err(|e| GitError::Internal(e.to_string()))?; + + Ok(HttpResponse::Ok().finish()) + } + + pub async fn download_object( + &self, + oid: &str, + _auth_token: &str, + ) -> Result { + if !is_valid_oid(oid) { + return Err(GitError::InvalidOid(format!("Invalid OID format: {}", oid))); + } + + let obj = repo_lfs_object::Entity::find() + .filter(repo_lfs_object::Column::Oid.eq(oid)) + .filter(repo_lfs_object::Column::Repo.eq(self.model.id)) + .one(self.db.reader()) + .await + .map_err(|e| GitError::Internal(e.to_string()))? 
+ .ok_or_else(|| GitError::NotFound("Object not found".to_string()))?; + + let file = tokio::fs::File::open(&obj.storage_path) + .await + .map_err(|e| GitError::Internal(format!("Failed to open file: {}", e)))?; + + use actix_web::body::BodyStream; + use futures_util::stream; + use tokio::io::AsyncReadExt; + + let chunk_size: usize = 65536; + + let stream = stream::unfold(file, move |mut file| async move { + let mut buffer = vec![0u8; chunk_size]; + match file.read(&mut buffer).await { + Ok(0) => None, + Ok(n) => { + buffer.truncate(n); + Some(( + Ok::<_, std::io::Error>(actix_web::web::Bytes::from(buffer)), + file, + )) + } + Err(e) => Some((Err(e), file)), + } + }); + + Ok(HttpResponse::Ok() + .content_type("application/octet-stream") + .insert_header(("Content-Length", obj.size.to_string())) + .body(BodyStream::new(stream))) + } + + pub async fn lock_object( + &self, + oid: &str, + user_uid: uuid::Uuid, + ) -> Result { + use sea_orm::ActiveModelTrait; + + if !is_valid_oid(oid) { + return Err(GitError::InvalidOid(format!("Invalid OID format: {}", oid))); + } + + let now = chrono::Utc::now(); + + let am = repo_lfs_lock::ActiveModel { + repo: Set(self.model.id), + path: Set(oid.to_string()), + lock_type: Set("upload".to_string()), + locked_by: Set(user_uid), + locked_at: Set(now), + unlocked_at: Set(None), + }; + + match am.insert(self.db.writer()).await { + Ok(model) => Ok(LockResponse { + path: model.path, + locked_by: model.locked_by, + locked_at: model.locked_at.to_rfc3339(), + unlocked_at: model.unlocked_at.map(|t| t.to_rfc3339()), + }), + Err(e) => { + let err_msg = format!("{}", e); + if err_msg.contains("duplicate key") || err_msg.contains("23505") { + return Err(GitError::Locked("Already locked".to_string())); + } + Err(GitError::Internal(format!("DB error: {}", e))) + } + } + } + + pub async fn unlock_object(&self, lock_id: &str, user_uid: uuid::Uuid) -> Result<(), GitError> { + let existing = repo_lfs_lock::Entity::find() + 
.filter(repo_lfs_lock::Column::Repo.eq(self.model.id)) + .filter(repo_lfs_lock::Column::Path.eq(lock_id.to_string())) + .one(self.db.reader()) + .await + .map_err(|e| GitError::Internal(e.to_string()))? + .ok_or_else(|| GitError::NotFound("Lock not found".to_string()))?; + + if existing.locked_by != user_uid && existing.locked_by != self.model.created_by { + return Err(GitError::PermissionDenied( + "Not allowed to unlock".to_string(), + )); + } + + let now = chrono::Utc::now(); + let mut am: repo_lfs_lock::ActiveModel = existing.into(); + am.unlocked_at = Set(Some(now)); + let _: repo_lfs_lock::Model = am + .update(self.db.writer()) + .await + .map_err(|e| GitError::Internal(e.to_string()))?; + Ok(()) + } + + pub async fn list_locks(&self, maybe_oid: Option<&str>) -> Result, GitError> { + let mut q = + repo_lfs_lock::Entity::find().filter(repo_lfs_lock::Column::Repo.eq(self.model.id)); + if let Some(oid) = maybe_oid { + q = q.filter(repo_lfs_lock::Column::Path.eq(oid.to_string())); + } + let rows: Vec = q + .all(self.db.reader()) + .await + .map_err(|e| GitError::Internal(e.to_string()))?; + Ok(rows + .into_iter() + .map(|r| LockResponse { + path: r.path, + locked_by: r.locked_by, + locked_at: r.locked_at.to_rfc3339(), + unlocked_at: r.unlocked_at.map(|t| t.to_rfc3339()), + }) + .collect()) + } + + pub async fn get_lock(&self, path: &str) -> Result { + let r = repo_lfs_lock::Entity::find() + .filter(repo_lfs_lock::Column::Repo.eq(self.model.id)) + .filter(repo_lfs_lock::Column::Path.eq(path.to_string())) + .one(self.db.reader()) + .await + .map_err(|e| GitError::Internal(e.to_string()))? 
+ .ok_or_else(|| GitError::NotFound("Lock not found".to_string()))?; + Ok(LockResponse { + path: r.path, + locked_by: r.locked_by, + locked_at: r.locked_at.to_rfc3339(), + unlocked_at: r.unlocked_at.map(|t| t.to_rfc3339()), + }) + } +} diff --git a/libs/git/http/lfs_routes.rs b/libs/git/http/lfs_routes.rs new file mode 100644 index 0000000..423580c --- /dev/null +++ b/libs/git/http/lfs_routes.rs @@ -0,0 +1,222 @@ +use crate::error::GitError; +use crate::http::HttpAppState; +use crate::http::handler::is_valid_oid; +use crate::http::lfs::{BatchRequest, CreateLockRequest, LfsHandler}; +use crate::http::utils::get_repo_model; +use actix_web::{Error, HttpRequest, HttpResponse, web}; +use std::path::PathBuf; + +fn base_url(req: &HttpRequest) -> String { + let conn_info = req.connection_info(); + format!("{}://{}", conn_info.scheme(), conn_info.host()) +} + +fn bearer_token(req: &HttpRequest) -> Result { + let auth_header = req + .headers() + .get("authorization") + .ok_or_else(|| actix_web::error::ErrorUnauthorized("Missing authorization header"))? 
+ .to_str()
+ .map_err(|_| actix_web::error::ErrorUnauthorized("Invalid authorization header"))?;
+
+ if let Some(token) = auth_header.strip_prefix("Bearer ") {
+ Ok(token.to_string())
+ } else {
+ Err(actix_web::error::ErrorUnauthorized(
+ "Invalid authorization format",
+ ))
+ }
+ }
+
+ // Resolves the acting user's UUID for LFS lock ownership checks.
+ // NOTE(review): this trusts a client-supplied `x-user-uid` header and, when
+ // the header is absent or unparsable, silently falls back to the repository
+ // creator's uid — so any caller can act as an arbitrary user or as the repo
+ // owner. Presumably only reachable behind an authenticating gateway that
+ // sets this header — TODO confirm, otherwise lock create/delete is spoofable.
+ fn user_uid(req: &HttpRequest, repo: &models::repos::repo::Model) -> Result {
+ if let Some(hv) = req.headers().get("x-user-uid") {
+ if let Ok(s) = hv.to_str() {
+ if let Ok(uid) = s.parse::() {
+ return Ok(uid);
+ }
+ }
+ }
+ // Fallback: attribute the action to the repo creator (see NOTE above).
+ Ok(repo.created_by)
+ }
+
+ // Best-effort client IP for rate limiting; "unknown" when unavailable.
+ fn client_ip(req: &HttpRequest) -> String {
+ req.connection_info()
+ .realip_remote_addr()
+ .unwrap_or("unknown")
+ .to_string()
+ }
+
+ // POST …/info/lfs/objects/batch — LFS Batch API entry point: enforces the
+ // per-IP read rate limit, resolves the repo, and delegates to
+ // `LfsHandler::batch`, replying with the LFS media type.
+ pub async fn lfs_batch(
+ req: HttpRequest,
+ path: web::Path<(String, String)>,
+ body: web::Json,
+ state: web::Data,
+ ) -> Result {
+ let (namespace, repo_name) = path.into_inner();
+
+ let ip = client_ip(&req);
+ if !state.rate_limiter.is_ip_read_allowed(&ip).await {
+ return Err(actix_web::error::ErrorTooManyRequests(
+ "Rate limit exceeded",
+ ));
+ }
+
+ let repo = get_repo_model(&namespace, &repo_name, &state.db).await?;
+ let handler = LfsHandler::new(PathBuf::from(&repo.storage_path), repo, state.db.clone());
+
+ let response = handler
+ .batch(body.into_inner(), &base_url(&req))
+ .await
+ .map_err(|e| actix_web::error::ErrorInternalServerError(e.to_string()))?;
+
+ Ok(HttpResponse::Ok()
+ .content_type("application/vnd.git-lfs+json")
+ .json(response))
+ }
+
+ // PUT …/info/lfs/objects/{oid} — uploads one LFS object body. OID format is
+ // validated here (cheap reject) and again inside the handler; write-class
+ // rate limit and a Bearer token are required before touching storage.
+ pub async fn lfs_upload(
+ req: HttpRequest,
+ path: web::Path<(String, String, String)>,
+ payload: web::Payload,
+ state: web::Data,
+ ) -> Result {
+ let (namespace, repo_name, oid) = path.into_inner();
+
+ if !is_valid_oid(&oid) {
+ return Err(actix_web::error::ErrorBadRequest("Invalid OID format"));
+ }
+
+ let ip = client_ip(&req);
+ if !state.rate_limiter.is_ip_write_allowed(&ip).await {
+ return Err(actix_web::error::ErrorTooManyRequests(
+ "Rate limit exceeded",
+ ));
+ }
+
+ let repo = get_repo_model(&namespace, &repo_name,
&state.db).await?; + let token = bearer_token(&req)?; + let handler = LfsHandler::new(PathBuf::from(&repo.storage_path), repo, state.db.clone()); + + match handler.upload_object(&oid, payload, &token).await { + Ok(response) => Ok(response), + Err(GitError::InvalidOid(_)) => Err(actix_web::error::ErrorBadRequest("Invalid OID")), + Err(GitError::AuthFailed(_)) => Err(actix_web::error::ErrorUnauthorized("Unauthorized")), + Err(e) => Err(actix_web::error::ErrorInternalServerError(e.to_string())), + } +} + +pub async fn lfs_download( + req: HttpRequest, + path: web::Path<(String, String, String)>, + state: web::Data, +) -> Result { + let (namespace, repo_name, oid) = path.into_inner(); + + if !is_valid_oid(&oid) { + return Err(actix_web::error::ErrorBadRequest("Invalid OID format")); + } + + let ip = client_ip(&req); + if !state.rate_limiter.is_ip_read_allowed(&ip).await { + return Err(actix_web::error::ErrorTooManyRequests( + "Rate limit exceeded", + )); + } + + let repo = get_repo_model(&namespace, &repo_name, &state.db).await?; + let token = bearer_token(&req)?; + let handler = LfsHandler::new(PathBuf::from(&repo.storage_path), repo, state.db.clone()); + + match handler.download_object(&oid, &token).await { + Ok(response) => Ok(response), + Err(GitError::NotFound(_)) => Err(actix_web::error::ErrorNotFound("Object not found")), + Err(GitError::AuthFailed(_)) => Err(actix_web::error::ErrorUnauthorized("Unauthorized")), + Err(e) => Err(actix_web::error::ErrorInternalServerError(e.to_string())), + } +} + +pub async fn lfs_lock_create( + req: HttpRequest, + path: web::Path<(String, String)>, + body: web::Json, + state: web::Data, +) -> Result { + let (namespace, repo_name) = path.into_inner(); + + let ip = client_ip(&req); + if !state.rate_limiter.is_ip_write_allowed(&ip).await { + return Err(actix_web::error::ErrorTooManyRequests( + "Rate limit exceeded", + )); + } + + let repo = get_repo_model(&namespace, &repo_name, &state.db).await?; + let uid = user_uid(&req, 
&repo)?; + let handler = LfsHandler::new(PathBuf::from(&repo.storage_path), repo, state.db.clone()); + + match handler.lock_object(&body.oid, uid).await { + Ok(lock) => Ok(HttpResponse::Created().json(lock)), + Err(GitError::Locked(msg)) => Ok(HttpResponse::Conflict().body(msg)), + Err(e) => Err(actix_web::error::ErrorInternalServerError(e.to_string())), + } +} + +pub async fn lfs_lock_list( + _req: HttpRequest, + path: web::Path<(String, String)>, + query: web::Query>, + state: web::Data, +) -> Result { + let (namespace, repo_name) = path.into_inner(); + let repo = get_repo_model(&namespace, &repo_name, &state.db).await?; + + let maybe_oid = query.get("oid").map(|s| s.as_str()); + let handler = LfsHandler::new(PathBuf::from(&repo.storage_path), repo, state.db.clone()); + + match handler.list_locks(maybe_oid).await { + Ok(list) => Ok(HttpResponse::Ok().json(list)), + Err(e) => Err(actix_web::error::ErrorInternalServerError(e.to_string())), + } +} + +pub async fn lfs_lock_get( + _req: HttpRequest, + path: web::Path<(String, String, String)>, + state: web::Data, +) -> Result { + let (namespace, repo_name, lock_path) = path.into_inner(); + let repo = get_repo_model(&namespace, &repo_name, &state.db).await?; + let handler = LfsHandler::new(PathBuf::from(&repo.storage_path), repo, state.db.clone()); + + match handler.get_lock(&lock_path).await { + Ok(lock) => Ok(HttpResponse::Ok().json(lock)), + Err(GitError::NotFound(_)) => Err(actix_web::error::ErrorNotFound("Lock not found")), + Err(e) => Err(actix_web::error::ErrorInternalServerError(e.to_string())), + } +} + +pub async fn lfs_lock_delete( + req: HttpRequest, + path: web::Path<(String, String, String)>, + state: web::Data, +) -> Result { + let (namespace, repo_name, lock_id) = path.into_inner(); + + let ip = client_ip(&req); + if !state.rate_limiter.is_ip_write_allowed(&ip).await { + return Err(actix_web::error::ErrorTooManyRequests( + "Rate limit exceeded", + )); + } + + let repo = get_repo_model(&namespace, 
&repo_name, &state.db).await?; + let uid = user_uid(&req, &repo)?; + let handler = LfsHandler::new(PathBuf::from(&repo.storage_path), repo, state.db.clone()); + + match handler.unlock_object(&lock_id, uid).await { + Ok(()) => Ok(HttpResponse::NoContent().finish()), + Err(GitError::PermissionDenied(_)) => Err(actix_web::error::ErrorForbidden("Not allowed")), + Err(GitError::NotFound(_)) => Err(actix_web::error::ErrorNotFound("Lock not found")), + Err(e) => Err(actix_web::error::ErrorInternalServerError(e.to_string())), + } +} diff --git a/libs/git/http/mod.rs b/libs/git/http/mod.rs new file mode 100644 index 0000000..0da2632 --- /dev/null +++ b/libs/git/http/mod.rs @@ -0,0 +1,126 @@ +use actix_web::{App, HttpServer, web}; +use config::AppConfig; +use db::cache::AppCache; +use db::database::AppDatabase; +use slog::{Logger, error, info}; +use std::sync::Arc; +use std::time::Duration; +use tokio::time::timeout; + +pub mod auth; +pub mod handler; +pub mod lfs; +pub mod lfs_routes; +pub mod rate_limit; +pub mod routes; +pub mod utils; + +#[derive(Clone)] +pub struct HttpAppState { + pub db: AppDatabase, + pub cache: AppCache, + pub sync: crate::ssh::ReceiveSyncService, + pub rate_limiter: Arc, + pub logger: Logger, +} + +pub fn git_http_cfg(cfg: &mut web::ServiceConfig) { + cfg.route( + "/{namespace}/{repo_name}.git/info/refs", + web::get().to(routes::info_refs), + ) + .route( + "/{namespace}/{repo_name}.git/git-upload-pack", + web::post().to(routes::upload_pack), + ) + .route( + "/{namespace}/{repo_name}.git/git-receive-pack", + web::post().to(routes::receive_pack), + ) + .route( + "/{namespace}/{repo_name}.git/info/lfs/objects/batch", + web::post().to(lfs_routes::lfs_batch), + ) + .route( + "/{namespace}/{repo_name}.git/info/lfs/objects/{oid}", + web::put().to(lfs_routes::lfs_upload), + ) + .route( + "/{namespace}/{repo_name}.git/info/lfs/objects/{oid}", + web::get().to(lfs_routes::lfs_download), + ) + .route( + "/{namespace}/{repo_name}.git/info/lfs/locks", + 
web::post().to(lfs_routes::lfs_lock_create), + ) + .route( + "/{namespace}/{repo_name}.git/info/lfs/locks", + web::get().to(lfs_routes::lfs_lock_list), + ) + .route( + "/{namespace}/{repo_name}.git/info/lfs/locks/{id}", + web::get().to(lfs_routes::lfs_lock_get), + ) + .route( + "/{namespace}/{repo_name}.git/info/lfs/locks/{id}", + web::delete().to(lfs_routes::lfs_lock_delete), + ); +} + +pub async fn run_http(config: AppConfig, logger: Logger) -> anyhow::Result<()> { + let (db, app_cache) = tokio::join!(AppDatabase::init(&config), AppCache::init(&config),); + let db = db?; + let app_cache = app_cache?; + + let redis_pool = app_cache.redis_pool().clone(); + let sync = crate::ssh::ReceiveSyncService::new(redis_pool, logger.clone()); + + let rate_limiter = Arc::new(rate_limit::RateLimiter::new( + rate_limit::RateLimitConfig::default(), + )); + let _cleanup = rate_limiter.clone().start_cleanup(); + + let state = HttpAppState { + db: db.clone(), + cache: app_cache.clone(), + sync, + rate_limiter, + logger: logger.clone(), + }; + + let logger_startup = logger.clone(); + info!(&logger_startup, "Starting git HTTP server on 0.0.0.0:8021"); + + let server = HttpServer::new(move || { + App::new() + .app_data(web::Data::new(state.clone())) + .configure(git_http_cfg) + }) + .bind("0.0.0.0:8021")? + .run(); + + let (shutdown_tx, shutdown_rx) = tokio::sync::oneshot::channel::<()>(); + let server = server; + let logger_shutdown = logger.clone(); + let server_handle = tokio::spawn(async move { + tokio::select! 
{ + result = server => { + if let Err(e) = result { + error!(&logger_shutdown, "HTTP server error: {}", e); + } + } + _ = shutdown_rx => { + info!(&logger_shutdown, "HTTP server shutting down"); + } + } + }); + + tokio::signal::ctrl_c().await?; + info!(&logger, "Received shutdown signal"); + drop(shutdown_tx); + + let _ = timeout(Duration::from_secs(5), server_handle).await; + + info!(&logger, "Git HTTP server stopped"); + Ok(()) +} diff --git a/libs/git/http/rate_limit.rs b/libs/git/http/rate_limit.rs new file mode 100644 index 0000000..b230030 --- /dev/null +++ b/libs/git/http/rate_limit.rs @@ -0,0 +1,142 @@ +//! HTTP rate limiting for git operations. +//! +//! Uses a token-bucket approach with per-IP and per-repo-write limits. +//! Cleanup runs every 5 minutes to prevent unbounded memory growth. + +use std::collections::HashMap; +use std::sync::Arc; +use std::time::{Duration, Instant}; +use tokio::sync::RwLock; +use tokio::time::interval; + +#[derive(Debug, Clone)] +pub struct RateLimitConfig { + /// Requests allowed per window for read operations. + pub read_requests_per_window: u32, + /// Requests allowed per window for write operations. + pub write_requests_per_window: u32, + /// Window duration in seconds. 
+ pub window_secs: u64,
+ }
+
+ impl Default for RateLimitConfig {
+ fn default() -> Self {
+ Self {
+ read_requests_per_window: 120,
+ write_requests_per_window: 30,
+ window_secs: 60,
+ }
+ }
+ }
+
+ /// Per-key fixed-window counter. Buckets are keyed per category
+ /// ("ip:read:…", "ip:write:…", "repo:write:…"), so read and write traffic
+ /// never share a bucket — a single counter suffices (the former separate
+ /// `read_count`/`write_count` pair was dead weight: only one was ever used).
+ #[derive(Debug)]
+ struct RateLimitBucket {
+ count: u32,
+ reset_time: Instant,
+ }
+
+ pub struct RateLimiter {
+ buckets: Arc>>,
+ config: RateLimitConfig,
+ }
+
+ impl RateLimiter {
+ pub fn new(config: RateLimitConfig) -> Self {
+ Self {
+ buckets: Arc::new(RwLock::new(HashMap::new())),
+ config,
+ }
+ }
+
+ /// Read-class check for a client IP (info/refs, upload-pack, downloads).
+ pub async fn is_ip_read_allowed(&self, ip: &str) -> bool {
+ let key = format!("ip:read:{}", ip);
+ self.is_allowed(&key, self.config.read_requests_per_window)
+ .await
+ }
+
+ /// Write-class check for a client IP (receive-pack, LFS uploads, locks).
+ pub async fn is_ip_write_allowed(&self, ip: &str) -> bool {
+ let key = format!("ip:write:{}", ip);
+ self.is_allowed(&key, self.config.write_requests_per_window)
+ .await
+ }
+
+ /// Write-class check scoped to a single (ip, repo) pair.
+ pub async fn is_repo_write_allowed(&self, ip: &str, repo_path: &str) -> bool {
+ let key = format!("repo:write:{}:{}", ip, repo_path);
+ self.is_allowed(&key, self.config.write_requests_per_window)
+ .await
+ }
+
+ /// Fixed-window admission: resets the bucket when its window has elapsed,
+ /// then admits while `count < limit`. Takes the write lock on every call.
+ async fn is_allowed(&self, key: &str, limit: u32) -> bool {
+ let now = Instant::now();
+ let mut buckets = self.buckets.write().await;
+
+ let bucket = buckets
+ .entry(key.to_string())
+ .or_insert_with(|| RateLimitBucket {
+ count: 0,
+ reset_time: now + Duration::from_secs(self.config.window_secs),
+ });
+
+ if now >= bucket.reset_time {
+ bucket.count = 0;
+ bucket.reset_time = now + Duration::from_secs(self.config.window_secs);
+ }
+
+ if bucket.count >= limit {
+ return false;
+ }
+
+ bucket.count += 1;
+ true
+ }
+
+ /// Seconds until the caller's *read* window resets (0 when not limited).
+ /// NOTE(review): only consults the read bucket — write-limited callers get
+ /// 0; confirm whether a write-side variant is needed by callers.
+ pub async fn retry_after(&self, ip: &str) -> u64 {
+ let key_read = format!("ip:read:{}", ip);
+ let now = Instant::now();
+ let buckets = self.buckets.read().await;
+
+ if let Some(bucket) = buckets.get(&key_read) {
+ if
now < bucket.reset_time { + return bucket.reset_time.saturating_duration_since(now).as_secs() as u64; + } + } + 0 + } + + /// Start a background cleanup task that removes expired entries. + /// Should be spawned once at startup. + pub fn start_cleanup(self: Arc) -> tokio::task::JoinHandle<()> { + tokio::spawn(async move { + let mut ticker = interval(Duration::from_secs(300)); // every 5 minutes + loop { + ticker.tick().await; + let now = Instant::now(); + let mut buckets = self.buckets.write().await; + buckets.retain(|_, bucket| now < bucket.reset_time); + } + }) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[tokio::test] + async fn test_rate_limit_allows_requests_up_to_limit() { + let limiter = Arc::new(RateLimiter::new(RateLimitConfig { + read_requests_per_window: 3, + write_requests_per_window: 1, + window_secs: 60, + })); + + for _ in 0..3 { + assert!(limiter.is_ip_read_allowed("1.2.3.4").await); + } + assert!(!limiter.is_ip_read_allowed("1.2.3.4").await); + } +} diff --git a/libs/git/http/routes.rs b/libs/git/http/routes.rs new file mode 100644 index 0000000..4950575 --- /dev/null +++ b/libs/git/http/routes.rs @@ -0,0 +1,107 @@ +use crate::http::HttpAppState; +use crate::http::auth::authorize_repo_access; +use crate::http::handler::GitHttpHandler; +use crate::http::utils::get_repo_model; +use crate::ssh::RepoReceiveSyncTask; +use actix_web::{Error, HttpRequest, HttpResponse, web}; +use std::path::PathBuf; +use std::time::Duration; +use tokio::time::timeout; + +pub async fn info_refs( + req: HttpRequest, + path: web::Path<(String, String)>, + state: web::Data, +) -> Result { + let ip = extract_ip(&req); + if !state.rate_limiter.is_ip_read_allowed(&ip).await { + return Err(actix_web::error::ErrorTooManyRequests( + "Rate limit exceeded", + )); + } + + let service_param = req + .query_string() + .split('&') + .find(|s| s.starts_with("service=")) + .and_then(|s| s.strip_prefix("service=")) + .ok_or_else(|| actix_web::error::ErrorBadRequest("Missing 
service parameter"))?; + + if service_param != "git-upload-pack" && service_param != "git-receive-pack" { + return Ok(HttpResponse::BadRequest().body("Invalid service")); + } + + let path_inner = path.into_inner(); + let model = get_repo_model(&path_inner.0, &path_inner.1, &state.db).await?; + let is_write = service_param == "git-receive-pack"; + authorize_repo_access(&req, &state.db, &model, is_write).await?; + + let storage_path = PathBuf::from(&model.storage_path); + let handler = GitHttpHandler::new(storage_path, model, state.db.clone()); + handler.info_refs(service_param).await +} + +pub async fn upload_pack( + req: HttpRequest, + path: web::Path<(String, String)>, + payload: web::Payload, + state: web::Data, +) -> Result { + let ip = extract_ip(&req); + if !state.rate_limiter.is_ip_read_allowed(&ip).await { + return Err(actix_web::error::ErrorTooManyRequests( + "Rate limit exceeded", + )); + } + + let path_inner = path.into_inner(); + let model = get_repo_model(&path_inner.0, &path_inner.1, &state.db).await?; + authorize_repo_access(&req, &state.db, &model, false).await?; + + let storage_path = PathBuf::from(&model.storage_path); + let handler = GitHttpHandler::new(storage_path, model, state.db.clone()); + handler.upload_pack(payload).await +} + +pub async fn receive_pack( + req: HttpRequest, + path: web::Path<(String, String)>, + payload: web::Payload, + state: web::Data, +) -> Result { + let ip = extract_ip(&req); + if !state.rate_limiter.is_ip_write_allowed(&ip).await { + return Err(actix_web::error::ErrorTooManyRequests( + "Rate limit exceeded", + )); + } + + let path_inner = path.into_inner(); + let model = get_repo_model(&path_inner.0, &path_inner.1, &state.db).await?; + authorize_repo_access(&req, &state.db, &model, true).await?; + + let storage_path = PathBuf::from(&model.storage_path); + let handler = GitHttpHandler::new(storage_path, model.clone(), state.db.clone()); + let result = handler.receive_pack(payload).await; + + let _ = tokio::spawn({ + 
let sync = state.sync.clone(); + let repo_uid = model.id; + async move { + let _ = timeout( + Duration::from_secs(5), + sync.send(RepoReceiveSyncTask { repo_uid }), + ) + .await; + } + }); + + result +} + +fn extract_ip(req: &HttpRequest) -> String { + req.connection_info() + .realip_remote_addr() + .unwrap_or("unknown") + .to_string() +} diff --git a/libs/git/http/utils.rs b/libs/git/http/utils.rs new file mode 100644 index 0000000..5586a96 --- /dev/null +++ b/libs/git/http/utils.rs @@ -0,0 +1,80 @@ +use actix_web::{Error, HttpRequest}; +use base64::Engine; +use base64::engine::general_purpose::STANDARD; +use db::database::AppDatabase; +use models::projects::{project, project_history_name}; +use models::repos::repo; +use sea_orm::*; +use sha2::{Digest, Sha256}; + +pub async fn get_repo_model( + namespace: &str, + repo_name: &str, + db: &AppDatabase, +) -> Result { + let project_id = if let Some(project_model) = project::Entity::find() + .filter(project::Column::Name.eq(namespace)) + .one(db.reader()) + .await + .map_err(|_| actix_web::error::ErrorInternalServerError("Database error"))? + { + project_model.id + } else if let Some(history) = project_history_name::Entity::find() + .filter(project_history_name::Column::HistoryName.eq(namespace)) + .one(db.reader()) + .await + .map_err(|_| actix_web::error::ErrorInternalServerError("Database error"))? + { + history.project_uid + } else { + return Err(actix_web::error::ErrorNotFound("Project not found").into()); + }; + + let repo = repo::Entity::find() + .filter(repo::Column::RepoName.eq(repo_name)) + .filter(repo::Column::Project.eq(project_id)) + .one(db.reader()) + .await + .map_err(|_| actix_web::error::ErrorInternalServerError("Database error"))? 
+ .ok_or_else(|| actix_web::error::ErrorNotFound("Repository not found"))?; + + Ok(repo) +} + +pub fn hash_access_key(access_key: &str) -> String { + let mut hasher = Sha256::new(); + hasher.update(access_key.as_bytes()); + STANDARD.encode(hasher.finalize()) +} + +pub fn extract_basic_credentials(req: &HttpRequest) -> Result<(String, String), Error> { + let auth_header = req + .headers() + .get("authorization") + .ok_or_else(|| actix_web::error::ErrorUnauthorized("Missing authorization header"))? + .to_str() + .map_err(|_| actix_web::error::ErrorUnauthorized("Invalid authorization header"))?; + + let encoded = auth_header + .strip_prefix("Basic ") + .ok_or_else(|| actix_web::error::ErrorUnauthorized("Invalid authorization scheme"))?; + + let decoded = STANDARD + .decode(encoded) + .map_err(|_| actix_web::error::ErrorUnauthorized("Invalid basic authorization encoding"))?; + + let decoded = String::from_utf8(decoded) + .map_err(|_| actix_web::error::ErrorUnauthorized("Invalid basic authorization payload"))?; + + let (username, access_key) = decoded + .split_once(':') + .ok_or_else(|| actix_web::error::ErrorUnauthorized("Invalid basic authorization format"))?; + + if username.is_empty() || access_key.is_empty() { + return Err(actix_web::error::ErrorUnauthorized( + "Username or access key is empty", + )); + } + + Ok((username.to_string(), access_key.to_string())) +} diff --git a/libs/git/lfs/mod.rs b/libs/git/lfs/mod.rs new file mode 100644 index 0000000..62e90e8 --- /dev/null +++ b/libs/git/lfs/mod.rs @@ -0,0 +1,4 @@ +//! LFS (Large File Storage) domain — pointer file parsing, attribute management, +//! and local object operations. +pub mod ops; +pub mod types; diff --git a/libs/git/lfs/ops.rs b/libs/git/lfs/ops.rs new file mode 100644 index 0000000..b7e56b9 --- /dev/null +++ b/libs/git/lfs/ops.rs @@ -0,0 +1,320 @@ +//! LFS operations on a Git repository. 
+ +use std::fs; +use std::path::{Path, PathBuf}; + +use globset::Glob; + +use crate::commit::types::CommitOid; +use crate::lfs::types::{LfsConfig, LfsEntry, LfsOid, LfsPointer}; +use crate::{GitDomain, GitError, GitResult}; + +impl GitDomain { + pub fn lfs_pointer_from_blob(&self, oid: &CommitOid) -> GitResult> { + let content = self.blob_content(oid)?; + Ok(LfsPointer::from_bytes(&content.content)) + } + + pub fn lfs_is_pointer(&self, oid: &CommitOid) -> GitResult { + let content = self.blob_content(oid)?; + Ok(LfsPointer::from_bytes(&content.content).is_some()) + } + + pub fn lfs_resolve_oid(&self, oid: &CommitOid) -> GitResult> { + let pointer = self.lfs_pointer_from_blob(oid)?; + Ok(pointer.map(|p| p.oid)) + } + + pub fn lfs_create_pointer( + &self, + oid: &LfsOid, + size: u64, + extra: &[(String, String)], + ) -> GitResult { + let pointer = LfsPointer { + version: "https://git-lfs.github.com/spec/v1".to_string(), + oid: oid.clone(), + size, + extra: extra.to_vec(), + }; + self.blob_create_from_string(&pointer.to_string()) + } + + pub fn lfs_scan_tree(&self, tree_oid: &CommitOid, recursive: bool) -> GitResult> { + let tree_oid = tree_oid + .to_oid() + .map_err(|_| GitError::InvalidOid(tree_oid.to_string()))?; + let tree = self + .repo() + .find_tree(tree_oid) + .map_err(|_| GitError::ObjectNotFound(tree_oid.to_string()))?; + let mut entries = Vec::new(); + self.lfs_scan_tree_impl(&mut entries, &tree, "", recursive)?; + Ok(entries) + } + + fn lfs_scan_tree_impl( + &self, + out: &mut Vec, + tree: &git2::Tree<'_>, + prefix: &str, + recursive: bool, + ) -> GitResult<()> { + for entry in tree.iter() { + let name = entry.name().unwrap_or(""); + let full_path = if prefix.is_empty() { + name.to_string() + } else { + format!("{}/{}", prefix, name) + }; + + let blob_oid = entry.id(); + let obj = match self.repo().find_object(blob_oid, None) { + Ok(o) => o, + Err(_) => continue, + }; + + if obj.kind() == Some(git2::ObjectType::Tree) { + if recursive { + let sub_tree = 
self + .repo() + .find_tree(blob_oid) + .map_err(|e| GitError::Internal(e.to_string()))?; + self.lfs_scan_tree_impl(out, &sub_tree, &full_path, recursive)?; + } + } else if let Some(blob) = obj.as_blob() { + if let Some(pointer) = LfsPointer::from_bytes(blob.content()) { + out.push(LfsEntry { + path: full_path, + pointer, + size: 0, + }); + } + } + } + Ok(()) + } + + fn gitattributes_path(&self) -> PathBuf { + self.repo() + .workdir() + .unwrap_or_else(|| self.repo().path()) + .join(".gitattributes") + } + + pub fn lfs_gitattributes_list(&self) -> GitResult> { + let path = self.gitattributes_path(); + if !path.exists() { + return Ok(Vec::new()); + } + let content = fs::read_to_string(&path).map_err(|e| GitError::IoError(e.to_string()))?; + Ok(content + .lines() + .filter(|l| l.contains("filter=lfs")) + .map(|l| l.trim().to_string()) + .collect()) + } + + pub fn lfs_gitattributes_add(&self, pattern: &str) -> GitResult<()> { + let line = format!("{} filter=lfs diff=lfs merge=lfs -text", pattern); + let path = self.gitattributes_path(); + + let content = if path.exists() { + fs::read_to_string(&path).map_err(|e| GitError::IoError(e.to_string()))? 
+ } else {
+ String::new()
+ };
+
+ if content.lines().any(|l| l.trim() == line) {
+ return Ok(());
+ }
+
+ let new_content = if content.ends_with('\n') || content.is_empty() {
+ format!("{}{}\n", content, line)
+ } else {
+ format!("{}\n{}\n", content, line)
+ };
+
+ fs::write(&path, new_content).map_err(|e| GitError::IoError(e.to_string()))?;
+ Ok(())
+ }
+
+ /// Removes the LFS tracking line for `pattern` from `.gitattributes`.
+ /// Returns `Ok(false)` when the file or the pattern line does not exist.
+ pub fn lfs_gitattributes_remove(&self, pattern: &str) -> GitResult {
+ let path = self.gitattributes_path();
+ if !path.exists() {
+ return Ok(false);
+ }
+ let content = fs::read_to_string(&path).map_err(|e| GitError::IoError(e.to_string()))?;
+ let target = format!("{} filter=lfs diff=lfs merge=lfs -text", pattern);
+ let new_lines: Vec<&str> = content.lines().filter(|l| l.trim() != target).collect();
+
+ if new_lines.len() == content.lines().count() {
+ return Ok(false);
+ }
+
+ // BUGFIX: `lines()` strips newlines, so `join("\n")` alone dropped the
+ // trailing newline that `lfs_gitattributes_add` deliberately maintains
+ // (it checks `content.ends_with('\n')`). Re-append it when any lines
+ // remain so remove-then-add keeps the file in canonical form.
+ let mut new_content = new_lines.join("\n");
+ if !new_content.is_empty() {
+ new_content.push('\n');
+ }
+ fs::write(&path, new_content).map_err(|e| GitError::IoError(e.to_string()))?;
+ Ok(true)
+ }
+
+ /// True when `path_str` matches any LFS-tracked glob in `.gitattributes`.
+ /// Unparsable globs are skipped rather than treated as errors.
+ pub fn lfs_gitattributes_match(&self, path_str: &str) -> GitResult {
+ let patterns = self.lfs_gitattributes_list()?;
+ for pattern in patterns {
+ let glob_str = pattern.split_whitespace().next().unwrap_or(&pattern);
+ if let Ok(glob) = Glob::new(glob_str) {
+ let matcher = glob.compile_matcher();
+ if matcher.is_match(path_str) {
+ return Ok(true);
+ }
+ }
+ }
+ Ok(false)
+ }
+
+ /// Root of the local LFS object store inside the git dir.
+ fn lfs_objects_dir(&self) -> PathBuf {
+ self.repo().path().join("lfs").join("objects")
+ }
+
+ /// Validates that the OID is at least 4 characters for path splitting.
// ---- libs/git/lfs/ops.rs (continued): local LFS object cache on GitDomain ----

    /// Ensure `oid` can be split into the `aa/bb/<oid>` directory layout.
    ///
    /// Valid LFS OIDs are 64-char SHA256 hex; only the 4-char minimum needed
    /// for the 2+2 path split is enforced here, so callers with truncated
    /// OIDs get a clear error instead of a slice panic.
    fn lfs_validate_oid(&self, oid: &LfsOid) -> GitResult<()> {
        if oid.as_str().len() < 4 {
            return Err(GitError::Internal(format!(
                "LFS OID too short for path splitting: {}",
                oid
            )));
        }
        Ok(())
    }

    /// Cheap existence probe for `oid` in the local cache.
    ///
    /// Never errors: an OID too short to path-split simply reports "not
    /// cached" (same outcome as the original inline length check).
    pub fn lfs_object_cached(&self, oid: &LfsOid) -> bool {
        self.lfs_object_path(oid)
            .map(|path| path.exists())
            .unwrap_or(false)
    }

    /// On-disk location `lfs_objects_dir()/aa/bb/<oid>` for `oid`.
    pub fn lfs_object_path(&self, oid: &LfsOid) -> GitResult<PathBuf> {
        self.lfs_validate_oid(oid)?;
        let (p1, rest) = oid.as_str().split_at(2);
        let (p2, _) = rest.split_at(2);
        Ok(self.lfs_objects_dir().join(p1).join(p2).join(oid.as_str()))
    }

    /// Store `content` under `oid`, creating parent directories as needed.
    ///
    /// Fix: the object is first written to a `.tmp` sibling and then renamed
    /// into place, so a crash or a concurrent `lfs_object_get` can never
    /// observe a half-written object (the previous code wrote in place).
    pub fn lfs_object_put(&self, oid: &LfsOid, content: &[u8]) -> GitResult<PathBuf> {
        use std::io::Write;

        let path = self.lfs_object_path(oid)?;
        if let Some(parent) = path.parent() {
            fs::create_dir_all(parent).map_err(|e| GitError::IoError(e.to_string()))?;
        }
        // Same-directory temp file so the rename stays on one filesystem
        // (rename is atomic on POSIX in that case). Concurrent puts of the
        // same OID write identical bytes, so "last rename wins" is harmless.
        let tmp = path.with_extension("tmp");
        {
            let mut file =
                fs::File::create(&tmp).map_err(|e| GitError::IoError(e.to_string()))?;
            file.write_all(content)
                .map_err(|e| GitError::IoError(e.to_string()))?;
        }
        fs::rename(&tmp, &path).map_err(|e| GitError::IoError(e.to_string()))?;
        Ok(path)
    }

    /// Read the object for `oid`, erroring with `LfsError` when absent.
    pub fn lfs_object_get(&self, oid: &LfsOid) -> GitResult<Vec<u8>> {
        let path = self.lfs_object_path(oid)?;
        if !path.exists() {
            return Err(GitError::LfsError(format!(
                "object {} not found in local cache",
                oid
            )));
        }
        fs::read(&path).map_err(|e| GitError::IoError(e.to_string()))
    }

    /// List every object OID currently present in the local cache.
    pub fn lfs_object_list(&self) -> GitResult<Vec<LfsOid>> {
        let base = self.lfs_objects_dir();
        if !base.exists() {
            return Ok(Vec::new());
        }
        let mut oids = Vec::new();
        self.lfs_object_list_impl(&base, &mut oids)?;
        Ok(oids)
    }

    /// Recursive helper for `lfs_object_list`.
    fn lfs_object_list_impl(&self, dir: &Path, oids: &mut Vec<LfsOid>) -> GitResult<()> {
        for entry in fs::read_dir(dir).map_err(|e| GitError::IoError(e.to_string()))? {
            let entry = entry.map_err(|e| GitError::IoError(e.to_string()))?;
            let path = entry.path();
            if path.is_dir() {
                self.lfs_object_list_impl(&path, oids)?;
            } else if path.is_file() {
                if let Some(name) = path.file_name().and_then(|n| n.to_str()) {
                    // Fix: only report names that are plausible SHA256 OIDs.
                    // Stray files (e.g. a leftover `.tmp` from an interrupted
                    // put) were previously listed as objects.
                    let oid = LfsOid::new(name);
                    if oid.is_valid() {
                        oids.push(oid);
                    }
                }
            }
        }
        Ok(())
    }

    /// Delete the cached object for `oid`. Returns whether a file was removed.
    pub fn lfs_object_delete(&self, oid: &LfsOid) -> GitResult<bool> {
        let path = self.lfs_object_path(oid)?;
        if path.exists() {
            fs::remove_file(&path).map_err(|e| GitError::IoError(e.to_string()))?;
            Ok(true)
        } else {
            Ok(false)
        }
    }

    /// Total size in bytes of all cached objects (unreadable entries are
    /// skipped, matching the original best-effort behavior).
    pub fn lfs_cache_size(&self) -> GitResult<u64> {
        let mut total = 0u64;
        for oid in self.lfs_object_list()? {
            if let Ok(path) = self.lfs_object_path(&oid) {
                if let Ok(meta) = fs::metadata(&path) {
                    total += meta.len();
                }
            }
        }
        Ok(total)
    }

    /// Parse the repo's `.lfsconfig` (flat `key = value` lines, `#` comments).
    ///
    /// NOTE(review): real `.lfsconfig` files use git-config INI syntax
    /// (`[lfs]` sections). This parser only understands the flat form that
    /// `lfs_config_set` below writes — confirm no interop with git-lfs
    /// proper is expected before relying on it.
    pub fn lfs_config(&self) -> GitResult<LfsConfig> {
        let root = self.repo().workdir().unwrap_or_else(|| self.repo().path());
        let path = root.join(".lfsconfig");
        if !path.exists() {
            return Ok(LfsConfig::new());
        }
        let content =
            fs::read_to_string(&path).map_err(|e| GitError::IoError(e.to_string()))?;
        let mut config = LfsConfig::new();
        for line in content.lines() {
            let line = line.trim();
            if line.is_empty() || line.starts_with('#') {
                continue;
            }
            if let Some((k, v)) = line.split_once('=') {
                let k = k.trim();
                let v = v.trim();
                if k == "lfs.url" || k == "lfs.endpoint" {
                    config.endpoint = Some(v.to_string());
                } else if k == "lfs.accesskey" || k == "lfs.access_token" {
                    config.access_token = Some(v.to_string());
                }
            }
        }
        Ok(config)
    }

    /// Write `.lfsconfig` in the flat form read back by `lfs_config`.
    pub fn lfs_config_set(&self, config: &LfsConfig) -> GitResult<()> {
        let root = self.repo().workdir().unwrap_or_else(|| self.repo().path());
        let path = root.join(".lfsconfig");
        let mut lines = Vec::new();
        if let Some(ref ep) = config.endpoint {
            lines.push(format!("lfs.url = {}", ep));
        }
        if let Some(ref tok) = config.access_token {
            lines.push(format!("lfs.access_token = {}", tok));
        }
        fs::write(&path, lines.join("\n") + "\n")
            .map_err(|e| GitError::IoError(e.to_string()))?;
        Ok(())
    }
}

// ---- libs/git/lfs/types.rs ----
//! Serializable types for the LFS domain.

use serde::{Deserialize, Serialize};

/// The SHA256 OID of an LFS object (stored lowercase).
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, Hash)]
pub struct LfsOid(pub String);

impl LfsOid {
    /// Normalize `hex` to lowercase; validity is checked via `is_valid`.
    pub fn new(hex: &str) -> Self {
        Self(hex.to_lowercase())
    }

    pub fn as_str(&self) -> &str {
        &self.0
    }

    /// True when the OID is a full 64-char SHA256 hex string.
    pub fn is_valid(&self) -> bool {
        self.0.len() == 64 && self.0.chars().all(|c| c.is_ascii_hexdigit())
    }
}

impl std::fmt::Display for LfsOid {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}", self.0)
    }
}

impl AsRef<str> for LfsOid {
    fn as_ref(&self) -> &str {
        &self.0
    }
}

/// An LFS pointer file parsed from a blob.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct LfsPointer {
    pub version: String,
    pub oid: LfsOid,
    pub size: u64,
    /// Any `key value` lines beyond the three mandatory ones.
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub extra: Vec<(String, String)>,
}

impl LfsPointer {
    /// Parse a pointer from raw blob bytes; `None` for non-UTF-8 input.
    pub fn from_bytes(data: &[u8]) -> Option<Self> {
        Self::from_str(std::str::from_utf8(data).ok()?)
    }

    /// Parse a pointer file. Returns `None` unless a `version` line with the
    /// v1 spec URL, a valid 64-hex `oid sha256:` line, and a `size` line are
    /// all present. Unrecognized `key value` lines are kept in `extra`.
    pub fn from_str(s: &str) -> Option<Self> {
        let mut version = None;
        let mut oid = None;
        let mut size = None;
        let mut extra = Vec::new();

        for line in s.lines() {
            let line = line.trim();
            if line.is_empty() {
                continue;
            }
            if let Some(val) = line.strip_prefix("version ") {
                version = Some(val.trim().to_string());
            } else if let Some(val) = line.strip_prefix("oid sha256:") {
                oid = Some(LfsOid::new(val.trim()));
            } else if let Some(val) = line.strip_prefix("size ") {
                size = val.trim().parse::<u64>().ok();
            } else if let Some((k, v)) = line.split_once(' ') {
                extra.push((k.to_string(), v.to_string()));
            }
        }

        let version = version?;
        if version != "https://git-lfs.github.com/spec/v1" {
            return None;
        }
        let oid = oid?;
        if !oid.is_valid() {
            return None;
        }
        let size = size?;

        Some(Self {
            version,
            oid,
            size,
            extra,
        })
    }

    /// Serialize back to the pointer-file text format (trailing newline).
    pub fn to_string(&self) -> String {
        let mut lines = Vec::with_capacity(3 + self.extra.len());
        lines.push(format!("version {}", self.version));
        lines.push(format!("oid sha256:{}", self.oid));
        lines.push(format!("size {}", self.size));
        for (k, v) in &self.extra {
            lines.push(format!("{} {}", k, v));
        }
        lines.join("\n") + "\n"
    }

    /// Path inside `.git/lfs/objects/` where this object would be stored locally.
    pub fn local_object_path(&self, git_dir: &std::path::Path) -> std::path::PathBuf {
        let oid = self.oid.as_str();
        // LFS OIDs must be at least 4 chars (2+2 split) for the path layout.
        // Valid LFS OIDs are 64-char SHA256 hex, so this is always safe for
        // valid pointers. Defensive: an invalid OID uses the full OID as the
        // filename instead of panicking on the split.
        if oid.len() < 4 {
            return git_dir.join("lfs").join("objects").join(oid);
        }
        let (p1, rest) = oid.split_at(2);
        let (p2, _) = rest.split_at(2);
        git_dir
            .join("lfs")
            .join("objects")
            .join(p1)
            .join(p2)
            .join(oid)
    }
}

/// Metadata for an LFS-managed file in a tree entry.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LfsEntry {
    pub path: String,
    pub pointer: LfsPointer,
    pub size: u64,
}

/// LFS configuration for a repository.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct LfsConfig {
    pub endpoint: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub access_token: Option<String>,
}

impl LfsConfig {
    pub fn new() -> Self {
        Self::default()
    }

    /// Builder-style setter for the LFS server URL.
    pub fn endpoint(mut self, url: &str) -> Self {
        self.endpoint = Some(url.to_string());
        self
    }

    /// Builder-style setter for the access token.
    pub fn access_token(mut self, token: &str) -> Self {
        self.access_token = Some(token.to_string());
        self
    }
}

// ---- libs/git/lib.rs ----
pub mod archive;
pub mod blame;
pub mod blob;
pub mod branch;
pub mod commit;
pub mod config;
pub mod description;
pub mod diff;
pub mod domain;
pub mod error;
pub mod hook;
pub mod http;
pub mod lfs;
pub mod merge;
pub(crate) mod ref_utils;
pub mod reference;
pub mod ssh;
pub mod tags;
pub mod tree;

pub use archive::types::{ArchiveEntry, ArchiveFormat, ArchiveOptions, ArchiveSummary};
pub use blame::ops::BlameOptions;
pub use blob::types::{BlobContent, BlobInfo};
pub use branch::types::{BranchDiff, BranchInfo, BranchSummary};
pub use commit::graph::{CommitGraph, CommitGraphLine, CommitGraphOptions};
pub use commit::traverse::CommitWalkOptions;
pub use commit::types::{
    CommitBlameHunk, CommitBlameLine, CommitDiffFile, CommitDiffHunk, CommitDiffStats, CommitMeta,
    CommitOid, CommitRefInfo, CommitReflogEntry, CommitSignature, CommitSort,
};
pub use config::types::{ConfigEntry, ConfigSnapshot};
pub use diff::ops::diff_to_side_by_side;
pub use diff::types::{
    DiffDelta, DiffDeltaStatus, DiffFile, DiffHunk, DiffLine, DiffOptions, DiffResult, DiffStats,
    SideBySideChangeType, SideBySideDiffResult, SideBySideFile, SideBySideLine,
};
pub use domain::GitDomain;
pub use error::{GitError, GitResult};
pub use hook::pool::GitHookPool;
pub use hook::pool::types::{HookTask, PoolConfig, PoolMetrics, TaskType};
pub use lfs::types::{LfsConfig, LfsEntry, LfsOid, LfsPointer};
pub use merge::types::{MergeAnalysisResult, MergeOptions, MergePreferenceResult, MergeheadInfo};
pub use reference::types::RefInfo;
pub use tags::types::{TagInfo, TagSummary};
pub use tree::types::{TreeEntry, TreeInfo};
domain::GitDomain; +pub use error::{GitError, GitResult}; +pub use hook::pool::GitHookPool; +pub use hook::pool::types::{HookTask, PoolConfig, PoolMetrics, TaskType}; +pub use lfs::types::{LfsConfig, LfsEntry, LfsOid, LfsPointer}; +pub use merge::types::{MergeAnalysisResult, MergeOptions, MergePreferenceResult, MergeheadInfo}; +pub use reference::types::RefInfo; +pub use tags::types::{TagInfo, TagSummary}; +pub use tree::types::{TreeEntry, TreeInfo}; diff --git a/libs/git/merge/mod.rs b/libs/git/merge/mod.rs new file mode 100644 index 0000000..d912fd0 --- /dev/null +++ b/libs/git/merge/mod.rs @@ -0,0 +1,3 @@ +//! Merge domain — all merge-related operations on a GitDomain. +pub mod ops; +pub mod types; diff --git a/libs/git/merge/ops.rs b/libs/git/merge/ops.rs new file mode 100644 index 0000000..b6c5b91 --- /dev/null +++ b/libs/git/merge/ops.rs @@ -0,0 +1,345 @@ +//! Merge operations. + +use crate::commit::types::CommitOid; +use crate::merge::types::{ + MergeAnalysisResult, MergeOptions, MergePreferenceResult, MergeheadInfo, +}; +use crate::{GitDomain, GitError, GitResult}; + +impl GitDomain { + pub fn merge_analysis( + &self, + their_oid: &CommitOid, + ) -> GitResult<(MergeAnalysisResult, MergePreferenceResult)> { + let oid = their_oid + .to_oid() + .map_err(|_| GitError::InvalidOid(their_oid.to_string()))?; + + let head_ref = self + .repo() + .find_reference("HEAD") + .map_err(|e| GitError::Internal(e.to_string()))?; + let annotated = self + .repo() + .reference_to_annotated_commit(&head_ref) + .map_err(|e| GitError::Internal(e.to_string()))?; + let their_annotated = self + .repo() + .find_annotated_commit(oid) + .map_err(|e: git2::Error| GitError::Internal(e.to_string()))?; + + let (analysis, pref) = self + .repo() + .merge_analysis(&[&annotated, &their_annotated]) + .map_err(|e| GitError::Internal(e.to_string()))?; + + Ok(( + MergeAnalysisResult::from_git2(analysis), + MergePreferenceResult::from_git2(pref), + )) + } + + pub fn merge_analysis_for_ref( + &self, + 
ref_name: &str, + their_oid: &CommitOid, + ) -> GitResult<(MergeAnalysisResult, MergePreferenceResult)> { + let oid = their_oid + .to_oid() + .map_err(|_| GitError::InvalidOid(their_oid.to_string()))?; + + let reference = self + .repo() + .find_reference(ref_name) + .map_err(|e| GitError::Internal(e.to_string()))?; + let their_annotated = self + .repo() + .find_annotated_commit(oid) + .map_err(|e: git2::Error| GitError::Internal(e.to_string()))?; + + let (analysis, pref) = self + .repo() + .merge_analysis_for_ref(&reference, &[&their_annotated]) + .map_err(|e| GitError::Internal(e.to_string()))?; + + Ok(( + MergeAnalysisResult::from_git2(analysis), + MergePreferenceResult::from_git2(pref), + )) + } + + pub fn merge_base(&self, oid1: &CommitOid, oid2: &CommitOid) -> GitResult { + let o1 = oid1 + .to_oid() + .map_err(|_| GitError::InvalidOid(oid1.to_string()))?; + let o2 = oid2 + .to_oid() + .map_err(|_| GitError::InvalidOid(oid2.to_string()))?; + + let base = self + .repo() + .merge_base(o1, o2) + .map_err(|e| GitError::Internal(e.to_string()))?; + + Ok(CommitOid::from_git2(base)) + } + + pub fn merge_base_many(&self, oids: &[CommitOid]) -> GitResult { + let oids: Vec<_> = oids + .iter() + .map(|o| o.to_oid().map_err(|_| GitError::InvalidOid(o.to_string()))) + .collect::, _>>()?; + + let base = self + .repo() + .merge_base_many(&oids) + .map_err(|e| GitError::Internal(e.to_string()))?; + + Ok(CommitOid::from_git2(base)) + } + + pub fn merge_base_octopus(&self, oids: &[CommitOid]) -> GitResult { + let oids: Vec<_> = oids + .iter() + .map(|o| o.to_oid().map_err(|_| GitError::InvalidOid(o.to_string()))) + .collect::, _>>()?; + + let base = self + .repo() + .merge_base_octopus(&oids) + .map_err(|e| GitError::Internal(e.to_string()))?; + + Ok(CommitOid::from_git2(base)) + } + + pub fn merge_commits( + &self, + local_commit: &CommitOid, + remote_commit: &CommitOid, + opts: Option, + ) -> GitResult<()> { + let local_oid = local_commit + .to_oid() + .map_err(|_| 
GitError::InvalidOid(local_commit.to_string()))?; + let remote_oid = remote_commit + .to_oid() + .map_err(|_| GitError::InvalidOid(remote_commit.to_string()))?; + + let local = self + .repo() + .find_commit(local_oid) + .map_err(|e| GitError::Internal(e.to_string()))?; + let remote = self + .repo() + .find_commit(remote_oid) + .map_err(|e| GitError::Internal(e.to_string()))?; + + let mut merge_opts = opts + .map(|o| o.to_git2()) + .unwrap_or_else(git2::MergeOptions::new); + + self.repo() + .merge_commits(&local, &remote, Some(&mut merge_opts)) + .map_err(|e| GitError::Internal(e.to_string()))?; + Ok(()) + } + + pub fn merge_trees( + &self, + ancestor_tree: &CommitOid, + our_tree: &CommitOid, + their_tree: &CommitOid, + opts: Option, + ) -> GitResult<()> { + let ancestor_oid = ancestor_tree + .to_oid() + .map_err(|_| GitError::InvalidOid(ancestor_tree.to_string()))?; + let our_oid = our_tree + .to_oid() + .map_err(|_| GitError::InvalidOid(our_tree.to_string()))?; + let their_oid = their_tree + .to_oid() + .map_err(|_| GitError::InvalidOid(their_tree.to_string()))?; + + let ancestor = self + .repo() + .find_tree(ancestor_oid) + .map_err(|e| GitError::Internal(e.to_string()))?; + let ours = self + .repo() + .find_tree(our_oid) + .map_err(|e| GitError::Internal(e.to_string()))?; + let theirs = self + .repo() + .find_tree(their_oid) + .map_err(|e| GitError::Internal(e.to_string()))?; + + let mut merge_opts = opts + .map(|o| o.to_git2()) + .unwrap_or_else(git2::MergeOptions::new); + + self.repo() + .merge_trees(&ancestor, &ours, &theirs, Some(&mut merge_opts)) + .map_err(|e| GitError::Internal(e.to_string()))?; + Ok(()) + } + + pub fn merge_abort(&self) -> GitResult<()> { + self.repo() + .cleanup_state() + .map_err(|e| GitError::Internal(e.to_string())) + } + + pub fn merge_is_in_progress(&self) -> bool { + matches!( + self.repo().state(), + git2::RepositoryState::Merge + | git2::RepositoryState::Revert + | git2::RepositoryState::RevertSequence + | 
git2::RepositoryState::CherryPick + | git2::RepositoryState::CherryPickSequence + ) + } + + pub fn mergehead_list(&mut self) -> GitResult> { + let mut heads = Vec::new(); + self.repo_mut()? + .mergehead_foreach(|oid| { + heads.push(MergeheadInfo { + oid: CommitOid::from_git2(*oid), + }); + true + }) + .map_err(|e: git2::Error| GitError::Internal(e.to_string()))?; + Ok(heads) + } + + pub fn merge_is_conflicted(&self) -> bool { + self.repo() + .index() + .map(|idx| idx.has_conflicts()) + .unwrap_or(false) + } + + /// Squash all commits from `source_branch` into a single commit on top of `base`. + pub fn squash_commits(&self, base: &CommitOid, source_branch: &str) -> GitResult { + let base_oid = base + .to_oid() + .map_err(|_| GitError::InvalidOid(base.to_string()))?; + + let source_ref = self + .repo() + .find_reference(source_branch) + .map_err(|e| GitError::Internal(e.to_string()))?; + let head_oid = source_ref + .target() + .ok_or_else(|| GitError::Internal("Branch has no target OID".to_string()))?; + + // Get the merge base (the common ancestor) + let merge_base = self + .repo() + .merge_base(base_oid, head_oid) + .map_err(|e| GitError::Internal(e.to_string()))?; + + // Collect all commits from merge_base (exclusive) to head (inclusive) + let mut revwalk = self + .repo() + .revwalk() + .map_err(|e| GitError::Internal(e.to_string()))?; + revwalk + .push(head_oid) + .map_err(|e| GitError::Internal(e.to_string()))?; + revwalk + .hide(merge_base) + .map_err(|e| GitError::Internal(e.to_string()))?; + + let mut commits: Vec = Vec::new(); + for oid_result in revwalk { + let oid = oid_result.map_err(|e| GitError::Internal(e.to_string()))?; + let commit = self + .repo() + .find_commit(oid) + .map_err(|e| GitError::Internal(e.to_string()))?; + commits.push(commit); + } + + if commits.is_empty() { + // Nothing to squash — return base as-is + return Ok(CommitOid::from_git2(base_oid)); + } + + // Sort commits oldest-first (topological order) + commits.sort_by_key(|c| 
c.time().seconds()); + + // Apply all patches onto a temporary tree. + // Strategy: apply patches sequentially using `git2::apply` on the index. + let base_tree = self + .repo() + .find_commit(base_oid) + .map_err(|e| GitError::Internal(e.to_string()))? + .tree() + .map_err(|e| GitError::Internal(e.to_string()))?; + + // Build a diff from the accumulated patches + let sig = self + .repo() + .signature() + .map_err(|e| GitError::Internal(e.to_string()))?; + + // Apply each commit's diff sequentially to build the squash tree. + let mut current_tree = base_tree; + for commit in &commits { + let commit_tree = commit + .tree() + .map_err(|e| GitError::Internal(e.to_string()))?; + let diff = self + .repo() + .diff_tree_to_tree(Some(¤t_tree), Some(&commit_tree), None) + .map_err(|e| GitError::Internal(e.to_string()))?; + + // apply_to_tree applies the diff to current_tree, returning a new Index. + let mut new_index = self + .repo() + .apply_to_tree(¤t_tree, &diff, None) + .map_err(|e| GitError::Internal(e.to_string()))?; + + let new_tree_oid = new_index + .write_tree() + .map_err(|e| GitError::Internal(e.to_string()))?; + + current_tree = self + .repo() + .find_tree(new_tree_oid) + .map_err(|e| GitError::Internal(e.to_string()))?; + } + + let squash_tree = current_tree; + + // Build the squash commit message: list all squashed commits + let mut msg = String::new(); + for commit in &commits { + if !msg.is_empty() { + msg.push_str("\n"); + } + msg.push_str(&format!("- {}", commit.summary().unwrap_or("(no message)"))); + } + + // Create the squash commit on top of base + let squash_oid = self + .repo() + .commit( + Some("HEAD"), + &sig, + &sig, + &msg, + &squash_tree, + &[&self + .repo() + .find_commit(base_oid) + .map_err(|e| GitError::Internal(e.to_string()))?], + ) + .map_err(|e| GitError::Internal(e.to_string()))?; + + Ok(CommitOid::from_git2(squash_oid)) + } +} diff --git a/libs/git/merge/types.rs b/libs/git/merge/types.rs new file mode 100644 index 0000000..a7cf7a5 --- 
/dev/null +++ b/libs/git/merge/types.rs @@ -0,0 +1,112 @@ +//! Serializable types for the merge domain. + +use serde::{Deserialize, Serialize}; + +use crate::commit::types::CommitOid; + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct MergeAnalysisResult { + pub is_none: bool, + pub is_normal: bool, + pub is_up_to_date: bool, + pub is_fast_forward: bool, + pub is_unborn: bool, +} + +impl MergeAnalysisResult { + pub fn from_git2(analysis: git2::MergeAnalysis) -> Self { + Self { + is_none: analysis.is_none(), + is_normal: analysis.is_normal(), + is_up_to_date: analysis.is_up_to_date(), + is_fast_forward: analysis.is_fast_forward(), + is_unborn: analysis.is_unborn(), + } + } +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct MergePreferenceResult { + pub is_none: bool, + pub is_no_fast_forward: bool, + pub is_fastforward_only: bool, +} + +impl MergePreferenceResult { + pub fn from_git2(pref: git2::MergePreference) -> Self { + Self { + is_none: pref.is_none(), + is_no_fast_forward: pref.is_no_fast_forward(), + is_fastforward_only: pref.is_fastforward_only(), + } + } +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct MergeheadInfo { + pub oid: CommitOid, +} + +#[derive(Debug, Clone, Default)] +pub struct MergeOptions { + find_renames: bool, + fail_on_conflict: bool, + skip_reuc: bool, + no_recursive: bool, + rename_threshold: u32, + target_limit: u32, + recursion_limit: u32, +} + +impl MergeOptions { + pub fn new() -> Self { + Self::default() + } + + pub fn find_renames(mut self, find: bool) -> Self { + self.find_renames = find; + self + } + + pub fn fail_on_conflict(mut self, fail: bool) -> Self { + self.fail_on_conflict = fail; + self + } + + pub fn skip_reuc(mut self, skip: bool) -> Self { + self.skip_reuc = skip; + self + } + + pub fn no_recursive(mut self, disable: bool) -> Self { + self.no_recursive = disable; + self + } + + pub fn rename_threshold(mut self, thresh: u32) -> Self { + self.rename_threshold = thresh; + self + 
} + + pub fn target_limit(mut self, limit: u32) -> Self { + self.target_limit = limit; + self + } + + pub fn recursion_limit(mut self, limit: u32) -> Self { + self.recursion_limit = limit; + self + } + + pub fn to_git2(&self) -> git2::MergeOptions { + let mut opts = git2::MergeOptions::new(); + opts.find_renames(self.find_renames); + opts.fail_on_conflict(self.fail_on_conflict); + opts.skip_reuc(self.skip_reuc); + opts.no_recursive(self.no_recursive); + opts.rename_threshold(self.rename_threshold); + opts.target_limit(self.target_limit); + opts.recursion_limit(self.recursion_limit); + opts + } +} diff --git a/libs/git/ref_utils.rs b/libs/git/ref_utils.rs new file mode 100644 index 0000000..9560ffc --- /dev/null +++ b/libs/git/ref_utils.rs @@ -0,0 +1,35 @@ +//! Shared utility functions for reference name validation. + +use crate::GitError; + +/// # Rules +/// - Must not be empty +/// - Must not start with '.' +/// - Must not end with '/' +/// - Must not contain '..' +/// - Must not contain spaces, '~', '^', ':', '?', '*', '[', or '\' +/// +/// # Returns +/// - `Ok(())` if name is valid +/// - `Err(GitError::InvalidRefName)` if name is invalid +pub fn validate_ref_name(name: &str) -> Result<(), GitError> { + if name.is_empty() + || name.starts_with('.') + || name.ends_with('/') + || name.contains("..") + || name.contains(' ') + || name.contains('~') + || name.contains('^') + || name.contains(':') + || name.contains('?') + || name.contains('*') + || name.contains('[') + || name.contains('\\') + { + return Err(GitError::InvalidRefName(format!( + "invalid ref name: {}", + name + ))); + } + Ok(()) +} diff --git a/libs/git/reference/mod.rs b/libs/git/reference/mod.rs new file mode 100644 index 0000000..2d742b2 --- /dev/null +++ b/libs/git/reference/mod.rs @@ -0,0 +1,3 @@ +//! Reference domain — low-level ref operations with CAS support. 
+pub mod ops; +pub mod types; diff --git a/libs/git/reference/ops.rs b/libs/git/reference/ops.rs new file mode 100644 index 0000000..fe0719f --- /dev/null +++ b/libs/git/reference/ops.rs @@ -0,0 +1,257 @@ +//! Reference operations. + +use crate::commit::types::CommitOid; +use crate::reference::types::RefInfo; +use crate::{GitDomain, GitError, GitResult}; + +/// Specifies what the reference update should be based on. +pub enum RefUpdateTarget { + Oid(CommitOid), +} + +/// Result of a reference update operation. +pub struct RefUpdateResult { + pub name: String, + pub old_oid: Option, + pub new_oid: Option, +} + +impl GitDomain { + /// List all references matching a pattern (e.g. "refs/heads/*"). + pub fn ref_list(&self, pattern: Option<&str>) -> GitResult> { + let mut refs = Vec::new(); + let iter = self + .repo() + .references() + .map_err(|e| GitError::Internal(e.to_string()))?; + + for result in iter { + let r = result.map_err(|e| GitError::Internal(e.to_string()))?; + let name = match r.name() { + Some(n) => n.to_string(), + None => continue, + }; + + if let Some(pat) = pattern { + if !name_match(&name, pat) { + continue; + } + } + + let target = r.target().map(CommitOid::from_git2); + let oid = r + .peel_to_commit() + .ok() + .map(|c| CommitOid::from_git2(c.id())); + + let is_symbolic = r.kind() == Some(git2::ReferenceType::Symbolic); + let is_branch = name.starts_with("refs/heads/"); + let is_remote = name.starts_with("refs/remotes/"); + let is_tag = name.starts_with("refs/tags/"); + let is_note = name.starts_with("refs/notes/"); + refs.push(RefInfo { + name, + oid, + target, + is_symbolic, + is_branch, + is_remote, + is_tag, + is_note, + }); + } + + Ok(refs) + } + + pub fn ref_get(&self, name: &str) -> GitResult { + let r = self + .repo() + .find_reference(name) + .map_err(|_e| GitError::RefNotFound(name.to_string()))?; + + let target = r.target().map(CommitOid::from_git2); + let oid = r + .peel_to_commit() + .ok() + .map(|c| CommitOid::from_git2(c.id())); + + 
Ok(RefInfo { + name: name.to_string(), + oid, + target, + is_symbolic: r.kind() == Some(git2::ReferenceType::Symbolic), + is_branch: name.starts_with("refs/heads/"), + is_remote: name.starts_with("refs/remotes/"), + is_tag: name.starts_with("refs/tags/"), + is_note: name.starts_with("refs/notes/"), + }) + } + + pub fn ref_create( + &self, + name: &str, + oid: CommitOid, + force: bool, + message: Option<&str>, + ) -> GitResult { + let git_oid = oid + .to_oid() + .map_err(|_| GitError::InvalidOid(oid.to_string()))?; + + let old = self.repo().find_reference(name).ok(); + let old_oid = old + .as_ref() + .and_then(|r| r.target().map(CommitOid::from_git2)); + + self.repo() + .reference(name, git_oid, force, message.unwrap_or("create ref")) + .map_err(|e| { + if !force && e.code() == git2::ErrorCode::Exists { + GitError::BranchExists(name.to_string()) + } else { + GitError::Internal(e.to_string()) + } + })?; + + Ok(RefUpdateResult { + name: name.to_string(), + old_oid, + new_oid: Some(oid), + }) + } + + pub fn ref_delete(&self, name: &str) -> GitResult { + let mut r = self + .repo() + .find_reference(name) + .map_err(|_e| GitError::RefNotFound(name.to_string()))?; + + let target = r + .target() + .map(CommitOid::from_git2) + .ok_or_else(|| GitError::Internal("ref has no target".to_string()))?; + + r.delete().map_err(|e| GitError::Internal(e.to_string()))?; + + Ok(target) + } + + /// Rename a reference. Fails if new name already exists unless `force` is true. 
+ pub fn ref_rename(&self, old_name: &str, new_name: &str, force: bool) -> GitResult { + let mut r = self + .repo() + .find_reference(old_name) + .map_err(|_e| GitError::RefNotFound(old_name.to_string()))?; + + let target = r.target().map(CommitOid::from_git2); + let oid = r + .peel_to_commit() + .ok() + .map(|c| CommitOid::from_git2(c.id())); + let ref_kind = r.kind(); // Capture kind before rename + + if !force && self.repo().find_reference(new_name).is_ok() { + return Err(GitError::BranchExists(new_name.to_string())); + } + + r.rename(new_name, force, "rename ref") + .map_err(|e| GitError::Internal(e.to_string()))?; + + Ok(RefInfo { + name: new_name.to_string(), + oid, + target, + is_symbolic: ref_kind == Some(git2::ReferenceType::Symbolic), + is_branch: new_name.starts_with("refs/heads/"), + is_remote: new_name.starts_with("refs/remotes/"), + is_tag: new_name.starts_with("refs/tags/"), + is_note: new_name.starts_with("refs/notes/"), + }) + } + + pub fn ref_update( + &self, + name: &str, + new_oid: CommitOid, + expected_oid: Option, + message: Option<&str>, + ) -> GitResult { + let old = self + .repo() + .find_reference(name) + .map_err(|_e| GitError::RefNotFound(name.to_string()))?; + + let old_oid = old.target().map(CommitOid::from_git2); + + // CAS check + if let Some(expected) = expected_oid { + let git_expected = expected + .to_oid() + .map_err(|_| GitError::InvalidOid(expected.to_string()))?; + if old.target() != Some(git_expected) { + return Err(GitError::Internal( + "ref update failed: unexpected current value (CAS mismatch)".to_string(), + )); + } + } + + let git_new_oid = new_oid + .to_oid() + .map_err(|_| GitError::InvalidOid(new_oid.to_string()))?; + + // Use reference_matching for CAS. Pass None as previous target only when + // the ref has no target (symbolic ref with broken target) — fall back to + // unconditional reference update in that case. 
+ match old.target() { + Some(prev) => { + self.repo() + .reference_matching( + name, + git_new_oid, + true, + prev, + message.unwrap_or("update ref"), + ) + .map_err(|e| GitError::Internal(e.to_string()))?; + } + None => { + self.repo() + .reference(name, git_new_oid, true, message.unwrap_or("update ref")) + .map_err(|e| GitError::Internal(e.to_string()))?; + } + } + + Ok(RefUpdateResult { + name: name.to_string(), + old_oid, + new_oid: Some(new_oid), + }) + } + + pub fn ref_exists(&self, name: &str) -> bool { + self.repo().find_reference(name).is_ok() + } + + /// Get the peeled (commit) OID of a reference. + pub fn ref_target(&self, name: &str) -> GitResult> { + let r = self + .repo() + .find_reference(name) + .map_err(|_e| GitError::RefNotFound(name.to_string()))?; + + Ok(r.peel_to_commit() + .ok() + .map(|c| CommitOid::from_git2(c.id()))) + } +} + +fn name_match(name: &str, pattern: &str) -> bool { + if let Some(stripped) = pattern.strip_suffix("/**") { + name.starts_with(stripped) + } else if let Some(stripped) = pattern.strip_suffix("/*") { + name.starts_with(stripped) && !name[stripped.len()..].contains('/') + } else { + name == pattern + } +} diff --git a/libs/git/reference/types.rs b/libs/git/reference/types.rs new file mode 100644 index 0000000..469c0f6 --- /dev/null +++ b/libs/git/reference/types.rs @@ -0,0 +1,18 @@ +//! Serializable types for the reference domain. + +use serde::{Deserialize, Serialize}; + +use crate::commit::types::CommitOid; + +/// A lightweight ref entry for listing. 
+#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct RefInfo { + pub name: String, + pub oid: Option, + pub target: Option, + pub is_symbolic: bool, + pub is_branch: bool, + pub is_remote: bool, + pub is_tag: bool, + pub is_note: bool, +} diff --git a/libs/git/ssh/authz.rs b/libs/git/ssh/authz.rs new file mode 100644 index 0000000..55c708f --- /dev/null +++ b/libs/git/ssh/authz.rs @@ -0,0 +1,307 @@ +use crate::error::GitError; +use base64::{Engine as _, engine::general_purpose}; +use db::database::AppDatabase; +use models::projects::MemberRole; +use models::projects::{project, project_history_name, project_members}; +use models::repos::{repo, repo_history_name}; +use models::users::{user, user_ssh_key}; +use sea_orm::sqlx::types::chrono; +use sea_orm::*; +use sha2::{Digest, Sha256}; +use slog::{Logger, error, info, warn}; + +/// SSH authentication service optimized for performance +pub struct SshAuthService { + db: AppDatabase, + logger: Logger, +} + +impl SshAuthService { + pub fn new(db: AppDatabase, logger: Logger) -> Self { + Self { db, logger } + } + + pub async fn find_repo( + &self, + namespace: &str, + repo_name: &str, + ) -> Result { + let namespace = self.find_namespace(namespace).await?; + self.find_repository_by_name_and_project(repo_name, namespace.id) + .await + } + + async fn find_namespace(&self, namespace: &str) -> Result { + if let Some(project) = project::Entity::find() + .filter(project::Column::Name.eq(namespace)) + .one(self.db.reader()) + .await + .map_err(|e| GitError::Internal(e.to_string()))? + { + return Ok(project); + } + + if let Some(history) = project_history_name::Entity::find() + .filter(project_history_name::Column::HistoryName.eq(namespace)) + .one(self.db.reader()) + .await + .map_err(|e| GitError::Internal(e.to_string()))? + { + if let Some(project) = project::Entity::find() + .filter(project::Column::Id.eq(history.project_uid)) + .one(self.db.reader()) + .await + .map_err(|e| GitError::Internal(e.to_string()))? 
+ { + return Ok(project); + } + } + + Err(GitError::NotFound("Project not found".to_string())) + } + + async fn find_repository_by_name_and_project( + &self, + repo_name: &str, + project_id: uuid::Uuid, + ) -> Result { + if let Some(repo) = repo::Entity::find() + .filter(repo::Column::RepoName.eq(repo_name)) + .filter(repo::Column::Project.eq(project_id)) + .one(self.db.reader()) + .await + .map_err(|e| GitError::Internal(e.to_string()))? + { + return Ok(repo); + } + + if let Some(history) = repo_history_name::Entity::find() + .filter(repo_history_name::Column::Name.eq(repo_name)) + .filter(repo_history_name::Column::Project.eq(project_id)) + .one(self.db.reader()) + .await + .map_err(|e| GitError::Internal(e.to_string()))? + { + if let Some(repo) = repo::Entity::find() + .filter(repo::Column::Id.eq(history.repo)) + .filter(repo::Column::Project.eq(project_id)) + .one(self.db.reader()) + .await + .map_err(|e| GitError::Internal(e.to_string()))? + { + return Ok(repo); + } + } + + Err(GitError::NotFound("Repository not found".to_string())) + } + + pub async fn find_user_by_public_key( + &self, + public_key_str: &str, + ) -> Result, DbErr> { + let fingerprint = match self.generate_fingerprint_from_public_key(public_key_str) { + Ok(fp) => fp, + Err(e) => { + error!(self.logger, "Failed to generate fingerprint"; "error" => %e); + return Ok(None); + } + }; + + let fingerprint_preview = if fingerprint.len() > 16 { + format!("{}...", &fingerprint[..16]) + } else { + fingerprint.clone() + }; + info!(self.logger, "Looking up user with SSH key"; "fingerprint" => %fingerprint_preview); + + let ssh_key = user_ssh_key::Entity::find() + .filter(user_ssh_key::Column::Fingerprint.eq(&fingerprint)) + .filter(user_ssh_key::Column::IsRevoked.eq(false)) + .one(self.db.reader()) + .await?; + + let ssh_key = match ssh_key { + Some(key) => key, + None => { + warn!(self.logger, "No SSH key found"; "fingerprint" => %fingerprint); + return Ok(None); + } + }; + + if 
self.is_key_expired(&ssh_key) { + warn!(self.logger, "SSH key expired"; "key_id" => ssh_key.id, "expires_at" => ?ssh_key.expires_at); + return Ok(None); + } + + let user_model = user::Entity::find() + .filter(user::Column::Uid.eq(ssh_key.user)) + .one(self.db.reader()) + .await?; + + if let Some(ref user) = user_model { + info!(self.logger, "User authenticated"; "user" => %user.username, "key" => %ssh_key.title); + self.update_key_last_used_async(ssh_key.id); + } + + Ok(user_model) + } + + fn is_key_expired(&self, ssh_key: &user_ssh_key::Model) -> bool { + if let Some(expires_at) = ssh_key.expires_at { + let now = chrono::Utc::now(); + now >= expires_at + } else { + false + } + } + + fn update_key_last_used_async(&self, key_id: i64) { + let db_clone = self.db.clone(); + let logger = self.logger.clone(); + tokio::spawn(async move { + if let Err(e) = Self::update_key_last_used_sync(db_clone, &logger, key_id).await { + warn!(&logger, "Failed to update key last_used"; "key_id" => key_id, "error" => %e); + } + }); + } + + async fn update_key_last_used_sync( + db: AppDatabase, + logger: &Logger, + key_id: i64, + ) -> Result<(), DbErr> { + let key = user_ssh_key::Entity::find_by_id(key_id) + .one(db.reader()) + .await?; + + if let Some(key) = key { + let now = chrono::Utc::now(); + let mut active_key: user_ssh_key::ActiveModel = key.into(); + active_key.last_used_at = Set(Some(now)); + active_key.updated_at = Set(now); + + active_key.update(db.writer()).await?; + info!(logger, "Updated key last_used"; "key_id" => key_id); + } + + Ok(()) + } + + pub async fn check_repo_permission( + &self, + user: &user::Model, + repo: &repo::Model, + is_write: bool, + ) -> bool { + if repo.created_by == user.uid { + info!(self.logger, "User is repo owner"; "user" => %user.username, "repo" => %repo.repo_name); + return true; + } + + if !is_write && !repo.is_private { + info!(self.logger, "Public repo allows read"; "repo" => %repo.repo_name); + return true; + } + + if self + 
.check_collaborator_permission(user, repo, is_write) + .await + .unwrap_or(false) + { + info!(self.logger, "User has collaborator access"; "user" => %user.username, "repo" => %repo.repo_name); + return true; + } + + let project_id = repo.project; + if self + .check_project_member_permission(user, project_id, is_write) + .await + .unwrap_or(false) + { + info!(self.logger, "User has project member access"; "user" => %user.username, "repo" => %repo.repo_name); + return true; + } + + warn!(self.logger, "Access denied"; "user" => %user.username, "repo" => %repo.repo_name, "write" => is_write); + false + } + + async fn check_collaborator_permission( + &self, + user: &user::Model, + repo: &repo::Model, + is_write: bool, + ) -> Result { + use models::repos::repo_collaborator; + + let collaborator = repo_collaborator::Entity::find() + .filter(repo_collaborator::Column::Repo.eq(repo.id)) + .filter(repo_collaborator::Column::User.eq(user.uid)) + .one(self.db.reader()) + .await?; + + if let Some(collab) = collaborator { + let roles: Vec<&str> = collab.scope.split_whitespace().collect(); + if roles.contains(&"admin") || roles.contains(&"write") { + return Ok(true); + } + + if roles.contains(&"read") && !is_write { + return Ok(true); + } + + warn!(self.logger, "Collaborator has no valid roles"; "scope" => %collab.scope); + Ok(false) + } else { + Ok(false) + } + } + + async fn check_project_member_permission( + &self, + user: &user::Model, + project_id: uuid::Uuid, + is_write: bool, + ) -> Result { + let member = project_members::Entity::find() + .filter(project_members::Column::Project.eq(project_id)) + .filter(project_members::Column::User.eq(user.uid)) + .one(self.db.reader()) + .await?; + + if let Some(member) = member { + match member.scope_role() { + Ok(MemberRole::Admin) | Ok(MemberRole::Owner) => Ok(true), + Ok(MemberRole::Member) => Ok(!is_write), + Err(_) => Ok(false), + } + } else { + Ok(false) + } + } + + fn generate_fingerprint_from_public_key(&self, public_key_str: 
&str) -> Result { + // Performance: avoid allocating Vec, use split_once for efficiency + let key_data_base64 = public_key_str + .split_whitespace() + .nth(1) + .ok_or("Invalid SSH key format")?; + + let key_data = general_purpose::STANDARD + .decode(key_data_base64) + .map_err(|e| format!("Base64 decode error: {}", e))?; + + // Performance: SHA256 is already optimized, compute hash directly + let mut hasher = Sha256::new(); + hasher.update(&key_data); + let hash = hasher.finalize(); + + // Performance: pre-allocate string capacity to avoid reallocation + let mut fingerprint = String::with_capacity(51); // "SHA256:" (7) + base64 (44) + fingerprint.push_str("SHA256:"); + fingerprint.push_str(&general_purpose::STANDARD_NO_PAD.encode(&hash)); + + Ok(fingerprint) + } +} diff --git a/libs/git/ssh/handle.rs b/libs/git/ssh/handle.rs new file mode 100644 index 0000000..a103530 --- /dev/null +++ b/libs/git/ssh/handle.rs @@ -0,0 +1,889 @@ +use crate::ssh::ReceiveSyncService; +use crate::ssh::RepoReceiveSyncTask; +use crate::ssh::SshTokenService; +use crate::ssh::authz::SshAuthService; +use crate::ssh::rate_limit::SshRateLimiter; +use db::cache::AppCache; +use db::database::AppDatabase; +use models::repos::{repo, repo_branch_protect}; +use models::users::user; +use russh::keys::{Certificate, PublicKey}; +use russh::server::{Auth, Handle, Msg, Session}; +use russh::{Channel, ChannelId, CryptoVec, Disconnect}; +use sea_orm::ColumnTrait; +use sea_orm::EntityTrait; +use sea_orm::QueryFilter; +use slog::{Logger, error, info, warn}; +use std::collections::{HashMap, HashSet}; +use std::io; +use std::net::SocketAddr; +use std::path::PathBuf; +use std::process::Stdio; +use std::str::FromStr; +use std::sync::Arc; +use std::time::Duration; +use tokio::io::{AsyncRead, AsyncReadExt, AsyncWriteExt}; +use tokio::process::ChildStdin; +use tokio::sync::mpsc::Sender; +use tokio::time::sleep; + +#[derive(Clone, Debug)] +pub struct RefUpdate { + pub name: String, + pub old_oid: String, + pub 
new_oid: String, +} + +impl RefUpdate { + /// Parse git reference update commands from SSH protocol text. + /// Format: " \n" + pub fn parse_ref_updates(data: &[u8]) -> Result, String> { + let text = String::from_utf8_lossy(data); + let mut refs = Vec::new(); + for line in text.lines() { + let line = line.trim(); + if line.is_empty() || line.starts_with('#') || line.starts_with("PACK") { + continue; + } + let mut parts = line.split_whitespace(); + let old_oid = parts.next().map(|s| s.to_string()).unwrap_or_default(); + let new_oid = parts.next().map(|s| s.to_string()).unwrap_or_default(); + let name = parts + .next() + .unwrap_or("") + .trim_start_matches('\0') + .to_string(); + if !name.is_empty() { + refs.push(RefUpdate { + old_oid, + new_oid, + name, + }); + } + } + Ok(refs) + } +} + +pub struct SSHandle { + pub repo: Option, + pub model: Option, + pub stdin: HashMap, + pub eof: HashMap>, + pub operator: Option, + pub db: AppDatabase, + pub auth: SshAuthService, + pub buffer: HashMap>, + pub branch: HashMap>, + pub service: Option, + pub cache: AppCache, + pub sync: ReceiveSyncService, + pub upload_pack_eof_sent: HashSet, + pub logger: Logger, + pub rate_limiter: Arc, + pub token_service: SshTokenService, + pub client_addr: Option, +} + +impl SSHandle { + pub fn new( + db: AppDatabase, + cache: AppCache, + sync: ReceiveSyncService, + logger: Logger, + rate_limiter: Arc, + token_service: SshTokenService, + client_addr: Option, + ) -> Self { + let auth = SshAuthService::new(db.clone(), logger.clone()); + let addr_str = client_addr + .map(|addr| format!("{}", addr)) + .unwrap_or_else(|| "unknown".to_string()); + info!(logger, "SSH handler created for client: {}", addr_str); + Self { + repo: None, + model: None, + stdin: HashMap::new(), + eof: HashMap::new(), + operator: None, + db, + auth, + buffer: HashMap::new(), + branch: HashMap::new(), + service: None, + cache, + sync, + upload_pack_eof_sent: HashSet::new(), + logger, + rate_limiter, + token_service, + 
client_addr, + } + } + + fn cleanup_channel(&mut self, channel_id: ChannelId) { + if let Some(mut stdin) = self.stdin.remove(&channel_id) { + tokio::spawn(async move { + stdin.flush().await.ok(); + let _ = stdin.shutdown().await; + }); + } + self.eof.remove(&channel_id); + self.upload_pack_eof_sent.remove(&channel_id); + } +} + +impl Drop for SSHandle { + fn drop(&mut self) { + let addr_str = self + .client_addr + .map(|addr| format!("{}", addr)) + .unwrap_or_else(|| "unknown".to_string()); + info!(self.logger, "SSH handler dropped for client: {}", addr_str); + + let channel_ids: Vec<_> = self.stdin.keys().copied().collect(); + for channel_id in channel_ids { + self.cleanup_channel(channel_id); + } + } +} + +impl russh::server::Handler for SSHandle { + type Error = russh::Error; + + async fn auth_none(&mut self, user: &str) -> Result { + let client_info = self + .client_addr + .map(|addr| format!("{}", addr)) + .unwrap_or_else(|| "unknown".to_string()); + info!( + self.logger, + "auth_none received for user '{}', client: {}", user, client_info + ); + Ok(Auth::UnsupportedMethod) + } + + async fn auth_password(&mut self, user: &str, token: &str) -> Result { + let client_info = self + .client_addr + .map(|addr| format!("{}", addr)) + .unwrap_or_else(|| "unknown".to_string()); + + if user != "git" { + warn!( + self.logger, + "auth_password rejected: invalid username '{}', client: {}", user, client_info + ); + return Err(russh::Error::NotAuthenticated); + } + + if token.is_empty() { + warn!( + self.logger, + "auth_password rejected: empty token, client: {}", client_info + ); + return Err(russh::Error::NotAuthenticated); + } + + info!( + self.logger, + "Attempting SSH token authentication, client: {}", client_info + ); + + let user_model = match self.token_service.find_user_by_token(token).await { + Ok(Some(model)) => model, + Ok(None) => { + warn!( + self.logger, + "SSH token auth rejected: token not found or expired, client: {}", client_info + ); + return 
    /// A client "offers" each key before signing with it; answer the probe with
    /// the same verification as a full public-key attempt so the client only
    /// proceeds to sign with a key this server will actually accept.
    async fn auth_publickey_offered(
        &mut self,
        user: &str,
        public_key: &PublicKey,
    ) -> Result<Auth, Self::Error> {
        self.auth_publickey(user, public_key).await
    }
auth error: {}, client: {}", e, client_info); + error!(self.logger, "{}", msg); + return Err(russh::Error::NotAuthenticated); + } + }; + + let user_id = user_model.uid.to_string(); + if !self.rate_limiter.is_user_allowed(&user_id).await { + let msg = format!( + "User rate limit exceeded: {}, client: {}", + user_model.username, client_info + ); + warn!(self.logger, "{}", msg); + return Err(russh::Error::NotAuthenticated); + } + + info!( + self.logger, + "SSH authentication successful: user={}, client={}", user_model.username, client_info + ); + self.operator = Some(user_model); + Ok(Auth::Accept) + } + async fn auth_openssh_certificate( + &mut self, + user: &str, + certificate: &Certificate, + ) -> Result { + let client_info = self + .client_addr + .map(|addr| format!("{}", addr)) + .unwrap_or_else(|| "unknown".to_string()); + + if user != "git" { + let msg = format!( + "SSH auth rejected: invalid username '{}', client: {}", + user, client_info + ); + warn!(self.logger, "{}", msg); + return Err(russh::Error::NotAuthenticated); + } + let public_key_str = certificate.to_string(); + if public_key_str.len() < 32 { + let msg = format!( + "SSH auth rejected: invalid public key length ({}), client: {}", + public_key_str.len(), + client_info + ); + warn!(self.logger, "{}", msg); + return Err(russh::Error::NotAuthenticated); + } + + info!( + self.logger, + "Attempting SSH authentication with public key, client: {}", client_info + ); + let user_model = match self.auth.find_user_by_public_key(&public_key_str).await { + Ok(Some(model)) => model, + Ok(None) => { + let msg = format!( + "SSH auth rejected: public key not found or invalid, client: {}", + client_info + ); + warn!(self.logger, "{}", msg); + return Err(russh::Error::NotAuthenticated); + } + Err(e) => { + let msg = format!("SSH auth error: {}, client: {}", e, client_info); + error!(self.logger, "{}", msg); + return Err(russh::Error::NotAuthenticated); + } + }; + + let user_id = user_model.uid.to_string(); + if 
!self.rate_limiter.is_user_allowed(&user_id).await { + let msg = format!( + "User rate limit exceeded: {}, client: {}", + user_model.username, client_info + ); + warn!(self.logger, "{}", msg); + return Err(russh::Error::NotAuthenticated); + } + + info!( + self.logger, + "SSH authentication successful: user={}, client={}", user_model.username, client_info + ); + self.operator = Some(user_model); + Ok(Auth::Accept) + } + async fn authentication_banner(&mut self) -> Result, Self::Error> { + Ok(None) + } + + async fn channel_close( + &mut self, + channel: ChannelId, + _: &mut Session, + ) -> Result<(), Self::Error> { + info!(self.logger, "channel_close"; + "channel" => ?channel, + "client" => ?self.client_addr + ); + self.cleanup_channel(channel); + Ok(()) + } + + async fn channel_eof( + &mut self, + channel: ChannelId, + _: &mut Session, + ) -> Result<(), Self::Error> { + info!(self.logger, "channel_eof"; + "channel" => ?channel, + "client" => ?self.client_addr + ); + + if let Some(eof) = self.eof.get(&channel) { + let _ = eof.send(true).await; + } + + if let Some(mut stdin) = self.stdin.remove(&channel) { + info!(self.logger, "Closing stdin"; + "channel" => ?channel, + "client" => ?self.client_addr + ); + let _ = stdin.flush().await; + let _ = stdin.shutdown().await; + info!(self.logger, "stdin closed"; + "channel" => ?channel, + "client" => ?self.client_addr + ); + } else { + warn!(self.logger, "stdin already removed"; + "channel" => ?channel, + "client" => ?self.client_addr + ); + } + + Ok(()) + } + + async fn channel_open_session( + &mut self, + _: Channel, + _: &mut Session, + ) -> Result { + Ok(true) + } + async fn data( + &mut self, + channel: ChannelId, + data: &[u8], + session: &mut Session, + ) -> Result<(), Self::Error> { + if matches!(self.service, Some(GitService::ReceivePack)) { + if !self.branch.contains_key(&channel) { + let bf = self.buffer.entry(channel).or_default(); + bf.extend_from_slice(data); + + if !bf.windows(4).any(|w| w == b"0000") { + 
return Ok(()); + } + + let buffered = self.buffer.remove(&channel).unwrap_or_default(); + + match RefUpdate::parse_ref_updates(&buffered) { + Ok(refs) => { + if let Some(model) = &self.model { + let branch_protect_roles = repo_branch_protect::Entity::find() + .filter(repo_branch_protect::Column::Repo.eq(model.id)) + .all(self.db.reader()) + .await + .map_err(|e| { + dbg!(&e); + russh::Error::IO(io::Error::new(io::ErrorKind::Other, e)) + })?; + + for r#ref in &refs { + if branch_protect_roles + .iter() + .any(|x| r#ref.name.starts_with(&x.branch)) + { + let msg = + format!("remote: Branch '{}' is protected\r\n", r#ref.name); + let _ = session.extended_data( + channel, + 1, + CryptoVec::from_slice(msg.as_bytes()), + ); + let _ = session.exit_status_request(channel, 1); + let _ = session.eof(channel); + let _ = session.close(channel); + self.cleanup_channel(channel); + return Ok(()); + } + } + } + self.branch.insert(channel, refs); + } + Err(e) => { + warn!(self.logger, "Failed to parse ref updates, forwarding raw data"; "error" => ?e); + self.branch.insert(channel, vec![]); + } + } + + if let Some(stdin) = self.stdin.get_mut(&channel) { + stdin.write_all(&buffered).await?; + stdin.flush().await?; + } else { + error!(self.logger, "stdin not found"; "channel" => ?channel); + } + return Ok(()); + } + + if let Some(stdin) = self.stdin.get_mut(&channel) { + stdin.write_all(data).await?; + stdin.flush().await?; + } else { + error!(self.logger, "stdin not found (forwarding)"; "channel" => ?channel); + } + return Ok(()); + } + + if let Some(stdin) = self.stdin.get_mut(&channel) { + stdin.write_all(data).await?; + if matches!(self.service, Some(GitService::UploadPack)) + && !self.upload_pack_eof_sent.contains(&channel) + { + let has_flush_pkt = data.windows(4).any(|w| w == b"0000"); + if has_flush_pkt { + stdin.flush().await?; + let _ = stdin.shutdown().await; + self.upload_pack_eof_sent.insert(channel); + } + } + } + Ok(()) + } + async fn shell_request( + &mut self, + 
channel_id: ChannelId, + session: &mut Session, + ) -> Result<(), Self::Error> { + if let Some(user) = &self.operator { + let welcome_msg = format!( + "Hi {}! You've successfully authenticated, but GitData does not provide shell access.\r\n", + user.username + ); + + info!(self.logger, "Shell request"; "user" => %user.username); + session + .data(channel_id, CryptoVec::from_slice(welcome_msg.as_bytes())) + .ok(); + session.exit_status_request(channel_id, 0).ok(); + session.eof(channel_id).ok(); + session.close(channel_id).ok(); + let _ = session.flush().ok(); + } else { + warn!(self.logger, "Shell request without authentication"); + let msg = "Authentication required\r\n"; + session + .data(channel_id, CryptoVec::from_slice(msg.as_bytes())) + .ok(); + session.exit_status_request(channel_id, 1).ok(); + session.eof(channel_id).ok(); + session.close(channel_id).ok(); + let _ = session.flush().ok(); + } + Ok(()) + } + async fn exec_request( + &mut self, + channel_id: ChannelId, + data: &[u8], + session: &mut Session, + ) -> Result<(), Self::Error> { + let client_info = self + .client_addr + .map(|addr| format!("{}", addr)) + .unwrap_or_else(|| "unknown".to_string()); + + info!( + self.logger, + "exec_request received, channel: {:?}, client: {}", channel_id, client_info + ); + + let git_shell_cmd = match std::str::from_utf8(data) { + Ok(cmd) => cmd.trim(), + Err(e) => { + error!(self.logger, "Invalid command encoding"; "error" => %e); + session + .disconnect( + Disconnect::ServiceNotAvailable, + "Invalid command encoding", + "", + ) + .ok(); + return Err(russh::Error::Disconnect); + } + }; + let (service, path) = match parse_git_command(git_shell_cmd) { + Some((s, p)) => (s, p), + None => { + error!(self.logger, "Invalid git command"; "command" => %git_shell_cmd); + let msg = format!("Invalid git command: {}", git_shell_cmd); + session + .disconnect(Disconnect::ServiceNotAvailable, &msg, "") + .ok(); + return Err(russh::Error::Disconnect); + } + }; + self.service = 
Some(service); + let (owner, repo) = match parse_repo_path(path) { + Some(pair) => pair, + None => { + let msg = format!("Invalid repository path: {}", path); + error!(self.logger, "Invalid repo path"; "path" => path); + session + .disconnect(Disconnect::ServiceNotAvailable, &msg, "") + .ok(); + return Err(russh::Error::Disconnect); + } + }; + let repo = repo.strip_suffix(".git").unwrap_or(repo).to_string(); + + let repo = match self.auth.find_repo(owner, &repo).await { + Ok(repo) => repo, + Err(e) => { + // Log the detailed error internally; client receives generic message. + error!(self.logger, "Error fetching repo"; "error" => %e); + session + .disconnect(Disconnect::ServiceNotAvailable, "Repository not found", "") + .ok(); + return Err(russh::Error::Disconnect); + } + }; + + self.model = Some(repo.clone()); + let operator = match &self.operator { + Some(user) => user, + None => { + let msg = "Authentication error: no authenticated user"; + error!(self.logger, "No authenticated user"); + session.disconnect(Disconnect::ByApplication, msg, "").ok(); + return Err(russh::Error::Disconnect); + } + }; + + let is_write = service == GitService::ReceivePack; + let has_permission = self + .auth + .check_repo_permission(operator, &repo, is_write) + .await; + + if !has_permission { + let msg = format!( + "Access denied: user '{}' does not have {} permission for repository {}", + operator.username, + if is_write { "write" } else { "read" }, + repo.repo_name + ); + error!(self.logger, "Access denied"; "user" => %operator.username, "repo" => %repo.repo_name, "is_write" => is_write); + session.disconnect(Disconnect::ByApplication, &msg, "").ok(); + return Err(russh::Error::Disconnect); + } + + let user_id = operator.uid.to_string(); + let repo_path = format!("{}/{}", owner, &repo.repo_name); + if !self + .rate_limiter + .is_repo_access_allowed(&user_id, &repo_path) + .await + { + let msg = format!("Rate limit exceeded for repository access: {}", repo_path); + warn!(self.logger, 
"Repo access rate limit exceeded"; "user" => %operator.username, "repo" => %repo.repo_name); + session.disconnect(Disconnect::ByApplication, &msg, "").ok(); + return Err(russh::Error::Disconnect); + } + + info!(self.logger, "Access granted"; "user" => %operator.username, "repo" => %repo.repo_name, "is_write" => is_write); + + let repo_path = PathBuf::from(&repo.storage_path); + if !repo_path.exists() { + error!(self.logger, "Repository path not found"; "path" => %repo.storage_path); + } + let mut cmd = build_git_command(service, repo_path); + let logger = self.logger.clone(); + info!(&logger, "Spawning git process"; "service" => ?service, "path" => %repo.storage_path); + let mut shell = match cmd + .stdin(Stdio::piped()) + .stdout(Stdio::piped()) + .stderr(Stdio::piped()) + .spawn() + { + Ok(shell) => { + let _ = session.channel_success(channel_id); + shell + } + Err(e) => { + error!(&logger, "Process spawn failed"; "error" => %e); + let _ = session.channel_failure(channel_id); + self.cleanup_channel(channel_id); + dbg!(&e); + return Err(russh::Error::IO(e)); + } + }; + let session_handle = session.handle(); + let stdin = shell.stdin.take().unwrap(); + self.stdin.insert(channel_id, stdin); + let mut shell_stdout = shell.stdout.take().unwrap(); + let mut shell_stderr = shell.stderr.take().unwrap(); + + let (eof_tx, mut eof_rx) = tokio::sync::mpsc::channel::(10); + self.eof.insert(channel_id, eof_tx); + let repo_uid = repo.id; + let should_sync = service == GitService::ReceivePack; + let sync = self.sync.clone(); + let logger_for_fut = self.logger.clone(); + let fut = async move { + info!(&logger_for_fut, "Task started"; "channel" => ?channel_id); + + let mut stdout_done = false; + let mut stderr_done = false; + + let stdout_fut = forward( + &session_handle, + channel_id, + &mut shell_stdout, + |handle, chan, data| async move { handle.data(chan, data).await }, + ); + tokio::pin!(stdout_fut); + + let stderr_fut = forward( + &session_handle, + channel_id, + &mut 
shell_stderr, + |handle, chan, data| async move { handle.extended_data(chan, 1, data).await }, + ); + tokio::pin!(stderr_fut); + + loop { + tokio::select! { + result = shell.wait() => { + let status = result?; + let status_code = status.code().unwrap_or(128) as u32; + + info!(&logger_for_fut, "Git process exited"; "channel" => ?channel_id, "status" => status_code); + + if !stdout_done || !stderr_done { + let _ = tokio::time::timeout(Duration::from_millis(100), async { + tokio::join!( + async { + if !stdout_done { + let _ = (&mut stdout_fut).await; + } + }, + async { + if !stderr_done { + let _ = (&mut stderr_fut).await; + } + } + ); + }).await; + } + + if should_sync { + let sync = sync.clone(); + tokio::spawn(async move { + sync.send(RepoReceiveSyncTask { repo_uid }).await + }); + } + + let _ = session_handle.exit_status_request(channel_id, status_code).await; + sleep(Duration::from_millis(50)).await; + let _ = session_handle.eof(channel_id).await; + let _ = session_handle.close(channel_id).await; + info!(&logger_for_fut, "Channel closed"; "channel" => ?channel_id); + break; + } + result = &mut stdout_fut, if !stdout_done => { + info!(&logger_for_fut, "stdout completed"); + stdout_done = true; + if let Err(e) = result { + warn!(&logger_for_fut, "stdout forward error"; "error" => ?e); + } + } + result = &mut stderr_fut, if !stderr_done => { + info!(&logger_for_fut, "stderr completed"); + stderr_done = true; + if let Err(e) = result { + warn!(&logger_for_fut, "stderr forward error"; "error" => ?e); + } + } + } + } + + Ok::<(), russh::Error>(()) + }; + + tokio::spawn(async move { + if let Err(e) = fut.await { + error!(&logger, "Git SSH channel task error"; "error" => %e); + } + while eof_rx.recv().await.is_some() {} + }); + Ok(()) + } +} + +fn parse_git_command(cmd: &str) -> Option<(GitService, &str)> { + let (svc, path) = match cmd.split_once(' ') { + Some(("git-receive-pack", path)) => (GitService::ReceivePack, path), + Some(("git-upload-pack", path)) => 
/// Split an SSH repo path ("/owner/repo" or "owner/repo") into (owner, repo).
/// Leading/trailing slashes are ignored; anything after the first interior
/// slash belongs to the repo part. Returns None when either side is empty.
fn parse_repo_path(path: &str) -> Option<(&str, &str)> {
    let trimmed = path.trim_matches('/');
    let (owner, repo) = trimmed.split_once('/')?;
    if owner.is_empty() || repo.is_empty() {
        None
    } else {
        Some((owner, repo))
    }
}
{ + match fwd(session_handle, chan_id, chunk).await { + Ok(()) => break, + Err(unsent) => { + chunk = unsent; + sleep(Duration::from_millis(5)).await; + } + } + } + } + + Ok(()) +} diff --git a/libs/git/ssh/mod.rs b/libs/git/ssh/mod.rs new file mode 100644 index 0000000..4b22646 --- /dev/null +++ b/libs/git/ssh/mod.rs @@ -0,0 +1,288 @@ +use crate::error::GitError; +use crate::hook::pool::types::{HookTask, TaskType}; +use anyhow::Context; +use base64::Engine; +use config::AppConfig; +use db::cache::AppCache; +use db::database::AppDatabase; +use deadpool_redis::cluster::Pool as RedisPool; +use models::users::{user, user_token}; +use russh::keys::PrivateKey; +use russh::server::Server; +use russh::{MethodKind, MethodSet, SshId, server::Config}; +use sea_orm::prelude::*; +use sha2::{Digest, Sha256}; +use slog::{Logger, error, info}; +use std::str::FromStr; +use std::sync::Arc; +use std::time::Duration; + +pub mod authz; +pub mod handle; +pub mod rate_limit; +pub mod server; + +#[derive(Clone)] +pub struct SSHHandle { + pub db: AppDatabase, + pub app: AppConfig, + pub cache: AppCache, + pub redis_pool: RedisPool, + pub logger: Logger, +} + +impl SSHHandle { + pub async fn run(&self) { + let this = self.clone(); + tokio::spawn(async move { + if let Err(e) = this.run_ssh().await { + error!(this.logger, "SSH server error: {}", e); + } + }); + } + pub fn new( + db: AppDatabase, + app: AppConfig, + cache: AppCache, + redis_pool: RedisPool, + logger: Logger, + ) -> Self { + SSHHandle { + db, + app, + cache, + redis_pool, + logger, + } + } + pub async fn run_ssh(&self) -> anyhow::Result<()> { + info!(self.logger, "SSH server starting"); + let private_key_content = self.app.ssh_server_private_key()?; + if private_key_content.is_empty() { + return Err(anyhow::anyhow!("SSH server private key is not configured")); + } + + let preview = if private_key_content.len() > 100 { + format!("{}...", &private_key_content[..100]) + } else { + private_key_content.clone() + }; + info!( + 
self.logger, + "Loading SSH private key (hex, {} bytes)", + private_key_content.len() + ); + + let private_key_bytes = hex::decode(&private_key_content).with_context(|| { + format!( + "Failed to decode hex-encoded SSH private key. Preview: {}", + preview + ) + })?; + + info!( + self.logger, + "Hex decoded to {} bytes", + private_key_bytes.len() + ); + + let private_key_pem = std::str::from_utf8(&private_key_bytes) + .with_context(|| "Decoded SSH private key is not valid UTF-8")?; + + if let Some(first_line) = private_key_pem.lines().next() { + info!(self.logger, "PEM format starts with: {}", first_line); + } + + info!( + self.logger, + "Complete private key content:\n{}", private_key_pem + ); + + let private_key = { + match ssh_key::PrivateKey::from_openssh(private_key_pem) { + Ok(ssh_key) => { + info!(self.logger, "Successfully parsed with ssh-key crate"); + let openssh_pem = ssh_key + .to_openssh(ssh_key::LineEnding::LF) + .with_context(|| "Failed to serialize to OpenSSH format")?; + + PrivateKey::from_str(&openssh_pem) + .with_context(|| "Failed to parse with russh after ssh-key conversion")? + } + Err(e) => { + info!( + self.logger, + "ssh-key from_openssh failed: {}, trying direct russh parse", e + ); + PrivateKey::from_str(private_key_pem).with_context(|| { + format!("Failed to parse SSH private key with both methods") + })? 
+ } + } + }; + info!(self.logger, "SSH private key loaded"); + let mut config = Config::default(); + config.keys = vec![private_key]; + let version = format!("SSH-2.0-GitdataAI {}", env!("CARGO_PKG_VERSION")); + config.server_id = SshId::Standard(version); + let mut method = MethodSet::empty(); + method.push(MethodKind::PublicKey); + method.push(MethodKind::Password); + config.methods = method; + config.inactivity_timeout = Some(Duration::from_secs(300)); + config.keepalive_interval = Some(Duration::from_secs(60)); + config.keepalive_max = 3; + + info!( + self.logger, + "SSH server configured with methods: {:?}", config.methods + ); + let token_service = SshTokenService::new(self.db.clone()); + let mut server = server::SSHServer::new( + self.db.clone(), + self.cache.clone(), + self.redis_pool.clone(), + self.logger.clone(), + token_service, + ); + let ssh_port = self.app.ssh_port()?; + let bind_addr = format!("0.0.0.0:{}", ssh_port); + let public_host = self.app.ssh_domain()?; + let msg = if ssh_port == 22 { + format!( + "SSH server listening on port 22. Please use port {} for SSH connections.", + ssh_port + ) + } else { + format!( + "SSH server listening on port {} (public: {}). Please use port {} for SSH connections.", + ssh_port, public_host, ssh_port + ) + }; + info!(self.logger, "{}", msg); + server.run_on_address(Arc::new(config), bind_addr).await?; + Ok(()) + } +} + +#[derive(Clone)] +pub struct ReceiveSyncService { + pool: RedisPool, + logger: Logger, + /// Redis key prefix for hook task queues, e.g. "{hook}". 
+ redis_prefix: String, +} + +impl ReceiveSyncService { + pub fn new(pool: RedisPool, logger: Logger) -> Self { + Self { + pool, + logger, + redis_prefix: "{hook}".to_string(), + } + } + + pub async fn send(&self, task: RepoReceiveSyncTask) { + let hook_task = HookTask { + id: uuid::Uuid::new_v4().to_string(), + repo_id: task.repo_uid.to_string(), + task_type: TaskType::Sync, + payload: serde_json::Value::Null, + created_at: chrono::Utc::now(), + }; + + let task_json = match serde_json::to_string(&hook_task) { + Ok(j) => j, + Err(e) => { + error!(self.logger, "Failed to serialize hook task: {}", e); + return; + } + }; + + let queue_key = format!("{}:sync", self.redis_prefix); + + let redis = match self.pool.get().await { + Ok(conn) => conn, + Err(e) => { + error!(self.logger, "Failed to get Redis connection: {}", e); + return; + } + }; + + let mut conn: deadpool_redis::cluster::Connection = redis; + if let Err(e) = redis::cmd("LPUSH") + .arg(&queue_key) + .arg(&task_json) + .query_async::<()>(&mut conn) + .await + { + error!(self.logger, "Failed to LPUSH sync task"; "error" => %e, "repo_id" => %task.repo_uid); + } + } +} + +#[derive(Clone)] +pub struct RepoReceiveSyncTask { + pub repo_uid: uuid::Uuid, +} + +/// SSH token authentication service. +/// Uses the same token hash algorithm as user access keys (SHA256 + base64). 
#[derive(Clone)]
pub struct SshTokenService {
    db: AppDatabase,
}

impl SshTokenService {
    pub fn new(db: AppDatabase) -> Self {
        Self { db }
    }

    /// SHA-256 digest, base64-encoded — must match how access-key hashes are
    /// stored in `user_token.token_hash`.
    fn hash_token(token: &str) -> String {
        let mut hasher = Sha256::new();
        hasher.update(token.as_bytes());
        base64::prelude::BASE64_STANDARD.encode(hasher.finalize())
    }

    /// Resolve a raw SSH token to its owning user.
    ///
    /// Returns `Ok(None)` for unknown, revoked, or expired tokens; database
    /// failures surface as `GitError::Internal`.
    pub async fn find_user_by_token(&self, token: &str) -> Result<Option<user::Model>, GitError> {
        let token_hash = Self::hash_token(token);

        let found = user_token::Entity::find()
            .filter(user_token::Column::TokenHash.eq(&token_hash))
            .filter(user_token::Column::IsRevoked.eq(false))
            .one(self.db.reader())
            .await
            .map_err(|e| GitError::Internal(e.to_string()))?;

        let token_model = match found {
            Some(model) => model,
            None => return Ok(None),
        };

        // A past expiry invalidates the token; no expiry means it never expires.
        if let Some(expires_at) = token_model.expires_at {
            if expires_at < chrono::Utc::now() {
                return Ok(None);
            }
        }

        user::Entity::find()
            .filter(user::Column::Uid.eq(token_model.user))
            .one(self.db.reader())
            .await
            .map_err(|e| GitError::Internal(e.to_string()))
    }
}

/// Bootstrap and run the SSH front-end: connect storage, then serve until exit.
pub async fn run_ssh(config: AppConfig, logger: Logger) -> anyhow::Result<()> {
    info!(logger, "SSH server initializing");
    let db = AppDatabase::init(&config).await?;
    let cache = AppCache::init(&config).await?;
    let redis_pool = cache.redis_pool().clone();
    SSHHandle::new(db, config.clone(), cache, redis_pool, logger)
        .run_ssh()
        .await?;
    Ok(())
}

// ===== libs/git/ssh/rate_limit.rs =====

use std::collections::HashMap;
use std::sync::Arc;
use std::time::{Duration, Instant};
use tokio::sync::RwLock;
use tokio::time::interval;

/// Fixed-window rate-limit parameters: at most `requests_per_window` requests
/// per `window_duration`.
#[derive(Debug, Clone)]
pub struct RateLimitConfig {
    pub requests_per_window: u32,
    pub window_duration: Duration,
}

impl Default for RateLimitConfig {
    fn default() ->
Self {
        Self {
            requests_per_window: 100,
            window_duration: Duration::from_secs(60),
        }
    }
}

/// Per-key counter for the current fixed window.
#[derive(Debug)]
struct RateLimitState {
    count: u32,
    reset_time: Instant,
}

/// Generic fixed-window rate limiter keyed by string.
pub struct RateLimiter {
    limits: Arc<RwLock<HashMap<String, RateLimitState>>>,
    config: RateLimitConfig,
}

impl RateLimiter {
    pub fn new(config: RateLimitConfig) -> Self {
        Self {
            limits: Arc::new(RwLock::new(HashMap::new())),
            config,
        }
    }

    /// Record one request for `key`. Returns `false` once the window's budget
    /// is exhausted; the counter rolls over when the window elapses.
    pub async fn is_allowed(&self, key: &str) -> bool {
        let now = Instant::now();
        let mut table = self.limits.write().await;

        let entry = table
            .entry(key.to_string())
            .or_insert_with(|| RateLimitState {
                count: 0,
                reset_time: now + self.config.window_duration,
            });

        // Start a fresh window once the old one has elapsed.
        if entry.reset_time <= now {
            entry.count = 0;
            entry.reset_time = now + self.config.window_duration;
        }

        if entry.count >= self.config.requests_per_window {
            false
        } else {
            entry.count += 1;
            true
        }
    }

    /// How many requests `key` may still make in its current window.
    pub async fn remaining_requests(&self, key: &str) -> u32 {
        let now = Instant::now();
        let table = self.limits.read().await;
        match table.get(key) {
            // Expired or absent entries report a full budget.
            Some(state) if now < state.reset_time => self
                .config
                .requests_per_window
                .saturating_sub(state.count),
            _ => self.config.requests_per_window,
        }
    }

    /// Time until `key`'s window resets; zero when absent or already expired.
    pub async fn reset_time(&self, key: &str) -> Duration {
        let now = Instant::now();
        let table = self.limits.read().await;
        match table.get(key) {
            Some(state) if now < state.reset_time => state.reset_time.duration_since(now),
            _ => Duration::from_secs(0),
        }
    }

    /// Start a background cleanup task that removes expired entries every 5 minutes.
    /// This prevents unbounded HashMap growth.
+ pub fn start_cleanup(self: Arc) -> tokio::task::JoinHandle<()> { + tokio::spawn(async move { + let mut ticker = interval(Duration::from_secs(300)); // every 5 minutes + loop { + ticker.tick().await; + let now = Instant::now(); + let mut limits = self.limits.write().await; + limits.retain(|_, state| now < state.reset_time); + } + }) + } +} + +pub struct SshRateLimiter { + limiter: RateLimiter, +} + +impl SshRateLimiter { + pub fn new() -> Self { + Self { + limiter: RateLimiter::new(RateLimitConfig::default()), + } + } + + pub async fn is_user_allowed(&self, user_id: &str) -> bool { + self.limiter.is_allowed(&format!("user:{}", user_id)).await + } + + pub async fn is_ip_allowed(&self, ip_address: &str) -> bool { + self.limiter.is_allowed(&format!("ip:{}", ip_address)).await + } + + pub async fn is_repo_access_allowed(&self, user_id: &str, repo_path: &str) -> bool { + self.limiter + .is_allowed(&format!("repo_access:{}:{}", user_id, repo_path)) + .await + } +} diff --git a/libs/git/ssh/server.rs b/libs/git/ssh/server.rs new file mode 100644 index 0000000..b06e31d --- /dev/null +++ b/libs/git/ssh/server.rs @@ -0,0 +1,109 @@ +use crate::ssh::ReceiveSyncService; +use crate::ssh::SshTokenService; +use crate::ssh::handle::SSHandle; +use crate::ssh::rate_limit::SshRateLimiter; +use db::cache::AppCache; +use db::database::AppDatabase; +use deadpool_redis::cluster::Pool as RedisPool; +use russh::server::Handler; +use slog::{Logger, info, warn}; +use std::io; +use std::net::SocketAddr; +use std::sync::Arc; + +pub struct SSHServer { + pub db: AppDatabase, + pub cache: AppCache, + pub redis_pool: RedisPool, + pub logger: Logger, + pub rate_limiter: Arc, + pub token_service: SshTokenService, +} + +impl SSHServer { + pub fn new( + db: AppDatabase, + cache: AppCache, + redis_pool: RedisPool, + logger: Logger, + token_service: SshTokenService, + ) -> Self { + SSHServer { + db, + cache, + redis_pool, + logger, + rate_limiter: Arc::new(SshRateLimiter::new()), + token_service, + } + 
} +} +impl russh::server::Server for SSHServer { + type Handler = SSHandle; + + fn new_client(&mut self, addr: Option) -> Self::Handler { + if let Some(addr) = addr { + let ip = addr.ip().to_string(); + info!( + self.logger, + "New SSH connection from {}:{}", + ip, + addr.port() + ); + + let rate_limiter = self.rate_limiter.clone(); + let logger = self.logger.clone(); + tokio::spawn(async move { + if !rate_limiter.is_ip_allowed(&ip).await { + warn!(logger, "IP rate limit exceeded"; "ip" => %ip); + } + }); + } else { + info!(self.logger, "New SSH connection from unknown address"); + } + let sync_service = ReceiveSyncService::new(self.redis_pool.clone(), self.logger.clone()); + SSHandle::new( + self.db.clone(), + self.cache.clone(), + sync_service, + self.logger.clone(), + self.rate_limiter.clone(), + self.token_service.clone(), + addr, + ) + } + + fn handle_session_error(&mut self, error: ::Error) { + match error { + russh::Error::Disconnect => { + info!(self.logger, "Connection disconnected by peer"); + } + russh::Error::Inconsistent => { + warn!(self.logger, "Protocol inconsistency detected"); + } + russh::Error::NotAuthenticated => { + warn!(self.logger, "Authentication failed"); + } + russh::Error::IO(ref io_err) => { + let error_msg = format!( + "IO error: kind={:?}, message={}, raw_os_error={:?}", + io_err.kind(), + io_err, + io_err.raw_os_error() + ); + warn!(self.logger, "{}", error_msg); + + if io_err.kind() == io::ErrorKind::UnexpectedEof { + warn!( + self.logger, + "Client disconnected during handshake or before authentication" + ); + } + } + _ => { + let error_msg = format!("SSH session error: {}", error); + warn!(self.logger, "{}", error_msg); + } + } + } +} diff --git a/libs/git/tags/mod.rs b/libs/git/tags/mod.rs new file mode 100644 index 0000000..834ccf4 --- /dev/null +++ b/libs/git/tags/mod.rs @@ -0,0 +1,4 @@ +//! Tag domain — all tag-related operations on a GitDomain. 
+pub mod ops; +pub mod query; +pub mod types; diff --git a/libs/git/tags/ops.rs b/libs/git/tags/ops.rs new file mode 100644 index 0000000..f4298b9 --- /dev/null +++ b/libs/git/tags/ops.rs @@ -0,0 +1,228 @@ +//! Tag create/delete/rename operations. + +use crate::commit::types::{CommitOid, CommitSignature}; +use crate::ref_utils::validate_ref_name; +use crate::tags::types::TagInfo; +use crate::{GitDomain, GitError, GitResult}; + +impl GitDomain { + pub fn tag_create( + &self, + name: &str, + target: &CommitOid, + message: &str, + tagger: &CommitSignature, + force: bool, + ) -> GitResult { + validate_ref_name(name)?; + + let target_oid = target + .to_oid() + .map_err(|_| GitError::InvalidOid(target.to_string()))?; + + let obj = self + .repo() + .find_object(target_oid, None) + .map_err(|e| GitError::Internal(e.to_string()))?; + + let sig = self.commit_signature_to_git2(tagger)?; + + let tag_oid = self + .repo() + .tag(name, &obj, &sig, message, force) + .map_err(|e| { + if e.code() == git2::ErrorCode::Exists { + GitError::TagExists(name.to_string()) + } else { + GitError::Internal(e.to_string()) + } + })?; + + let ref_name = format!("refs/tags/{}", name); + self.repo + .reference(&ref_name, tag_oid, true, "create tag") + .map_err(|e| GitError::Internal(e.to_string()))?; + + Ok(TagInfo { + name: name.to_string(), + oid: CommitOid::from_git2(tag_oid), + target: target.clone(), + is_annotated: true, + message: Some(message.to_string()), + tagger: Some(tagger.name.clone()), + tagger_email: Some(tagger.email.clone()), + }) + } + + pub fn tag_create_lightweight( + &self, + name: &str, + target: &CommitOid, + force: bool, + ) -> GitResult { + validate_ref_name(name)?; + + let target_oid = target + .to_oid() + .map_err(|_| GitError::InvalidOid(target.to_string()))?; + + let obj = self + .repo() + .find_object(target_oid, None) + .map_err(|e| GitError::Internal(e.to_string()))?; + + let tag_oid = self + .repo() + .tag_lightweight(name, &obj, force) + .map_err(|e| { + if 
e.code() == git2::ErrorCode::Exists { + GitError::TagExists(name.to_string()) + } else { + GitError::Internal(e.to_string()) + } + })?; + + Ok(TagInfo { + name: name.to_string(), + oid: CommitOid::from_git2(tag_oid), + target: target.clone(), + is_annotated: false, + message: None, + tagger: None, + tagger_email: None, + }) + } + + pub fn tag_delete(&self, name: &str) -> GitResult<()> { + let full_name = if name.starts_with("refs/tags/") { + name.to_string() + } else { + format!("refs/tags/{}", name) + }; + + let mut reference = self + .repo() + .find_reference(&full_name) + .map_err(|_e| GitError::RefNotFound(name.to_string()))?; + + reference + .delete() + .map_err(|e| GitError::Internal(e.to_string())) + } + + pub fn tag_rename(&self, old_name: &str, new_name: &str) -> GitResult { + validate_ref_name(new_name)?; + + let old_ref = if old_name.starts_with("refs/tags/") { + old_name.to_string() + } else { + format!("refs/tags/{}", old_name) + }; + + let new_ref = format!("refs/tags/{}", new_name); + + let info = self.tag_get(old_name)?; + + let mut reference = self + .repo() + .find_reference(&old_ref) + .map_err(|_e| GitError::RefNotFound(old_name.to_string()))?; + + let target_oid = reference + .target() + .ok_or_else(|| GitError::Internal("tag has no target".to_string()))?; + + reference + .delete() + .map_err(|e| GitError::Internal(e.to_string()))?; + + self.repo + .reference(&new_ref, target_oid, true, "rename tag") + .map_err(|e| GitError::Internal(e.to_string()))?; + + Ok(TagInfo { + name: new_name.to_string(), + oid: info.oid, + target: info.target, + is_annotated: info.is_annotated, + message: info.message, + tagger: info.tagger, + tagger_email: info.tagger_email, + }) + } + + pub fn tag_update_message( + &self, + name: &str, + message: &str, + tagger: &CommitSignature, + ) -> GitResult { + let full_name = if name.starts_with("refs/tags/") { + name.to_string() + } else { + format!("refs/tags/{}", name) + }; + + let reference = self + .repo() + 
.find_reference(&full_name) + .map_err(|_e| GitError::RefNotFound(name.to_string()))?; + + let tag_oid = reference + .target() + .ok_or_else(|| GitError::Internal("tag reference has no target".to_string()))?; + + let tag_obj = self + .repo() + .find_object(tag_oid, None) + .map_err(|e| GitError::Internal(e.to_string()))?; + + if tag_obj.kind() != Some(git2::ObjectType::Tag) { + return Err(GitError::Internal( + "cannot update message of a lightweight tag".to_string(), + )); + } + + let commit_obj = tag_obj + .as_tag() + .and_then(|t| t.peel().ok()) + .ok_or_else(|| GitError::Internal("cannot peel tag to commit".to_string()))?; + + let sig = self.commit_signature_to_git2(tagger)?; + + let new_tag_oid = self + .repo() + .tag(name, &commit_obj, &sig, message, true) + .map_err(|e| GitError::Internal(e.to_string()))?; + + let temp_name = format!("{}/update-tmp-{}", full_name, std::process::id()); + self.repo + .reference(&temp_name, new_tag_oid, true, "update tag message (temp)") + .map_err(|e| GitError::Internal(e.to_string()))?; + + self.repo + .reference(&full_name, new_tag_oid, true, "update tag message") + .map_err(|e| GitError::Internal(e.to_string()))?; + + if let Ok(mut temp_ref) = self.repo().find_reference(&temp_name) { + if let Err(e) = temp_ref.delete() { + // Log but do not fail — a leftover temporary reference is non-critical + // but indicates something went wrong during tag update. + eprintln!( + "failed to delete temporary tag reference {}: {}", + temp_name, e + ); + } + } + + Ok(TagInfo { + name: name.to_string(), + oid: CommitOid::from_git2(new_tag_oid), + target: CommitOid::from_git2(commit_obj.id()), + is_annotated: true, + message: Some(message.to_string()), + tagger: Some(tagger.name.clone()), + tagger_email: Some(tagger.email.clone()), + }) + } +} diff --git a/libs/git/tags/query.rs b/libs/git/tags/query.rs new file mode 100644 index 0000000..22e2db4 --- /dev/null +++ b/libs/git/tags/query.rs @@ -0,0 +1,201 @@ +//! Tag querying operations. 
+ +use crate::commit::types::CommitOid; +use crate::tags::types::{TagInfo, TagSummary}; +use crate::{GitDomain, GitError, GitResult}; + +impl GitDomain { + pub fn tag_list(&self) -> GitResult> { + let tag_names = self + .repo() + .tag_names(None) + .map_err(|e| GitError::Internal(e.to_string()))?; + + let mut tags = Vec::with_capacity(16); + let mut errors: Vec<(String, GitError)> = Vec::new(); + let count = tag_names.len(); + for i in 0..count { + if let Some(name) = tag_names.get(i) { + match self.tag_get(name) { + Ok(info) => tags.push(info), + Err(e) => errors.push((name.to_string(), e)), + } + } + } + if !errors.is_empty() { + return Err(GitError::Internal(format!( + "failed to get {} tag(s): {}", + errors.len(), + errors + .into_iter() + .map(|(n, e)| format!("{}: {}", n, e)) + .collect::>() + .join("; ") + ))); + } + Ok(tags) + } + + pub fn tag_list_names(&self) -> GitResult> { + let names = self + .repo() + .tag_names(None) + .map_err(|e| GitError::Internal(e.to_string()))?; + let count = names.len(); + let mut result = Vec::with_capacity(count); + for i in 0..count { + if let Some(name) = names.get(i) { + result.push(name.to_string()); + } + } + Ok(result) + } + + pub fn tag_count(&self) -> GitResult { + let names = self.tag_list_names()?; + Ok(names.len()) + } + + pub fn tag_summary(&self) -> GitResult { + let count = self.tag_count()?; + Ok(TagSummary { total_count: count }) + } + + pub fn tag_get(&self, name: &str) -> GitResult { + let full_name = if name.starts_with("refs/tags/") { + name.to_string() + } else { + format!("refs/tags/{}", name) + }; + + let reference = self + .repo() + .find_reference(&full_name) + .map_err(|_e| GitError::RefNotFound(name.to_string()))?; + + let target_oid = reference + .target() + .ok_or_else(|| GitError::Internal("tag reference has no target".to_string()))?; + + let target = CommitOid::from_git2(target_oid); + + let obj = self + .repo() + .find_object(target_oid, None) + .map_err(|e| 
GitError::Internal(e.to_string()))?; + + if obj.kind() == Some(git2::ObjectType::Tag) { + let tag = self + .repo() + .find_tag(target_oid) + .map_err(|e| GitError::Internal(e.to_string()))?; + + let tagger = tag.tagger().map(|s| { + ( + s.name().unwrap_or("").to_string(), + s.email().unwrap_or("").to_string(), + ) + }); + + Ok(TagInfo { + name: name.to_string(), + oid: CommitOid::from_git2(target_oid), + target, + is_annotated: true, + message: tag.message().map(String::from), + tagger: tagger.as_ref().map(|(n, _)| n.clone()), + tagger_email: tagger.as_ref().map(|(_, e)| e.clone()), + }) + } else { + Ok(TagInfo { + name: name.to_string(), + oid: CommitOid::from_git2(target_oid), + target, + is_annotated: false, + message: None, + tagger: None, + tagger_email: None, + }) + } + } + + pub fn tag_exists(&self, name: &str) -> bool { + let full_name = if name.starts_with("refs/tags/") { + name.to_string() + } else { + format!("refs/tags/{}", name) + }; + + self.repo.find_reference(&full_name).is_ok() + } + + pub fn tag_target(&self, name: &str) -> GitResult> { + let info = self.tag_get(name)?; + Ok(Some(info.target)) + } + + pub fn tag_is_annotated(&self, name: &str) -> GitResult { + let (_, is_tag) = self.tag_reference_info(name)?; + Ok(is_tag) + } + + pub fn tag_message(&self, name: &str) -> GitResult> { + if let Some(tag) = self.find_annotated_tag(name)? { + Ok(tag.message().map(String::from)) + } else { + Ok(None) + } + } + + pub fn tag_tagger(&self, name: &str) -> GitResult> { + if let Some(tag) = self.find_annotated_tag(name)? { + Ok(tag.tagger().map(|s| { + ( + s.name().unwrap_or("").to_string(), + s.email().unwrap_or("").to_string(), + ) + })) + } else { + Ok(None) + } + } + + /// Look up a tag's reference OID and whether it is an annotated tag. 
+ fn tag_reference_info(&self, name: &str) -> GitResult<(git2::Oid, bool)> { + let full_name = if name.starts_with("refs/tags/") { + name.to_string() + } else { + format!("refs/tags/{}", name) + }; + + let reference = self + .repo() + .find_reference(&full_name) + .map_err(|_e| GitError::RefNotFound(name.to_string()))?; + + let target_oid = reference + .target() + .ok_or_else(|| GitError::Internal("tag reference has no target".to_string()))?; + + let obj = self + .repo() + .find_object(target_oid, None) + .map_err(|e| GitError::Internal(e.to_string()))?; + + let is_tag = obj.kind() == Some(git2::ObjectType::Tag); + Ok((target_oid, is_tag)) + } + + /// Find the git2 Tag object for an annotated tag, or None if it is a lightweight tag. + fn find_annotated_tag(&self, name: &str) -> GitResult>> { + let (target_oid, is_tag) = self.tag_reference_info(name)?; + if !is_tag { + return Ok(None); + } + let tag = self + .repo() + .find_tag(target_oid) + .map_err(|e| GitError::Internal(e.to_string()))?; + Ok(Some(tag)) + } +} diff --git a/libs/git/tags/types.rs b/libs/git/tags/types.rs new file mode 100644 index 0000000..4950e6c --- /dev/null +++ b/libs/git/tags/types.rs @@ -0,0 +1,21 @@ +//! Serializable types for the tag domain. + +use serde::{Deserialize, Serialize}; + +use crate::commit::types::CommitOid; + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct TagInfo { + pub name: String, + pub oid: CommitOid, + pub target: CommitOid, + pub is_annotated: bool, + pub message: Option, + pub tagger: Option, + pub tagger_email: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct TagSummary { + pub total_count: usize, +} diff --git a/libs/git/tree/mod.rs b/libs/git/tree/mod.rs new file mode 100644 index 0000000..c919433 --- /dev/null +++ b/libs/git/tree/mod.rs @@ -0,0 +1,3 @@ +//! Tree domain — all tree-related operations on a GitDomain. 
+pub mod query; +pub mod types; diff --git a/libs/git/tree/query.rs b/libs/git/tree/query.rs new file mode 100644 index 0000000..bac08f1 --- /dev/null +++ b/libs/git/tree/query.rs @@ -0,0 +1,133 @@ +//! Tree query operations. + +use std::path::Path; + +use crate::commit::types::CommitOid; +use crate::tree::types::{TreeEntry, TreeInfo}; +use crate::{GitDomain, GitError, GitResult}; + +impl GitDomain { + pub fn tree_get(&self, oid: &CommitOid) -> GitResult { + let oid = oid + .to_oid() + .map_err(|_| GitError::InvalidOid(oid.to_string()))?; + let tree = self + .repo() + .find_tree(oid) + .map_err(|_| GitError::ObjectNotFound(oid.to_string()))?; + Ok(TreeInfo::from_git2(&tree)) + } + + pub fn tree_exists(&self, oid: &CommitOid) -> bool { + oid.to_oid() + .ok() + .and_then(|oid| self.repo.find_tree(oid).ok()) + .is_some() + } + + pub fn tree_entry(&self, oid: &CommitOid, index: usize) -> GitResult { + let oid = oid + .to_oid() + .map_err(|_| GitError::InvalidOid(oid.to_string()))?; + let tree = self + .repo() + .find_tree(oid) + .map_err(|_| GitError::ObjectNotFound(oid.to_string()))?; + let entry = tree + .get(index) + .ok_or_else(|| GitError::Internal("tree entry not found".to_string()))?; + Ok(TreeEntry::from_git2(entry, self.repo())) + } + + pub fn tree_list(&self, oid: &CommitOid) -> GitResult> { + let oid = oid + .to_oid() + .map_err(|_| GitError::InvalidOid(oid.to_string()))?; + let tree = self + .repo() + .find_tree(oid) + .map_err(|_| GitError::ObjectNotFound(oid.to_string()))?; + let repo = self.repo(); + let entries: Vec = tree + .iter() + .map(|entry| TreeEntry::from_git2(entry, repo)) + .collect(); + Ok(entries) + } + + pub fn tree_entry_count(&self, oid: &CommitOid) -> GitResult { + let info = self.tree_get(oid)?; + Ok(info.entry_count) + } + + pub fn tree_entry_by_path(&self, tree_oid: &CommitOid, path: &str) -> GitResult { + let oid = tree_oid + .to_oid() + .map_err(|_| GitError::InvalidOid(tree_oid.to_string()))?; + let tree = self + .repo() + 
.find_tree(oid) + .map_err(|_| GitError::ObjectNotFound(tree_oid.to_string()))?; + let entry = tree + .get_path(Path::new(path)) + .map_err(|e| GitError::Internal(format!("path '{}': {}", path, e)))?; + Ok(TreeEntry::from_git2(entry, self.repo())) + } + + pub fn tree_entry_by_path_from_commit( + &self, + commit_oid: &CommitOid, + path: &str, + ) -> GitResult { + let oid = commit_oid + .to_oid() + .map_err(|_| GitError::InvalidOid(commit_oid.to_string()))?; + let commit = self + .repo() + .find_commit(oid) + .map_err(|_| GitError::ObjectNotFound(commit_oid.to_string()))?; + let tree = self + .repo() + .find_tree(commit.tree_id()) + .map_err(|e| GitError::Internal(e.to_string()))?; + let entry = tree + .get_path(Path::new(path)) + .map_err(|e| GitError::Internal(format!("path '{}': {}", path, e)))?; + Ok(TreeEntry::from_git2(entry, self.repo())) + } + + pub fn tree_is_empty(&self, oid: &CommitOid) -> GitResult { + let info = self.tree_get(oid)?; + Ok(info.is_empty) + } + + pub fn tree_diffstats( + &self, + old_tree: &CommitOid, + new_tree: &CommitOid, + ) -> GitResult { + use crate::diff::types::DiffStats; + let old_oid = old_tree + .to_oid() + .map_err(|_| GitError::InvalidOid(old_tree.to_string()))?; + let new_oid = new_tree + .to_oid() + .map_err(|_| GitError::InvalidOid(new_tree.to_string()))?; + let old_tree = self + .repo() + .find_tree(old_oid) + .map_err(|e| GitError::Internal(e.to_string()))?; + let new_tree = self + .repo() + .find_tree(new_oid) + .map_err(|e| GitError::Internal(e.to_string()))?; + let diff = self + .repo() + .diff_tree_to_tree(Some(&old_tree), Some(&new_tree), None) + .map_err(|e| GitError::Internal(e.to_string()))?; + let stats = diff + .stats() + .map_err(|e| GitError::Internal(e.to_string()))?; + Ok(DiffStats::from_git2(&stats)) + } +} diff --git a/libs/git/tree/types.rs b/libs/git/tree/types.rs new file mode 100644 index 0000000..acd376d --- /dev/null +++ b/libs/git/tree/types.rs @@ -0,0 +1,54 @@ +//! 
//! Serializable types for the tree domain.

use serde::{Deserialize, Serialize};

use crate::commit::types::CommitOid;

/// Lightweight summary of a git tree object.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TreeInfo {
    pub oid: CommitOid,
    pub entry_count: usize,
    pub is_empty: bool,
}

impl TreeInfo {
    pub fn from_git2(tree: &git2::Tree<'_>) -> Self {
        Self {
            oid: CommitOid::from_git2(tree.id()),
            entry_count: tree.len(),
            is_empty: tree.is_empty(),
        }
    }
}

/// One entry (blob/tree/commit) inside a tree listing.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TreeEntry {
    pub name: String,
    pub oid: CommitOid,
    pub kind: String,
    pub filemode: u32,
    pub is_binary: bool,
}

impl TreeEntry {
    pub fn from_git2(entry: git2::TreeEntry<'_>, repo: &git2::Repository) -> Self {
        let kind = entry
            .kind()
            .map(|k| format!("{:?}", k).to_lowercase())
            .unwrap_or_default();
        // Binary detection: check actual blob content, not just object type.
        // Blob type means "file content" in git, not "binary file".
        // NOTE(review): this materializes every blob just to classify it —
        // O(blob size) per entry when listing a tree. Confirm this is
        // acceptable for large repositories, or make it lazy.
        let is_binary = entry
            .to_object(repo)
            .ok()
            .and_then(|obj| obj.as_blob().map(|blob| blob.is_binary()))
            .unwrap_or(false);
        Self {
            name: entry.name().unwrap_or("").to_string(),
            oid: CommitOid::from_git2(entry.id()),
            kind,
            filemode: entry.filemode() as u32,
            is_binary,
        }
    }
}

// ===== libs/migrate/Cargo.toml =====
[package]
name = "migrate"
version.workspace = true
edition.workspace = true
authors.workspace = true
description.workspace = true
repository.workspace = true
readme.workspace = true
homepage.workspace = true
license.workspace = true
keywords.workspace = true
categories.workspace = true
documentation.workspace = true

[lib]
path = "lib.rs"
name = "migrate"

[dependencies]
sea-orm-migration = { workspace = true }
sea-orm = { workspace = true }
sea-query = { workspace = true }
models = { workspace = true }
async-trait = { workspace
= true } + +[lints] +workspace = true diff --git a/libs/migrate/lib.rs b/libs/migrate/lib.rs new file mode 100644 index 0000000..ad772fd --- /dev/null +++ b/libs/migrate/lib.rs @@ -0,0 +1,248 @@ +pub use sea_orm_migration::prelude::*; + +pub async fn execute_sql(manager: &SchemaManager<'_>, sql: &str) -> Result<(), DbErr> { + for stmt in split_sql_statements(sql) { + if stmt.is_empty() { + continue; + } + manager + .get_connection() + .execute_raw(sea_orm::Statement::from_string( + sea_orm::DbBackend::Postgres, + stmt, + )) + .await?; + } + Ok(()) +} + +fn split_sql_statements(sql: &str) -> Vec<&str> { + sql.split(';') + .map(|s| s.trim()) + .filter(|s| !s.is_empty()) + .collect() +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_split_simple() { + let sql = "SELECT 1; SELECT 2; SELECT 3"; + let stmts = split_sql_statements(sql); + assert_eq!(stmts, &["SELECT 1", "SELECT 2", "SELECT 3"]); + } +} + +pub struct Migrator; + +#[async_trait::async_trait] +impl MigratorTrait for Migrator { + fn migrations() -> Vec> { + vec![ + // Foundation tables (no dependencies) + Box::new(m20250628_000002_create_user::Migration), + Box::new(m20250628_000007_create_user_password::Migration), + Box::new(m20250628_000005_create_user_email::Migration), + Box::new(m20250628_000003_create_user_2fa::Migration), + Box::new(m20250628_000006_create_user_notification::Migration), + Box::new(m20250628_000009_create_user_preferences::Migration), + Box::new(m20250628_000008_create_user_password_reset::Migration), + Box::new(m20250628_000010_create_user_relation::Migration), + Box::new(m20250628_000011_create_user_ssh_key::Migration), + Box::new(m20250628_000012_create_user_token::Migration), + Box::new(m20250628_000004_create_user_activity_log::Migration), + // Project tables + Box::new(m20260411_000001_create_workspace::Migration), + Box::new(m20260411_000002_create_workspace_membership::Migration), + Box::new(m20260411_000003_add_workspace_id_to_project::Migration), + 
Box::new(m20260411_000004_add_invite_token_to_workspace_membership::Migration), + Box::new(m20260412_000003_create_project_skill::Migration), + Box::new(m20260413_000001_add_skill_commit_blob::Migration), + Box::new(m20260414_000001_create_agent_task::Migration), + Box::new(m20260412_000002_create_workspace_billing_history::Migration), + Box::new(m20250628_000013_create_project::Migration), + Box::new(m20250628_000014_create_project_access_log::Migration), + Box::new(m20250628_000015_create_project_audit_log::Migration), + Box::new(m20250628_000016_create_project_billing::Migration), + Box::new(m20250628_000017_create_project_billing_history::Migration), + Box::new(m20250628_000018_create_project_follow::Migration), + Box::new(m20250628_000019_create_project_history_name::Migration), + Box::new(m20250628_000020_create_project_label::Migration), + Box::new(m20250628_000021_create_project_like::Migration), + Box::new(m20250628_000022_create_project_member_invitations::Migration), + Box::new(m20250628_000023_create_project_member_join_answers::Migration), + Box::new(m20250628_000024_create_project_member_join_request::Migration), + Box::new(m20250628_000025_create_project_member_join_settings::Migration), + Box::new(m20250628_000026_create_project_members::Migration), + Box::new(m20250628_000027_create_project_watch::Migration), + // Repo tables + Box::new(m20250628_000028_create_repo::Migration), + Box::new(m20250628_000029_create_repo_branch::Migration), + Box::new(m20250628_000030_create_repo_branch_protect::Migration), + Box::new(m20250628_000031_create_repo_collaborator::Migration), + Box::new(m20250628_000032_create_repo_commit::Migration), + Box::new(m20250628_000033_create_repo_fork::Migration), + Box::new(m20250628_000034_create_repo_history_name::Migration), + Box::new(m20250628_000035_create_repo_hook::Migration), + Box::new(m20250628_000036_create_repo_lfs_lock::Migration), + Box::new(m20250628_000037_create_repo_lfs_object::Migration), + 
Box::new(m20250628_000038_create_repo_lock::Migration), + Box::new(m20250628_000039_create_repo_star::Migration), + Box::new(m20250628_000040_create_repo_tag::Migration), + Box::new(m20250628_000041_create_repo_upstream::Migration), + Box::new(m20250628_000042_create_repo_watch::Migration), + Box::new(m20250628_000043_create_repo_webhook::Migration), + // Issue tables + Box::new(m20250628_000044_create_issue::Migration), + Box::new(m20250628_000045_create_issue_assignee::Migration), + Box::new(m20250628_000046_create_issue_comment::Migration), + Box::new(m20250628_000047_create_issue_comment_reaction::Migration), + Box::new(m20250628_000048_create_issue_label::Migration), + Box::new(m20250628_000049_create_issue_pull_request::Migration), + Box::new(m20250628_000050_create_issue_reaction::Migration), + Box::new(m20250628_000051_create_issue_repo::Migration), + Box::new(m20250628_000052_create_issue_subscriber::Migration), + // Pull request tables + Box::new(m20250628_000053_create_pull_request::Migration), + Box::new(m20250628_000054_create_pull_request_commit::Migration), + Box::new(m20250628_000055_create_pull_request_review::Migration), + Box::new(m20250628_000056_create_pull_request_review_comment::Migration), + // Room tables + Box::new(m20250628_000057_create_room_category::Migration), + Box::new(m20250628_000058_create_room::Migration), + Box::new(m20250628_000059_create_room_ai::Migration), + Box::new(m20250628_000060_create_room_member::Migration), + Box::new(m20250628_000061_create_room_message::Migration), + Box::new(m20250628_000062_create_room_pin::Migration), + Box::new(m20250628_000063_create_room_thread::Migration), + // Agent tables + Box::new(m20250628_000068_create_ai_model_provider::Migration), + Box::new(m20250628_000064_create_ai_model::Migration), + Box::new(m20250628_000069_create_ai_model_version::Migration), + Box::new(m20250628_000065_create_ai_model_capability::Migration), + 
Box::new(m20250628_000066_create_ai_model_parameter_profile::Migration), + Box::new(m20250628_000067_create_ai_model_pricing::Migration), + // AI session tables + Box::new(m20250628_000070_create_ai_session::Migration), + Box::new(m20250628_000072_create_ai_tool_call::Migration), + Box::new(m20250628_000071_create_ai_tool_auth::Migration), + // System tables + Box::new(m20250628_000073_create_label::Migration), + Box::new(m20250628_000074_create_notify::Migration), + Box::new(m20250628_000076_create_user_email_change::Migration), + Box::new(m20250628_000077_create_project_activity::Migration), + Box::new(m20250628_000078_add_room_member_do_not_disturb::Migration), + Box::new(m20250628_000079_add_room_message_in_reply_to::Migration), + Box::new(m20250628_000080_add_message_reactions_and_search::Migration), + Box::new(m20250628_000081_add_message_edit_history::Migration), + Box::new(m20250628_000082_add_pr_review_comment_resolve::Migration), + Box::new(m20250628_000083_add_pr_review_request::Migration), + Box::new(m20260407_000001_extend_repo_branch_protect::Migration), + Box::new(m20260407_000002_create_project_board::Migration), + Box::new(m20260407_000003_add_repo_ai_code_review::Migration), + // All CREATE TABLE migrations now use correct column types (TIMESTAMPTZ) and names. + // For existing databases that were created with buggy migrations, apply the fix + // by manually running: ALTER TABLE ... ALTER COLUMN ... 
TYPE TIMESTAMPTZ; + // Room notifications (already existed) + Box::new(m20250628_000001_create_room_notifications::Migration), + // Workspace, billing, skill and agent-task tables (previously declared below but never registered, so they would never run) + Box::new(m20260411_000001_create_workspace::Migration), + Box::new(m20260411_000002_create_workspace_membership::Migration), + Box::new(m20260411_000003_add_workspace_id_to_project::Migration), + Box::new(m20260411_000004_add_invite_token_to_workspace_membership::Migration), + Box::new(m20260412_000001_create_workspace_billing::Migration), + Box::new(m20260412_000002_create_workspace_billing_history::Migration), + Box::new(m20260412_000003_create_project_skill::Migration), + Box::new(m20260413_000001_add_skill_commit_blob::Migration), + Box::new(m20260414_000001_create_agent_task::Migration), + ] + } +} + +pub mod m20250628_000001_create_room_notifications; +pub mod m20250628_000002_create_user; +pub mod m20250628_000003_create_user_2fa; +pub mod m20250628_000004_create_user_activity_log; +pub mod m20250628_000005_create_user_email; +pub mod m20250628_000006_create_user_notification; +pub mod m20250628_000007_create_user_password; +pub mod m20250628_000008_create_user_password_reset; +pub mod m20250628_000009_create_user_preferences; +pub mod m20250628_000010_create_user_relation; +pub mod m20250628_000011_create_user_ssh_key; +pub mod m20250628_000012_create_user_token; +pub mod m20250628_000013_create_project; +pub mod m20250628_000014_create_project_access_log; +pub mod m20250628_000015_create_project_audit_log; +pub mod m20250628_000016_create_project_billing; +pub mod m20250628_000017_create_project_billing_history; +pub mod m20250628_000018_create_project_follow; +pub mod m20250628_000019_create_project_history_name; +pub mod m20250628_000020_create_project_label; +pub mod m20250628_000021_create_project_like; +pub mod m20250628_000022_create_project_member_invitations; +pub mod m20250628_000023_create_project_member_join_answers; +pub mod m20250628_000024_create_project_member_join_request; +pub mod m20250628_000025_create_project_member_join_settings; +pub mod m20250628_000026_create_project_members; +pub mod m20250628_000027_create_project_watch; +pub mod m20250628_000028_create_repo; +pub mod m20250628_000029_create_repo_branch; +pub mod m20250628_000030_create_repo_branch_protect; +pub mod m20250628_000031_create_repo_collaborator; +pub mod m20250628_000032_create_repo_commit; +pub mod m20250628_000033_create_repo_fork; +pub mod m20250628_000034_create_repo_history_name; +pub mod m20250628_000035_create_repo_hook; +pub mod m20250628_000036_create_repo_lfs_lock; +pub mod m20250628_000037_create_repo_lfs_object; +pub 
mod m20250628_000038_create_repo_lock; +pub mod m20250628_000039_create_repo_star; +pub mod m20250628_000040_create_repo_tag; +pub mod m20250628_000041_create_repo_upstream; +pub mod m20250628_000042_create_repo_watch; +pub mod m20250628_000043_create_repo_webhook; +pub mod m20250628_000044_create_issue; +pub mod m20250628_000045_create_issue_assignee; +pub mod m20250628_000046_create_issue_comment; +pub mod m20250628_000047_create_issue_comment_reaction; +pub mod m20250628_000048_create_issue_label; +pub mod m20250628_000049_create_issue_pull_request; +pub mod m20250628_000050_create_issue_reaction; +pub mod m20250628_000051_create_issue_repo; +pub mod m20250628_000052_create_issue_subscriber; +pub mod m20250628_000053_create_pull_request; +pub mod m20250628_000054_create_pull_request_commit; +pub mod m20250628_000055_create_pull_request_review; +pub mod m20250628_000056_create_pull_request_review_comment; +pub mod m20250628_000057_create_room_category; +pub mod m20250628_000058_create_room; +pub mod m20250628_000059_create_room_ai; +pub mod m20250628_000060_create_room_member; +pub mod m20250628_000061_create_room_message; +pub mod m20250628_000062_create_room_pin; +pub mod m20250628_000063_create_room_thread; +pub mod m20250628_000064_create_ai_model; +pub mod m20250628_000065_create_ai_model_capability; +pub mod m20250628_000066_create_ai_model_parameter_profile; +pub mod m20250628_000067_create_ai_model_pricing; +pub mod m20250628_000068_create_ai_model_provider; +pub mod m20250628_000069_create_ai_model_version; +pub mod m20250628_000070_create_ai_session; +pub mod m20250628_000071_create_ai_tool_auth; +pub mod m20250628_000072_create_ai_tool_call; +pub mod m20250628_000073_create_label; +pub mod m20250628_000074_create_notify; +pub mod m20250628_000075_fix_column_types_and_names; +pub mod m20250628_000076_create_user_email_change; +pub mod m20250628_000077_create_project_activity; +pub mod m20250628_000078_add_room_member_do_not_disturb; +pub mod 
m20250628_000079_add_room_message_in_reply_to; +pub mod m20250628_000080_add_message_reactions_and_search; +pub mod m20250628_000081_add_message_edit_history; +pub mod m20250628_000082_add_pr_review_comment_resolve; +pub mod m20250628_000083_add_pr_review_request; +pub mod m20260407_000001_extend_repo_branch_protect; +pub mod m20260407_000002_create_project_board; +pub mod m20260407_000003_add_repo_ai_code_review; +pub mod m20260411_000001_create_workspace; +pub mod m20260411_000002_create_workspace_membership; +pub mod m20260411_000003_add_workspace_id_to_project; +pub mod m20260411_000004_add_invite_token_to_workspace_membership; +pub mod m20260412_000001_create_workspace_billing; +pub mod m20260412_000002_create_workspace_billing_history; +pub mod m20260412_000003_create_project_skill; +pub mod m20260413_000001_add_skill_commit_blob; +pub mod m20260414_000001_create_agent_task; diff --git a/libs/migrate/m20250628_000001_create_room_notifications.rs b/libs/migrate/m20250628_000001_create_room_notifications.rs new file mode 100644 index 0000000..896f6e5 --- /dev/null +++ b/libs/migrate/m20250628_000001_create_room_notifications.rs @@ -0,0 +1,37 @@ +//! 
SeaORM migration: create room_notifications table + +use sea_orm_migration::prelude::*; + +pub struct Migration; + +impl MigrationName for Migration { + fn name(&self) -> &str { + "m20250628_000001_create_room_notifications" + } +} + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let sql = include_str!("sql/m20250628_000001_create_room_notifications.sql"); + super::execute_sql(manager, sql).await + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .get_connection() + .execute_raw(sea_orm::Statement::from_string( + sea_orm::DbBackend::Postgres, + "DROP TABLE IF EXISTS room_notifications;", + )) + .await?; + manager + .get_connection() + .execute_raw(sea_orm::Statement::from_string( + sea_orm::DbBackend::Postgres, + "DROP TYPE IF EXISTS notification_type;", + )) + .await?; + Ok(()) + } +} diff --git a/libs/migrate/m20250628_000002_create_user.rs b/libs/migrate/m20250628_000002_create_user.rs new file mode 100644 index 0000000..ec40350 --- /dev/null +++ b/libs/migrate/m20250628_000002_create_user.rs @@ -0,0 +1,30 @@ +//! 
SeaORM migration: create user table + +use sea_orm_migration::prelude::*; + +pub struct Migration; + +impl MigrationName for Migration { + fn name(&self) -> &str { + "m20250628_000002_create_user" + } +} + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let sql = include_str!("sql/m20250628_000002_create_user.sql"); + super::execute_sql(manager, sql).await + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .get_connection() + .execute_raw(sea_orm::Statement::from_string( + sea_orm::DbBackend::Postgres, + format!("DROP TABLE IF EXISTS {};", "\"user\""), + )) + .await?; + Ok(()) + } +} diff --git a/libs/migrate/m20250628_000003_create_user_2fa.rs b/libs/migrate/m20250628_000003_create_user_2fa.rs new file mode 100644 index 0000000..9610a90 --- /dev/null +++ b/libs/migrate/m20250628_000003_create_user_2fa.rs @@ -0,0 +1,30 @@ +//! SeaORM migration: create user_2fa table + +use sea_orm_migration::prelude::*; + +pub struct Migration; + +impl MigrationName for Migration { + fn name(&self) -> &str { + "m20250628_000003_create_user_2fa" + } +} + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let sql = include_str!("sql/m20250628_000003_create_user_2fa.sql"); + super::execute_sql(manager, sql).await + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + ..get_connection() + .execute_raw(sea_orm::Statement::from_string( + sea_orm::DbBackend::Postgres, + format!("DROP TABLE IF EXISTS {};", "user_2fa"), + )) + .await?; + Ok(()) + } +} diff --git a/libs/migrate/m20250628_000004_create_user_activity_log.rs b/libs/migrate/m20250628_000004_create_user_activity_log.rs new file mode 100644 index 0000000..f45933e --- /dev/null +++ b/libs/migrate/m20250628_000004_create_user_activity_log.rs @@ -0,0 +1,30 @@ +//!
SeaORM migration: create user_activity_log table + +use sea_orm_migration::prelude::*; + +pub struct Migration; + +impl MigrationName for Migration { + fn name(&self) -> &str { + "m20250628_000004_create_user_activity_log" + } +} + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let sql = include_str!("sql/m20250628_000004_create_user_activity_log.sql"); + super::execute_sql(manager, sql).await + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .get_connection() + .execute_raw(sea_orm::Statement::from_string( + sea_orm::DbBackend::Postgres, + format!("DROP TABLE IF EXISTS {};", "user_activity_log"), + )) + .await?; + Ok(()) + } +} diff --git a/libs/migrate/m20250628_000005_create_user_email.rs b/libs/migrate/m20250628_000005_create_user_email.rs new file mode 100644 index 0000000..7d81333 --- /dev/null +++ b/libs/migrate/m20250628_000005_create_user_email.rs @@ -0,0 +1,30 @@ +//! 
SeaORM migration: create user_email table + +use sea_orm_migration::prelude::*; + +pub struct Migration; + +impl MigrationName for Migration { + fn name(&self) -> &str { + "m20250628_000005_create_user_email" + } +} + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let sql = include_str!("sql/m20250628_000005_create_user_email.sql"); + super::execute_sql(manager, sql).await + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .get_connection() + .execute_raw(sea_orm::Statement::from_string( + sea_orm::DbBackend::Postgres, + format!("DROP TABLE IF EXISTS {};", "user_email"), + )) + .await?; + Ok(()) + } +} diff --git a/libs/migrate/m20250628_000006_create_user_notification.rs b/libs/migrate/m20250628_000006_create_user_notification.rs new file mode 100644 index 0000000..1f7da4c --- /dev/null +++ b/libs/migrate/m20250628_000006_create_user_notification.rs @@ -0,0 +1,30 @@ +//! 
SeaORM migration: create user_notification table + +use sea_orm_migration::prelude::*; + +pub struct Migration; + +impl MigrationName for Migration { + fn name(&self) -> &str { + "m20250628_000006_create_user_notification" + } +} + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let sql = include_str!("sql/m20250628_000006_create_user_notification.sql"); + super::execute_sql(manager, sql).await + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .get_connection() + .execute_raw(sea_orm::Statement::from_string( + sea_orm::DbBackend::Postgres, + format!("DROP TABLE IF EXISTS {};", "user_notification"), + )) + .await?; + Ok(()) + } +} diff --git a/libs/migrate/m20250628_000007_create_user_password.rs b/libs/migrate/m20250628_000007_create_user_password.rs new file mode 100644 index 0000000..2d12afa --- /dev/null +++ b/libs/migrate/m20250628_000007_create_user_password.rs @@ -0,0 +1,30 @@ +//! 
SeaORM migration: create user_password table + +use sea_orm_migration::prelude::*; + +pub struct Migration; + +impl MigrationName for Migration { + fn name(&self) -> &str { + "m20250628_000007_create_user_password" + } +} + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let sql = include_str!("sql/m20250628_000007_create_user_password.sql"); + super::execute_sql(manager, sql).await + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .get_connection() + .execute_raw(sea_orm::Statement::from_string( + sea_orm::DbBackend::Postgres, + format!("DROP TABLE IF EXISTS {};", "user_password"), + )) + .await?; + Ok(()) + } +} diff --git a/libs/migrate/m20250628_000008_create_user_password_reset.rs b/libs/migrate/m20250628_000008_create_user_password_reset.rs new file mode 100644 index 0000000..8890f6b --- /dev/null +++ b/libs/migrate/m20250628_000008_create_user_password_reset.rs @@ -0,0 +1,30 @@ +//! 
SeaORM migration: create user_password_reset table + +use sea_orm_migration::prelude::*; + +pub struct Migration; + +impl MigrationName for Migration { + fn name(&self) -> &str { + "m20250628_000008_create_user_password_reset" + } +} + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let sql = include_str!("sql/m20250628_000008_create_user_password_reset.sql"); + super::execute_sql(manager, sql).await + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .get_connection() + .execute_raw(sea_orm::Statement::from_string( + sea_orm::DbBackend::Postgres, + format!("DROP TABLE IF EXISTS {};", "user_password_reset"), + )) + .await?; + Ok(()) + } +} diff --git a/libs/migrate/m20250628_000009_create_user_preferences.rs b/libs/migrate/m20250628_000009_create_user_preferences.rs new file mode 100644 index 0000000..5c352cf --- /dev/null +++ b/libs/migrate/m20250628_000009_create_user_preferences.rs @@ -0,0 +1,30 @@ +//! 
SeaORM migration: create user_preferences table + +use sea_orm_migration::prelude::*; + +pub struct Migration; + +impl MigrationName for Migration { + fn name(&self) -> &str { + "m20250628_000009_create_user_preferences" + } +} + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let sql = include_str!("sql/m20250628_000009_create_user_preferences.sql"); + super::execute_sql(manager, sql).await + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .get_connection() + .execute_raw(sea_orm::Statement::from_string( + sea_orm::DbBackend::Postgres, + format!("DROP TABLE IF EXISTS {};", "user_preferences"), + )) + .await?; + Ok(()) + } +} diff --git a/libs/migrate/m20250628_000010_create_user_relation.rs b/libs/migrate/m20250628_000010_create_user_relation.rs new file mode 100644 index 0000000..20703ff --- /dev/null +++ b/libs/migrate/m20250628_000010_create_user_relation.rs @@ -0,0 +1,30 @@ +//! 
SeaORM migration: create user_relation table + +use sea_orm_migration::prelude::*; + +pub struct Migration; + +impl MigrationName for Migration { + fn name(&self) -> &str { + "m20250628_000010_create_user_relation" + } +} + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let sql = include_str!("sql/m20250628_000010_create_user_relation.sql"); + super::execute_sql(manager, sql).await + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .get_connection() + .execute_raw(sea_orm::Statement::from_string( + sea_orm::DbBackend::Postgres, + format!("DROP TABLE IF EXISTS {};", "user_relation"), + )) + .await?; + Ok(()) + } +} diff --git a/libs/migrate/m20250628_000011_create_user_ssh_key.rs b/libs/migrate/m20250628_000011_create_user_ssh_key.rs new file mode 100644 index 0000000..c84eaff --- /dev/null +++ b/libs/migrate/m20250628_000011_create_user_ssh_key.rs @@ -0,0 +1,30 @@ +//! 
SeaORM migration: create user_ssh_key table + +use sea_orm_migration::prelude::*; + +pub struct Migration; + +impl MigrationName for Migration { + fn name(&self) -> &str { + "m20250628_000011_create_user_ssh_key" + } +} + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let sql = include_str!("sql/m20250628_000011_create_user_ssh_key.sql"); + super::execute_sql(manager, sql).await + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .get_connection() + .execute_raw(sea_orm::Statement::from_string( + sea_orm::DbBackend::Postgres, + format!("DROP TABLE IF EXISTS {};", "user_ssh_key"), + )) + .await?; + Ok(()) + } +} diff --git a/libs/migrate/m20250628_000012_create_user_token.rs b/libs/migrate/m20250628_000012_create_user_token.rs new file mode 100644 index 0000000..10ed2e9 --- /dev/null +++ b/libs/migrate/m20250628_000012_create_user_token.rs @@ -0,0 +1,30 @@ +//! SeaORM migration: create user_token table + +use sea_orm_migration::prelude::*; + +pub struct Migration; + +impl MigrationName for Migration { + fn name(&self) -> &str { + "m20250628_000012_create_user_token" + } +} + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let sql = include_str!("sql/m20250628_000012_create_user_token.sql"); + super::execute_sql(manager, sql).await + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .get_connection() + .execute_raw(sea_orm::Statement::from_string( + sea_orm::DbBackend::Postgres, + format!("DROP TABLE IF EXISTS {};", "user_token"), + )) + .await?; + Ok(()) + } +} diff --git a/libs/migrate/m20250628_000013_create_project.rs b/libs/migrate/m20250628_000013_create_project.rs new file mode 100644 index 0000000..4319d26 --- /dev/null +++ b/libs/migrate/m20250628_000013_create_project.rs @@ -0,0 +1,30 @@ +//! 
SeaORM migration: create project table + +use sea_orm_migration::prelude::*; + +pub struct Migration; + +impl MigrationName for Migration { + fn name(&self) -> &str { + "m20250628_000013_create_project" + } +} + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let sql = include_str!("sql/m20250628_000013_create_project.sql"); + super::execute_sql(manager, sql).await + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .get_connection() + .execute_raw(sea_orm::Statement::from_string( + sea_orm::DbBackend::Postgres, + format!("DROP TABLE IF EXISTS {};", "project"), + )) + .await?; + Ok(()) + } +} diff --git a/libs/migrate/m20250628_000014_create_project_access_log.rs b/libs/migrate/m20250628_000014_create_project_access_log.rs new file mode 100644 index 0000000..b395a31 --- /dev/null +++ b/libs/migrate/m20250628_000014_create_project_access_log.rs @@ -0,0 +1,30 @@ +//! 
SeaORM migration: create project_access_log table + +use sea_orm_migration::prelude::*; + +pub struct Migration; + +impl MigrationName for Migration { + fn name(&self) -> &str { + "m20250628_000014_create_project_access_log" + } +} + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let sql = include_str!("sql/m20250628_000014_create_project_access_log.sql"); + super::execute_sql(manager, sql).await + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .get_connection() + .execute_raw(sea_orm::Statement::from_string( + sea_orm::DbBackend::Postgres, + format!("DROP TABLE IF EXISTS {};", "project_access_log"), + )) + .await?; + Ok(()) + } +} diff --git a/libs/migrate/m20250628_000015_create_project_audit_log.rs b/libs/migrate/m20250628_000015_create_project_audit_log.rs new file mode 100644 index 0000000..a9bdef0 --- /dev/null +++ b/libs/migrate/m20250628_000015_create_project_audit_log.rs @@ -0,0 +1,30 @@ +//! 
SeaORM migration: create project_audit_log table + +use sea_orm_migration::prelude::*; + +pub struct Migration; + +impl MigrationName for Migration { + fn name(&self) -> &str { + "m20250628_000015_create_project_audit_log" + } +} + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let sql = include_str!("sql/m20250628_000015_create_project_audit_log.sql"); + super::execute_sql(manager, sql).await + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .get_connection() + .execute_raw(sea_orm::Statement::from_string( + sea_orm::DbBackend::Postgres, + format!("DROP TABLE IF EXISTS {};", "project_audit_log"), + )) + .await?; + Ok(()) + } +} diff --git a/libs/migrate/m20250628_000016_create_project_billing.rs b/libs/migrate/m20250628_000016_create_project_billing.rs new file mode 100644 index 0000000..82b8f21 --- /dev/null +++ b/libs/migrate/m20250628_000016_create_project_billing.rs @@ -0,0 +1,30 @@ +//! 
SeaORM migration: create project_billing table + +use sea_orm_migration::prelude::*; + +pub struct Migration; + +impl MigrationName for Migration { + fn name(&self) -> &str { + "m20250628_000016_create_project_billing" + } +} + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let sql = include_str!("sql/m20250628_000016_create_project_billing.sql"); + super::execute_sql(manager, sql).await + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .get_connection() + .execute_raw(sea_orm::Statement::from_string( + sea_orm::DbBackend::Postgres, + format!("DROP TABLE IF EXISTS {};", "project_billing"), + )) + .await?; + Ok(()) + } +} diff --git a/libs/migrate/m20250628_000017_create_project_billing_history.rs b/libs/migrate/m20250628_000017_create_project_billing_history.rs new file mode 100644 index 0000000..ecc15a9 --- /dev/null +++ b/libs/migrate/m20250628_000017_create_project_billing_history.rs @@ -0,0 +1,30 @@ +//! 
SeaORM migration: create project_billing_history table + +use sea_orm_migration::prelude::*; + +pub struct Migration; + +impl MigrationName for Migration { + fn name(&self) -> &str { + "m20250628_000017_create_project_billing_history" + } +} + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let sql = include_str!("sql/m20250628_000017_create_project_billing_history.sql"); + super::execute_sql(manager, sql).await + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .get_connection() + .execute_raw(sea_orm::Statement::from_string( + sea_orm::DbBackend::Postgres, + format!("DROP TABLE IF EXISTS {};", "project_billing_history"), + )) + .await?; + Ok(()) + } +} diff --git a/libs/migrate/m20250628_000018_create_project_follow.rs b/libs/migrate/m20250628_000018_create_project_follow.rs new file mode 100644 index 0000000..be6942f --- /dev/null +++ b/libs/migrate/m20250628_000018_create_project_follow.rs @@ -0,0 +1,30 @@ +//! 
SeaORM migration: create project_follow table + +use sea_orm_migration::prelude::*; + +pub struct Migration; + +impl MigrationName for Migration { + fn name(&self) -> &str { + "m20250628_000018_create_project_follow" + } +} + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let sql = include_str!("sql/m20250628_000018_create_project_follow.sql"); + super::execute_sql(manager, sql).await + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .get_connection() + .execute_raw(sea_orm::Statement::from_string( + sea_orm::DbBackend::Postgres, + format!("DROP TABLE IF EXISTS {};", "project_follow"), + )) + .await?; + Ok(()) + } +} diff --git a/libs/migrate/m20250628_000019_create_project_history_name.rs b/libs/migrate/m20250628_000019_create_project_history_name.rs new file mode 100644 index 0000000..b7d7b62 --- /dev/null +++ b/libs/migrate/m20250628_000019_create_project_history_name.rs @@ -0,0 +1,30 @@ +//! 
SeaORM migration: create project_history_name table + +use sea_orm_migration::prelude::*; + +pub struct Migration; + +impl MigrationName for Migration { + fn name(&self) -> &str { + "m20250628_000019_create_project_history_name" + } +} + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let sql = include_str!("sql/m20250628_000019_create_project_history_name.sql"); + super::execute_sql(manager, sql).await + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .get_connection() + .execute_raw(sea_orm::Statement::from_string( + sea_orm::DbBackend::Postgres, + format!("DROP TABLE IF EXISTS {};", "project_history_name"), + )) + .await?; + Ok(()) + } +} diff --git a/libs/migrate/m20250628_000020_create_project_label.rs b/libs/migrate/m20250628_000020_create_project_label.rs new file mode 100644 index 0000000..accd17a --- /dev/null +++ b/libs/migrate/m20250628_000020_create_project_label.rs @@ -0,0 +1,30 @@ +//! 
SeaORM migration: create project_label table + +use sea_orm_migration::prelude::*; + +pub struct Migration; + +impl MigrationName for Migration { + fn name(&self) -> &str { + "m20250628_000020_create_project_label" + } +} + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let sql = include_str!("sql/m20250628_000020_create_project_label.sql"); + super::execute_sql(manager, sql).await + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .get_connection() + .execute_raw(sea_orm::Statement::from_string( + sea_orm::DbBackend::Postgres, + format!("DROP TABLE IF EXISTS {};", "project_label"), + )) + .await?; + Ok(()) + } +} diff --git a/libs/migrate/m20250628_000021_create_project_like.rs b/libs/migrate/m20250628_000021_create_project_like.rs new file mode 100644 index 0000000..72efe61 --- /dev/null +++ b/libs/migrate/m20250628_000021_create_project_like.rs @@ -0,0 +1,30 @@ +//! 
SeaORM migration: create project_like table + +use sea_orm_migration::prelude::*; + +pub struct Migration; + +impl MigrationName for Migration { + fn name(&self) -> &str { + "m20250628_000021_create_project_like" + } +} + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let sql = include_str!("sql/m20250628_000021_create_project_like.sql"); + super::execute_sql(manager, sql).await + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .get_connection() + .execute_raw(sea_orm::Statement::from_string( + sea_orm::DbBackend::Postgres, + format!("DROP TABLE IF EXISTS {};", "project_like"), + )) + .await?; + Ok(()) + } +} diff --git a/libs/migrate/m20250628_000022_create_project_member_invitations.rs b/libs/migrate/m20250628_000022_create_project_member_invitations.rs new file mode 100644 index 0000000..9ed5698 --- /dev/null +++ b/libs/migrate/m20250628_000022_create_project_member_invitations.rs @@ -0,0 +1,30 @@ +//! 
SeaORM migration: create project_member_invitations table + +use sea_orm_migration::prelude::*; + +pub struct Migration; + +impl MigrationName for Migration { + fn name(&self) -> &str { + "m20250628_000022_create_project_member_invitations" + } +} + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let sql = include_str!("sql/m20250628_000022_create_project_member_invitations.sql"); + super::execute_sql(manager, sql).await + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .get_connection() + .execute_raw(sea_orm::Statement::from_string( + sea_orm::DbBackend::Postgres, + format!("DROP TABLE IF EXISTS {};", "project_member_invitations"), + )) + .await?; + Ok(()) + } +} diff --git a/libs/migrate/m20250628_000023_create_project_member_join_answers.rs b/libs/migrate/m20250628_000023_create_project_member_join_answers.rs new file mode 100644 index 0000000..f1afb69 --- /dev/null +++ b/libs/migrate/m20250628_000023_create_project_member_join_answers.rs @@ -0,0 +1,30 @@ +//! 
SeaORM migration: create project_member_join_answers table + +use sea_orm_migration::prelude::*; + +pub struct Migration; + +impl MigrationName for Migration { + fn name(&self) -> &str { + "m20250628_000023_create_project_member_join_answers" + } +} + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let sql = include_str!("sql/m20250628_000023_create_project_member_join_answers.sql"); + super::execute_sql(manager, sql).await + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .get_connection() + .execute_raw(sea_orm::Statement::from_string( + sea_orm::DbBackend::Postgres, + format!("DROP TABLE IF EXISTS {};", "project_member_join_answers"), + )) + .await?; + Ok(()) + } +} diff --git a/libs/migrate/m20250628_000024_create_project_member_join_request.rs b/libs/migrate/m20250628_000024_create_project_member_join_request.rs new file mode 100644 index 0000000..c01e1ae --- /dev/null +++ b/libs/migrate/m20250628_000024_create_project_member_join_request.rs @@ -0,0 +1,30 @@ +//! 
SeaORM migration: create project_member_join_request table + +use sea_orm_migration::prelude::*; + +pub struct Migration; + +impl MigrationName for Migration { + fn name(&self) -> &str { + "m20250628_000024_create_project_member_join_request" + } +} + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let sql = include_str!("sql/m20250628_000024_create_project_member_join_request.sql"); + super::execute_sql(manager, sql).await + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .get_connection() + .execute_raw(sea_orm::Statement::from_string( + sea_orm::DbBackend::Postgres, + format!("DROP TABLE IF EXISTS {};", "project_member_join_request"), + )) + .await?; + Ok(()) + } +} diff --git a/libs/migrate/m20250628_000025_create_project_member_join_settings.rs b/libs/migrate/m20250628_000025_create_project_member_join_settings.rs new file mode 100644 index 0000000..3d3e86a --- /dev/null +++ b/libs/migrate/m20250628_000025_create_project_member_join_settings.rs @@ -0,0 +1,30 @@ +//! 
SeaORM migration: create project_member_join_settings table + +use sea_orm_migration::prelude::*; + +pub struct Migration; + +impl MigrationName for Migration { + fn name(&self) -> &str { + "m20250628_000025_create_project_member_join_settings" + } +} + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let sql = include_str!("sql/m20250628_000025_create_project_member_join_settings.sql"); + super::execute_sql(manager, sql).await + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .get_connection() + .execute_raw(sea_orm::Statement::from_string( + sea_orm::DbBackend::Postgres, + format!("DROP TABLE IF EXISTS {};", "project_member_join_settings"), + )) + .await?; + Ok(()) + } +} diff --git a/libs/migrate/m20250628_000026_create_project_members.rs b/libs/migrate/m20250628_000026_create_project_members.rs new file mode 100644 index 0000000..152719c --- /dev/null +++ b/libs/migrate/m20250628_000026_create_project_members.rs @@ -0,0 +1,30 @@ +//! 
SeaORM migration: create project_members table + +use sea_orm_migration::prelude::*; + +pub struct Migration; + +impl MigrationName for Migration { + fn name(&self) -> &str { + "m20250628_000026_create_project_members" + } +} + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let sql = include_str!("sql/m20250628_000026_create_project_members.sql"); + super::execute_sql(manager, sql).await + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .get_connection() + .execute_raw(sea_orm::Statement::from_string( + sea_orm::DbBackend::Postgres, + format!("DROP TABLE IF EXISTS {};", "project_members"), + )) + .await?; + Ok(()) + } +} diff --git a/libs/migrate/m20250628_000027_create_project_watch.rs b/libs/migrate/m20250628_000027_create_project_watch.rs new file mode 100644 index 0000000..37b9584 --- /dev/null +++ b/libs/migrate/m20250628_000027_create_project_watch.rs @@ -0,0 +1,30 @@ +//! 
SeaORM migration: create project_watch table + +use sea_orm_migration::prelude::*; + +pub struct Migration; + +impl MigrationName for Migration { + fn name(&self) -> &str { + "m20250628_000027_create_project_watch" + } +} + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let sql = include_str!("sql/m20250628_000027_create_project_watch.sql"); + super::execute_sql(manager, sql).await + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .get_connection() + .execute_raw(sea_orm::Statement::from_string( + sea_orm::DbBackend::Postgres, + format!("DROP TABLE IF EXISTS {};", "project_watch"), + )) + .await?; + Ok(()) + } +} diff --git a/libs/migrate/m20250628_000028_create_repo.rs b/libs/migrate/m20250628_000028_create_repo.rs new file mode 100644 index 0000000..103f1ae --- /dev/null +++ b/libs/migrate/m20250628_000028_create_repo.rs @@ -0,0 +1,30 @@ +//! SeaORM migration: create repo table + +use sea_orm_migration::prelude::*; + +pub struct Migration; + +impl MigrationName for Migration { + fn name(&self) -> &str { + "m20250628_000028_create_repo" + } +} + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let sql = include_str!("sql/m20250628_000028_create_repo.sql"); + super::execute_sql(manager, sql).await + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .get_connection() + .execute_raw(sea_orm::Statement::from_string( + sea_orm::DbBackend::Postgres, + format!("DROP TABLE IF EXISTS {};", "repo"), + )) + .await?; + Ok(()) + } +} diff --git a/libs/migrate/m20250628_000029_create_repo_branch.rs b/libs/migrate/m20250628_000029_create_repo_branch.rs new file mode 100644 index 0000000..27387db --- /dev/null +++ b/libs/migrate/m20250628_000029_create_repo_branch.rs @@ -0,0 +1,30 @@ +//! 
SeaORM migration: create repo_branch table + +use sea_orm_migration::prelude::*; + +pub struct Migration; + +impl MigrationName for Migration { + fn name(&self) -> &str { + "m20250628_000029_create_repo_branch" + } +} + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let sql = include_str!("sql/m20250628_000029_create_repo_branch.sql"); + super::execute_sql(manager, sql).await + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .get_connection() + .execute_raw(sea_orm::Statement::from_string( + sea_orm::DbBackend::Postgres, + format!("DROP TABLE IF EXISTS {};", "repo_branch"), + )) + .await?; + Ok(()) + } +} diff --git a/libs/migrate/m20250628_000030_create_repo_branch_protect.rs b/libs/migrate/m20250628_000030_create_repo_branch_protect.rs new file mode 100644 index 0000000..0000abf --- /dev/null +++ b/libs/migrate/m20250628_000030_create_repo_branch_protect.rs @@ -0,0 +1,30 @@ +//! 
SeaORM migration: create repo_branch_protect table + +use sea_orm_migration::prelude::*; + +pub struct Migration; + +impl MigrationName for Migration { + fn name(&self) -> &str { + "m20250628_000030_create_repo_branch_protect" + } +} + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let sql = include_str!("sql/m20250628_000030_create_repo_branch_protect.sql"); + super::execute_sql(manager, sql).await + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .get_connection() + .execute_raw(sea_orm::Statement::from_string( + sea_orm::DbBackend::Postgres, + format!("DROP TABLE IF EXISTS {};", "repo_branch_protect"), + )) + .await?; + Ok(()) + } +} diff --git a/libs/migrate/m20250628_000031_create_repo_collaborator.rs b/libs/migrate/m20250628_000031_create_repo_collaborator.rs new file mode 100644 index 0000000..547d3d1 --- /dev/null +++ b/libs/migrate/m20250628_000031_create_repo_collaborator.rs @@ -0,0 +1,30 @@ +//! 
SeaORM migration: create repo_collaborator table + +use sea_orm_migration::prelude::*; + +pub struct Migration; + +impl MigrationName for Migration { + fn name(&self) -> &str { + "m20250628_000031_create_repo_collaborator" + } +} + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let sql = include_str!("sql/m20250628_000031_create_repo_collaborator.sql"); + super::execute_sql(manager, sql).await + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .get_connection() + .execute_raw(sea_orm::Statement::from_string( + sea_orm::DbBackend::Postgres, + format!("DROP TABLE IF EXISTS {};", "repo_collaborator"), + )) + .await?; + Ok(()) + } +} diff --git a/libs/migrate/m20250628_000032_create_repo_commit.rs b/libs/migrate/m20250628_000032_create_repo_commit.rs new file mode 100644 index 0000000..ca41d28 --- /dev/null +++ b/libs/migrate/m20250628_000032_create_repo_commit.rs @@ -0,0 +1,30 @@ +//! 
SeaORM migration: create repo_commit table + +use sea_orm_migration::prelude::*; + +pub struct Migration; + +impl MigrationName for Migration { + fn name(&self) -> &str { + "m20250628_000032_create_repo_commit" + } +} + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let sql = include_str!("sql/m20250628_000032_create_repo_commit.sql"); + super::execute_sql(manager, sql).await + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .get_connection() + .execute_raw(sea_orm::Statement::from_string( + sea_orm::DbBackend::Postgres, + format!("DROP TABLE IF EXISTS {};", "repo_commit"), + )) + .await?; + Ok(()) + } +} diff --git a/libs/migrate/m20250628_000033_create_repo_fork.rs b/libs/migrate/m20250628_000033_create_repo_fork.rs new file mode 100644 index 0000000..3dc1de7 --- /dev/null +++ b/libs/migrate/m20250628_000033_create_repo_fork.rs @@ -0,0 +1,30 @@ +//! SeaORM migration: create repo_fork table + +use sea_orm_migration::prelude::*; + +pub struct Migration; + +impl MigrationName for Migration { + fn name(&self) -> &str { + "m20250628_000033_create_repo_fork" + } +} + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let sql = include_str!("sql/m20250628_000033_create_repo_fork.sql"); + super::execute_sql(manager, sql).await + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .get_connection() + .execute_raw(sea_orm::Statement::from_string( + sea_orm::DbBackend::Postgres, + format!("DROP TABLE IF EXISTS {};", "repo_fork"), + )) + .await?; + Ok(()) + } +} diff --git a/libs/migrate/m20250628_000034_create_repo_history_name.rs b/libs/migrate/m20250628_000034_create_repo_history_name.rs new file mode 100644 index 0000000..8414c22 --- /dev/null +++ b/libs/migrate/m20250628_000034_create_repo_history_name.rs @@ -0,0 +1,30 @@ 
+//! SeaORM migration: create repo_history_name table + +use sea_orm_migration::prelude::*; + +pub struct Migration; + +impl MigrationName for Migration { + fn name(&self) -> &str { + "m20250628_000034_create_repo_history_name" + } +} + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let sql = include_str!("sql/m20250628_000034_create_repo_history_name.sql"); + super::execute_sql(manager, sql).await + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .get_connection() + .execute_raw(sea_orm::Statement::from_string( + sea_orm::DbBackend::Postgres, + format!("DROP TABLE IF EXISTS {};", "repo_history_name"), + )) + .await?; + Ok(()) + } +} diff --git a/libs/migrate/m20250628_000035_create_repo_hook.rs b/libs/migrate/m20250628_000035_create_repo_hook.rs new file mode 100644 index 0000000..b4a2995 --- /dev/null +++ b/libs/migrate/m20250628_000035_create_repo_hook.rs @@ -0,0 +1,30 @@ +//! 
SeaORM migration: create repo_hook table + +use sea_orm_migration::prelude::*; + +pub struct Migration; + +impl MigrationName for Migration { + fn name(&self) -> &str { + "m20250628_000035_create_repo_hook" + } +} + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let sql = include_str!("sql/m20250628_000035_create_repo_hook.sql"); + super::execute_sql(manager, sql).await + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .get_connection() + .execute_raw(sea_orm::Statement::from_string( + sea_orm::DbBackend::Postgres, + format!("DROP TABLE IF EXISTS {};", "repo_hook"), + )) + .await?; + Ok(()) + } +} diff --git a/libs/migrate/m20250628_000036_create_repo_lfs_lock.rs b/libs/migrate/m20250628_000036_create_repo_lfs_lock.rs new file mode 100644 index 0000000..347da2c --- /dev/null +++ b/libs/migrate/m20250628_000036_create_repo_lfs_lock.rs @@ -0,0 +1,30 @@ +//! SeaORM migration: create repo_lfs_lock table + +use sea_orm_migration::prelude::*; + +pub struct Migration; + +impl MigrationName for Migration { + fn name(&self) -> &str { + "m20250628_000036_create_repo_lfs_lock" + } +} + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let sql = include_str!("sql/m20250628_000036_create_repo_lfs_lock.sql"); + super::execute_sql(manager, sql).await + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .get_connection() + .execute_raw(sea_orm::Statement::from_string( + sea_orm::DbBackend::Postgres, + format!("DROP TABLE IF EXISTS {};", "repo_lfs_lock"), + )) + .await?; + Ok(()) + } +} diff --git a/libs/migrate/m20250628_000037_create_repo_lfs_object.rs b/libs/migrate/m20250628_000037_create_repo_lfs_object.rs new file mode 100644 index 0000000..2ba46c9 --- /dev/null +++ b/libs/migrate/m20250628_000037_create_repo_lfs_object.rs @@ 
-0,0 +1,30 @@ +//! SeaORM migration: create repo_lfs_object table + +use sea_orm_migration::prelude::*; + +pub struct Migration; + +impl MigrationName for Migration { + fn name(&self) -> &str { + "m20250628_000037_create_repo_lfs_object" + } +} + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let sql = include_str!("sql/m20250628_000037_create_repo_lfs_object.sql"); + super::execute_sql(manager, sql).await + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .get_connection() + .execute_raw(sea_orm::Statement::from_string( + sea_orm::DbBackend::Postgres, + format!("DROP TABLE IF EXISTS {};", "repo_lfs_object"), + )) + .await?; + Ok(()) + } +} diff --git a/libs/migrate/m20250628_000038_create_repo_lock.rs b/libs/migrate/m20250628_000038_create_repo_lock.rs new file mode 100644 index 0000000..f9a9c5d --- /dev/null +++ b/libs/migrate/m20250628_000038_create_repo_lock.rs @@ -0,0 +1,30 @@ +//! 
SeaORM migration: create repo_lock table + +use sea_orm_migration::prelude::*; + +pub struct Migration; + +impl MigrationName for Migration { + fn name(&self) -> &str { + "m20250628_000038_create_repo_lock" + } +} + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let sql = include_str!("sql/m20250628_000038_create_repo_lock.sql"); + super::execute_sql(manager, sql).await + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .get_connection() + .execute_raw(sea_orm::Statement::from_string( + sea_orm::DbBackend::Postgres, + format!("DROP TABLE IF EXISTS {};", "repo_lock"), + )) + .await?; + Ok(()) + } +} diff --git a/libs/migrate/m20250628_000039_create_repo_star.rs b/libs/migrate/m20250628_000039_create_repo_star.rs new file mode 100644 index 0000000..9119672 --- /dev/null +++ b/libs/migrate/m20250628_000039_create_repo_star.rs @@ -0,0 +1,30 @@ +//! SeaORM migration: create repo_star table + +use sea_orm_migration::prelude::*; + +pub struct Migration; + +impl MigrationName for Migration { + fn name(&self) -> &str { + "m20250628_000039_create_repo_star" + } +} + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let sql = include_str!("sql/m20250628_000039_create_repo_star.sql"); + super::execute_sql(manager, sql).await + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .get_connection() + .execute_raw(sea_orm::Statement::from_string( + sea_orm::DbBackend::Postgres, + format!("DROP TABLE IF EXISTS {};", "repo_star"), + )) + .await?; + Ok(()) + } +} diff --git a/libs/migrate/m20250628_000040_create_repo_tag.rs b/libs/migrate/m20250628_000040_create_repo_tag.rs new file mode 100644 index 0000000..49264bf --- /dev/null +++ b/libs/migrate/m20250628_000040_create_repo_tag.rs @@ -0,0 +1,30 @@ +//! 
SeaORM migration: create repo_tag table + +use sea_orm_migration::prelude::*; + +pub struct Migration; + +impl MigrationName for Migration { + fn name(&self) -> &str { + "m20250628_000040_create_repo_tag" + } +} + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let sql = include_str!("sql/m20250628_000040_create_repo_tag.sql"); + super::execute_sql(manager, sql).await + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .get_connection() + .execute_raw(sea_orm::Statement::from_string( + sea_orm::DbBackend::Postgres, + format!("DROP TABLE IF EXISTS {};", "repo_tag"), + )) + .await?; + Ok(()) + } +} diff --git a/libs/migrate/m20250628_000041_create_repo_upstream.rs b/libs/migrate/m20250628_000041_create_repo_upstream.rs new file mode 100644 index 0000000..9abc917 --- /dev/null +++ b/libs/migrate/m20250628_000041_create_repo_upstream.rs @@ -0,0 +1,30 @@ +//! SeaORM migration: create repo_upstream table + +use sea_orm_migration::prelude::*; + +pub struct Migration; + +impl MigrationName for Migration { + fn name(&self) -> &str { + "m20250628_000041_create_repo_upstream" + } +} + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let sql = include_str!("sql/m20250628_000041_create_repo_upstream.sql"); + super::execute_sql(manager, sql).await + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .get_connection() + .execute_raw(sea_orm::Statement::from_string( + sea_orm::DbBackend::Postgres, + format!("DROP TABLE IF EXISTS {};", "repo_upstream"), + )) + .await?; + Ok(()) + } +} diff --git a/libs/migrate/m20250628_000042_create_repo_watch.rs b/libs/migrate/m20250628_000042_create_repo_watch.rs new file mode 100644 index 0000000..028efaa --- /dev/null +++ b/libs/migrate/m20250628_000042_create_repo_watch.rs @@ -0,0 +1,30 @@ +//! 
SeaORM migration: create repo_watch table + +use sea_orm_migration::prelude::*; + +pub struct Migration; + +impl MigrationName for Migration { + fn name(&self) -> &str { + "m20250628_000042_create_repo_watch" + } +} + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let sql = include_str!("sql/m20250628_000042_create_repo_watch.sql"); + super::execute_sql(manager, sql).await + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .get_connection() + .execute_raw(sea_orm::Statement::from_string( + sea_orm::DbBackend::Postgres, + format!("DROP TABLE IF EXISTS {};", "repo_watch"), + )) + .await?; + Ok(()) + } +} diff --git a/libs/migrate/m20250628_000043_create_repo_webhook.rs b/libs/migrate/m20250628_000043_create_repo_webhook.rs new file mode 100644 index 0000000..a0d49c3 --- /dev/null +++ b/libs/migrate/m20250628_000043_create_repo_webhook.rs @@ -0,0 +1,30 @@ +//! SeaORM migration: create repo_webhook table + +use sea_orm_migration::prelude::*; + +pub struct Migration; + +impl MigrationName for Migration { + fn name(&self) -> &str { + "m20250628_000043_create_repo_webhook" + } +} + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let sql = include_str!("sql/m20250628_000043_create_repo_webhook.sql"); + super::execute_sql(manager, sql).await + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .get_connection() + .execute_raw(sea_orm::Statement::from_string( + sea_orm::DbBackend::Postgres, + format!("DROP TABLE IF EXISTS {};", "repo_webhook"), + )) + .await?; + Ok(()) + } +} diff --git a/libs/migrate/m20250628_000044_create_issue.rs b/libs/migrate/m20250628_000044_create_issue.rs new file mode 100644 index 0000000..b1d427a --- /dev/null +++ b/libs/migrate/m20250628_000044_create_issue.rs @@ -0,0 +1,30 @@ +//! 
SeaORM migration: create issue table + +use sea_orm_migration::prelude::*; + +pub struct Migration; + +impl MigrationName for Migration { + fn name(&self) -> &str { + "m20250628_000044_create_issue" + } +} + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let sql = include_str!("sql/m20250628_000044_create_issue.sql"); + super::execute_sql(manager, sql).await + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .get_connection() + .execute_raw(sea_orm::Statement::from_string( + sea_orm::DbBackend::Postgres, + format!("DROP TABLE IF EXISTS {};", "issue"), + )) + .await?; + Ok(()) + } +} diff --git a/libs/migrate/m20250628_000045_create_issue_assignee.rs b/libs/migrate/m20250628_000045_create_issue_assignee.rs new file mode 100644 index 0000000..2497b7b --- /dev/null +++ b/libs/migrate/m20250628_000045_create_issue_assignee.rs @@ -0,0 +1,30 @@ +//! SeaORM migration: create issue_assignee table + +use sea_orm_migration::prelude::*; + +pub struct Migration; + +impl MigrationName for Migration { + fn name(&self) -> &str { + "m20250628_000045_create_issue_assignee" + } +} + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let sql = include_str!("sql/m20250628_000045_create_issue_assignee.sql"); + super::execute_sql(manager, sql).await + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .get_connection() + .execute_raw(sea_orm::Statement::from_string( + sea_orm::DbBackend::Postgres, + format!("DROP TABLE IF EXISTS {};", "issue_assignee"), + )) + .await?; + Ok(()) + } +} diff --git a/libs/migrate/m20250628_000046_create_issue_comment.rs b/libs/migrate/m20250628_000046_create_issue_comment.rs new file mode 100644 index 0000000..0bc6d6d --- /dev/null +++ b/libs/migrate/m20250628_000046_create_issue_comment.rs @@ -0,0 +1,30 @@ 
+//! SeaORM migration: create issue_comment table + +use sea_orm_migration::prelude::*; + +pub struct Migration; + +impl MigrationName for Migration { + fn name(&self) -> &str { + "m20250628_000046_create_issue_comment" + } +} + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let sql = include_str!("sql/m20250628_000046_create_issue_comment.sql"); + super::execute_sql(manager, sql).await + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .get_connection() + .execute_raw(sea_orm::Statement::from_string( + sea_orm::DbBackend::Postgres, + format!("DROP TABLE IF EXISTS {};", "issue_comment"), + )) + .await?; + Ok(()) + } +} diff --git a/libs/migrate/m20250628_000047_create_issue_comment_reaction.rs b/libs/migrate/m20250628_000047_create_issue_comment_reaction.rs new file mode 100644 index 0000000..be54cb4 --- /dev/null +++ b/libs/migrate/m20250628_000047_create_issue_comment_reaction.rs @@ -0,0 +1,30 @@ +//! 
SeaORM migration: create issue_comment_reaction table + +use sea_orm_migration::prelude::*; + +pub struct Migration; + +impl MigrationName for Migration { + fn name(&self) -> &str { + "m20250628_000047_create_issue_comment_reaction" + } +} + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let sql = include_str!("sql/m20250628_000047_create_issue_comment_reaction.sql"); + super::execute_sql(manager, sql).await + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .get_connection() + .execute_raw(sea_orm::Statement::from_string( + sea_orm::DbBackend::Postgres, + format!("DROP TABLE IF EXISTS {};", "issue_comment_reaction"), + )) + .await?; + Ok(()) + } +} diff --git a/libs/migrate/m20250628_000048_create_issue_label.rs b/libs/migrate/m20250628_000048_create_issue_label.rs new file mode 100644 index 0000000..875ab69 --- /dev/null +++ b/libs/migrate/m20250628_000048_create_issue_label.rs @@ -0,0 +1,30 @@ +//! 
SeaORM migration: create issue_label table + +use sea_orm_migration::prelude::*; + +pub struct Migration; + +impl MigrationName for Migration { + fn name(&self) -> &str { + "m20250628_000048_create_issue_label" + } +} + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let sql = include_str!("sql/m20250628_000048_create_issue_label.sql"); + super::execute_sql(manager, sql).await + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .get_connection() + .execute_raw(sea_orm::Statement::from_string( + sea_orm::DbBackend::Postgres, + format!("DROP TABLE IF EXISTS {};", "issue_label"), + )) + .await?; + Ok(()) + } +} diff --git a/libs/migrate/m20250628_000049_create_issue_pull_request.rs b/libs/migrate/m20250628_000049_create_issue_pull_request.rs new file mode 100644 index 0000000..7939702 --- /dev/null +++ b/libs/migrate/m20250628_000049_create_issue_pull_request.rs @@ -0,0 +1,30 @@ +//! 
SeaORM migration: create issue_pull_request table + +use sea_orm_migration::prelude::*; + +pub struct Migration; + +impl MigrationName for Migration { + fn name(&self) -> &str { + "m20250628_000049_create_issue_pull_request" + } +} + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let sql = include_str!("sql/m20250628_000049_create_issue_pull_request.sql"); + super::execute_sql(manager, sql).await + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .get_connection() + .execute_raw(sea_orm::Statement::from_string( + sea_orm::DbBackend::Postgres, + format!("DROP TABLE IF EXISTS {};", "issue_pull_request"), + )) + .await?; + Ok(()) + } +} diff --git a/libs/migrate/m20250628_000050_create_issue_reaction.rs b/libs/migrate/m20250628_000050_create_issue_reaction.rs new file mode 100644 index 0000000..51404f6 --- /dev/null +++ b/libs/migrate/m20250628_000050_create_issue_reaction.rs @@ -0,0 +1,30 @@ +//! 
SeaORM migration: create issue_reaction table + +use sea_orm_migration::prelude::*; + +pub struct Migration; + +impl MigrationName for Migration { + fn name(&self) -> &str { + "m20250628_000050_create_issue_reaction" + } +} + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let sql = include_str!("sql/m20250628_000050_create_issue_reaction.sql"); + super::execute_sql(manager, sql).await + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .get_connection() + .execute_raw(sea_orm::Statement::from_string( + sea_orm::DbBackend::Postgres, + format!("DROP TABLE IF EXISTS {};", "issue_reaction"), + )) + .await?; + Ok(()) + } +} diff --git a/libs/migrate/m20250628_000051_create_issue_repo.rs b/libs/migrate/m20250628_000051_create_issue_repo.rs new file mode 100644 index 0000000..fe7e4c2 --- /dev/null +++ b/libs/migrate/m20250628_000051_create_issue_repo.rs @@ -0,0 +1,30 @@ +//! 
SeaORM migration: create issue_repo table + +use sea_orm_migration::prelude::*; + +pub struct Migration; + +impl MigrationName for Migration { + fn name(&self) -> &str { + "m20250628_000051_create_issue_repo" + } +} + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let sql = include_str!("sql/m20250628_000051_create_issue_repo.sql"); + super::execute_sql(manager, sql).await + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .get_connection() + .execute_raw(sea_orm::Statement::from_string( + sea_orm::DbBackend::Postgres, + format!("DROP TABLE IF EXISTS {};", "issue_repo"), + )) + .await?; + Ok(()) + } +} diff --git a/libs/migrate/m20250628_000052_create_issue_subscriber.rs b/libs/migrate/m20250628_000052_create_issue_subscriber.rs new file mode 100644 index 0000000..0907af4 --- /dev/null +++ b/libs/migrate/m20250628_000052_create_issue_subscriber.rs @@ -0,0 +1,30 @@ +//! 
SeaORM migration: create issue_subscriber table + +use sea_orm_migration::prelude::*; + +pub struct Migration; + +impl MigrationName for Migration { + fn name(&self) -> &str { + "m20250628_000052_create_issue_subscriber" + } +} + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let sql = include_str!("sql/m20250628_000052_create_issue_subscriber.sql"); + super::execute_sql(manager, sql).await + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .get_connection() + .execute_raw(sea_orm::Statement::from_string( + sea_orm::DbBackend::Postgres, + format!("DROP TABLE IF EXISTS {};", "issue_subscriber"), + )) + .await?; + Ok(()) + } +} diff --git a/libs/migrate/m20250628_000053_create_pull_request.rs b/libs/migrate/m20250628_000053_create_pull_request.rs new file mode 100644 index 0000000..3360638 --- /dev/null +++ b/libs/migrate/m20250628_000053_create_pull_request.rs @@ -0,0 +1,30 @@ +//! 
SeaORM migration: create pull_request table + +use sea_orm_migration::prelude::*; + +pub struct Migration; + +impl MigrationName for Migration { + fn name(&self) -> &str { + "m20250628_000053_create_pull_request" + } +} + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let sql = include_str!("sql/m20250628_000053_create_pull_request.sql"); + super::execute_sql(manager, sql).await + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .get_connection() + .execute_raw(sea_orm::Statement::from_string( + sea_orm::DbBackend::Postgres, + format!("DROP TABLE IF EXISTS {};", "pull_request"), + )) + .await?; + Ok(()) + } +} diff --git a/libs/migrate/m20250628_000054_create_pull_request_commit.rs b/libs/migrate/m20250628_000054_create_pull_request_commit.rs new file mode 100644 index 0000000..cf6b0ef --- /dev/null +++ b/libs/migrate/m20250628_000054_create_pull_request_commit.rs @@ -0,0 +1,30 @@ +//! 
SeaORM migration: create pull_request_commit table + +use sea_orm_migration::prelude::*; + +pub struct Migration; + +impl MigrationName for Migration { + fn name(&self) -> &str { + "m20250628_000054_create_pull_request_commit" + } +} + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let sql = include_str!("sql/m20250628_000054_create_pull_request_commit.sql"); + super::execute_sql(manager, sql).await + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .get_connection() + .execute_raw(sea_orm::Statement::from_string( + sea_orm::DbBackend::Postgres, + format!("DROP TABLE IF EXISTS {};", "pull_request_commit"), + )) + .await?; + Ok(()) + } +} diff --git a/libs/migrate/m20250628_000055_create_pull_request_review.rs b/libs/migrate/m20250628_000055_create_pull_request_review.rs new file mode 100644 index 0000000..67f3ad0 --- /dev/null +++ b/libs/migrate/m20250628_000055_create_pull_request_review.rs @@ -0,0 +1,30 @@ +//! 
SeaORM migration: create pull_request_review table + +use sea_orm_migration::prelude::*; + +pub struct Migration; + +impl MigrationName for Migration { + fn name(&self) -> &str { + "m20250628_000055_create_pull_request_review" + } +} + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let sql = include_str!("sql/m20250628_000055_create_pull_request_review.sql"); + super::execute_sql(manager, sql).await + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .get_connection() + .execute_raw(sea_orm::Statement::from_string( + sea_orm::DbBackend::Postgres, + format!("DROP TABLE IF EXISTS {};", "pull_request_review"), + )) + .await?; + Ok(()) + } +} diff --git a/libs/migrate/m20250628_000056_create_pull_request_review_comment.rs b/libs/migrate/m20250628_000056_create_pull_request_review_comment.rs new file mode 100644 index 0000000..6c8c33e --- /dev/null +++ b/libs/migrate/m20250628_000056_create_pull_request_review_comment.rs @@ -0,0 +1,30 @@ +//! 
SeaORM migration: create pull_request_review_comment table + +use sea_orm_migration::prelude::*; + +pub struct Migration; + +impl MigrationName for Migration { + fn name(&self) -> &str { + "m20250628_000056_create_pull_request_review_comment" + } +} + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let sql = include_str!("sql/m20250628_000056_create_pull_request_review_comment.sql"); + super::execute_sql(manager, sql).await + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .get_connection() + .execute_raw(sea_orm::Statement::from_string( + sea_orm::DbBackend::Postgres, + format!("DROP TABLE IF EXISTS {};", "pull_request_review_comment"), + )) + .await?; + Ok(()) + } +} diff --git a/libs/migrate/m20250628_000057_create_room_category.rs b/libs/migrate/m20250628_000057_create_room_category.rs new file mode 100644 index 0000000..43a7958 --- /dev/null +++ b/libs/migrate/m20250628_000057_create_room_category.rs @@ -0,0 +1,30 @@ +//! 
SeaORM migration: create room_category table + +use sea_orm_migration::prelude::*; + +pub struct Migration; + +impl MigrationName for Migration { + fn name(&self) -> &str { + "m20250628_000057_create_room_category" + } +} + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let sql = include_str!("sql/m20250628_000057_create_room_category.sql"); + super::execute_sql(manager, sql).await + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .get_connection() + .execute_raw(sea_orm::Statement::from_string( + sea_orm::DbBackend::Postgres, + format!("DROP TABLE IF EXISTS {};", "room_category"), + )) + .await?; + Ok(()) + } +} diff --git a/libs/migrate/m20250628_000058_create_room.rs b/libs/migrate/m20250628_000058_create_room.rs new file mode 100644 index 0000000..66ed6d9 --- /dev/null +++ b/libs/migrate/m20250628_000058_create_room.rs @@ -0,0 +1,30 @@ +//! SeaORM migration: create room table + +use sea_orm_migration::prelude::*; + +pub struct Migration; + +impl MigrationName for Migration { + fn name(&self) -> &str { + "m20250628_000058_create_room" + } +} + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let sql = include_str!("sql/m20250628_000058_create_room.sql"); + super::execute_sql(manager, sql).await + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .get_connection() + .execute_raw(sea_orm::Statement::from_string( + sea_orm::DbBackend::Postgres, + format!("DROP TABLE IF EXISTS {};", "room"), + )) + .await?; + Ok(()) + } +} diff --git a/libs/migrate/m20250628_000059_create_room_ai.rs b/libs/migrate/m20250628_000059_create_room_ai.rs new file mode 100644 index 0000000..3d2dddd --- /dev/null +++ b/libs/migrate/m20250628_000059_create_room_ai.rs @@ -0,0 +1,30 @@ +//! 
SeaORM migration: create room_ai table + +use sea_orm_migration::prelude::*; + +pub struct Migration; + +impl MigrationName for Migration { + fn name(&self) -> &str { + "m20250628_000059_create_room_ai" + } +} + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let sql = include_str!("sql/m20250628_000059_create_room_ai.sql"); + super::execute_sql(manager, sql).await + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .get_connection() + .execute_raw(sea_orm::Statement::from_string( + sea_orm::DbBackend::Postgres, + format!("DROP TABLE IF EXISTS {};", "room_ai"), + )) + .await?; + Ok(()) + } +} diff --git a/libs/migrate/m20250628_000060_create_room_member.rs b/libs/migrate/m20250628_000060_create_room_member.rs new file mode 100644 index 0000000..b8d7a31 --- /dev/null +++ b/libs/migrate/m20250628_000060_create_room_member.rs @@ -0,0 +1,30 @@ +//! SeaORM migration: create room_member table + +use sea_orm_migration::prelude::*; + +pub struct Migration; + +impl MigrationName for Migration { + fn name(&self) -> &str { + "m20250628_000060_create_room_member" + } +} + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let sql = include_str!("sql/m20250628_000060_create_room_member.sql"); + super::execute_sql(manager, sql).await + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .get_connection() + .execute_raw(sea_orm::Statement::from_string( + sea_orm::DbBackend::Postgres, + format!("DROP TABLE IF EXISTS {};", "room_member"), + )) + .await?; + Ok(()) + } +} diff --git a/libs/migrate/m20250628_000061_create_room_message.rs b/libs/migrate/m20250628_000061_create_room_message.rs new file mode 100644 index 0000000..ee0db0c --- /dev/null +++ b/libs/migrate/m20250628_000061_create_room_message.rs @@ -0,0 +1,30 @@ +//! 
SeaORM migration: create room_message table + +use sea_orm_migration::prelude::*; + +pub struct Migration; + +impl MigrationName for Migration { + fn name(&self) -> &str { + "m20250628_000061_create_room_message" + } +} + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let sql = include_str!("sql/m20250628_000061_create_room_message.sql"); + super::execute_sql(manager, sql).await + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .get_connection() + .execute_raw(sea_orm::Statement::from_string( + sea_orm::DbBackend::Postgres, + format!("DROP TABLE IF EXISTS {};", "room_message"), + )) + .await?; + Ok(()) + } +} diff --git a/libs/migrate/m20250628_000062_create_room_pin.rs b/libs/migrate/m20250628_000062_create_room_pin.rs new file mode 100644 index 0000000..8118273 --- /dev/null +++ b/libs/migrate/m20250628_000062_create_room_pin.rs @@ -0,0 +1,30 @@ +//! SeaORM migration: create room_pin table + +use sea_orm_migration::prelude::*; + +pub struct Migration; + +impl MigrationName for Migration { + fn name(&self) -> &str { + "m20250628_000062_create_room_pin" + } +} + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let sql = include_str!("sql/m20250628_000062_create_room_pin.sql"); + super::execute_sql(manager, sql).await + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .get_connection() + .execute_raw(sea_orm::Statement::from_string( + sea_orm::DbBackend::Postgres, + format!("DROP TABLE IF EXISTS {};", "room_pin"), + )) + .await?; + Ok(()) + } +} diff --git a/libs/migrate/m20250628_000063_create_room_thread.rs b/libs/migrate/m20250628_000063_create_room_thread.rs new file mode 100644 index 0000000..0390bee --- /dev/null +++ b/libs/migrate/m20250628_000063_create_room_thread.rs @@ -0,0 +1,30 @@ +//! 
SeaORM migration: create room_thread table + +use sea_orm_migration::prelude::*; + +pub struct Migration; + +impl MigrationName for Migration { + fn name(&self) -> &str { + "m20250628_000063_create_room_thread" + } +} + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let sql = include_str!("sql/m20250628_000063_create_room_thread.sql"); + super::execute_sql(manager, sql).await + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .get_connection() + .execute_raw(sea_orm::Statement::from_string( + sea_orm::DbBackend::Postgres, + format!("DROP TABLE IF EXISTS {};", "room_thread"), + )) + .await?; + Ok(()) + } +} diff --git a/libs/migrate/m20250628_000064_create_ai_model.rs b/libs/migrate/m20250628_000064_create_ai_model.rs new file mode 100644 index 0000000..07e5821 --- /dev/null +++ b/libs/migrate/m20250628_000064_create_ai_model.rs @@ -0,0 +1,30 @@ +//! SeaORM migration: create ai_model table + +use sea_orm_migration::prelude::*; + +pub struct Migration; + +impl MigrationName for Migration { + fn name(&self) -> &str { + "m20250628_000064_create_ai_model" + } +} + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let sql = include_str!("sql/m20250628_000064_create_ai_model.sql"); + super::execute_sql(manager, sql).await + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .get_connection() + .execute_raw(sea_orm::Statement::from_string( + sea_orm::DbBackend::Postgres, + format!("DROP TABLE IF EXISTS {};", "ai_model"), + )) + .await?; + Ok(()) + } +} diff --git a/libs/migrate/m20250628_000065_create_ai_model_capability.rs b/libs/migrate/m20250628_000065_create_ai_model_capability.rs new file mode 100644 index 0000000..bbfe8b8 --- /dev/null +++ b/libs/migrate/m20250628_000065_create_ai_model_capability.rs @@ -0,0 +1,30 @@ 
+//! SeaORM migration: create ai_model_capability table + +use sea_orm_migration::prelude::*; + +pub struct Migration; + +impl MigrationName for Migration { + fn name(&self) -> &str { + "m20250628_000065_create_ai_model_capability" + } +} + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let sql = include_str!("sql/m20250628_000065_create_ai_model_capability.sql"); + super::execute_sql(manager, sql).await + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .get_connection() + .execute_raw(sea_orm::Statement::from_string( + sea_orm::DbBackend::Postgres, + format!("DROP TABLE IF EXISTS {};", "ai_model_capability"), + )) + .await?; + Ok(()) + } +} diff --git a/libs/migrate/m20250628_000066_create_ai_model_parameter_profile.rs b/libs/migrate/m20250628_000066_create_ai_model_parameter_profile.rs new file mode 100644 index 0000000..fb2e3b0 --- /dev/null +++ b/libs/migrate/m20250628_000066_create_ai_model_parameter_profile.rs @@ -0,0 +1,30 @@ +//! 
SeaORM migration: create ai_model_parameter_profile table + +use sea_orm_migration::prelude::*; + +pub struct Migration; + +impl MigrationName for Migration { + fn name(&self) -> &str { + "m20250628_000066_create_ai_model_parameter_profile" + } +} + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let sql = include_str!("sql/m20250628_000066_create_ai_model_parameter_profile.sql"); + super::execute_sql(manager, sql).await + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .get_connection() + .execute_raw(sea_orm::Statement::from_string( + sea_orm::DbBackend::Postgres, + format!("DROP TABLE IF EXISTS {};", "ai_model_parameter_profile"), + )) + .await?; + Ok(()) + } +} diff --git a/libs/migrate/m20250628_000067_create_ai_model_pricing.rs b/libs/migrate/m20250628_000067_create_ai_model_pricing.rs new file mode 100644 index 0000000..9b987cf --- /dev/null +++ b/libs/migrate/m20250628_000067_create_ai_model_pricing.rs @@ -0,0 +1,30 @@ +//! 
SeaORM migration: create ai_model_pricing table + +use sea_orm_migration::prelude::*; + +pub struct Migration; + +impl MigrationName for Migration { + fn name(&self) -> &str { + "m20250628_000067_create_ai_model_pricing" + } +} + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let sql = include_str!("sql/m20250628_000067_create_ai_model_pricing.sql"); + super::execute_sql(manager, sql).await + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .get_connection() + .execute_raw(sea_orm::Statement::from_string( + sea_orm::DbBackend::Postgres, + format!("DROP TABLE IF EXISTS {};", "ai_model_pricing"), + )) + .await?; + Ok(()) + } +} diff --git a/libs/migrate/m20250628_000068_create_ai_model_provider.rs b/libs/migrate/m20250628_000068_create_ai_model_provider.rs new file mode 100644 index 0000000..fb25757 --- /dev/null +++ b/libs/migrate/m20250628_000068_create_ai_model_provider.rs @@ -0,0 +1,30 @@ +//! 
SeaORM migration: create ai_model_provider table + +use sea_orm_migration::prelude::*; + +pub struct Migration; + +impl MigrationName for Migration { + fn name(&self) -> &str { + "m20250628_000068_create_ai_model_provider" + } +} + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let sql = include_str!("sql/m20250628_000068_create_ai_model_provider.sql"); + super::execute_sql(manager, sql).await + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .get_connection() + .execute_raw(sea_orm::Statement::from_string( + sea_orm::DbBackend::Postgres, + format!("DROP TABLE IF EXISTS {};", "ai_model_provider"), + )) + .await?; + Ok(()) + } +} diff --git a/libs/migrate/m20250628_000069_create_ai_model_version.rs b/libs/migrate/m20250628_000069_create_ai_model_version.rs new file mode 100644 index 0000000..2a987d7 --- /dev/null +++ b/libs/migrate/m20250628_000069_create_ai_model_version.rs @@ -0,0 +1,30 @@ +//! 
SeaORM migration: create ai_model_version table + +use sea_orm_migration::prelude::*; + +pub struct Migration; + +impl MigrationName for Migration { + fn name(&self) -> &str { + "m20250628_000069_create_ai_model_version" + } +} + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let sql = include_str!("sql/m20250628_000069_create_ai_model_version.sql"); + super::execute_sql(manager, sql).await + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .get_connection() + .execute_raw(sea_orm::Statement::from_string( + sea_orm::DbBackend::Postgres, + format!("DROP TABLE IF EXISTS {};", "ai_model_version"), + )) + .await?; + Ok(()) + } +} diff --git a/libs/migrate/m20250628_000070_create_ai_session.rs b/libs/migrate/m20250628_000070_create_ai_session.rs new file mode 100644 index 0000000..abb48ba --- /dev/null +++ b/libs/migrate/m20250628_000070_create_ai_session.rs @@ -0,0 +1,30 @@ +//! 
SeaORM migration: create ai_session table + +use sea_orm_migration::prelude::*; + +pub struct Migration; + +impl MigrationName for Migration { + fn name(&self) -> &str { + "m20250628_000070_create_ai_session" + } +} + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let sql = include_str!("sql/m20250628_000070_create_ai_session.sql"); + super::execute_sql(manager, sql).await + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .get_connection() + .execute_raw(sea_orm::Statement::from_string( + sea_orm::DbBackend::Postgres, + format!("DROP TABLE IF EXISTS {};", "ai_session"), + )) + .await?; + Ok(()) + } +} diff --git a/libs/migrate/m20250628_000071_create_ai_tool_auth.rs b/libs/migrate/m20250628_000071_create_ai_tool_auth.rs new file mode 100644 index 0000000..144fe38 --- /dev/null +++ b/libs/migrate/m20250628_000071_create_ai_tool_auth.rs @@ -0,0 +1,30 @@ +//! SeaORM migration: create ai_tool_auth table + +use sea_orm_migration::prelude::*; + +pub struct Migration; + +impl MigrationName for Migration { + fn name(&self) -> &str { + "m20250628_000071_create_ai_tool_auth" + } +} + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let sql = include_str!("sql/m20250628_000071_create_ai_tool_auth.sql"); + super::execute_sql(manager, sql).await + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .get_connection() + .execute_raw(sea_orm::Statement::from_string( + sea_orm::DbBackend::Postgres, + format!("DROP TABLE IF EXISTS {};", "ai_tool_auth"), + )) + .await?; + Ok(()) + } +} diff --git a/libs/migrate/m20250628_000072_create_ai_tool_call.rs b/libs/migrate/m20250628_000072_create_ai_tool_call.rs new file mode 100644 index 0000000..1b567e8 --- /dev/null +++ b/libs/migrate/m20250628_000072_create_ai_tool_call.rs @@ -0,0 +1,30 @@ 
+//! SeaORM migration: create ai_tool_call table + +use sea_orm_migration::prelude::*; + +pub struct Migration; + +impl MigrationName for Migration { + fn name(&self) -> &str { + "m20250628_000072_create_ai_tool_call" + } +} + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let sql = include_str!("sql/m20250628_000072_create_ai_tool_call.sql"); + super::execute_sql(manager, sql).await + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .get_connection() + .execute_raw(sea_orm::Statement::from_string( + sea_orm::DbBackend::Postgres, + format!("DROP TABLE IF EXISTS {};", "ai_tool_call"), + )) + .await?; + Ok(()) + } +} diff --git a/libs/migrate/m20250628_000073_create_label.rs b/libs/migrate/m20250628_000073_create_label.rs new file mode 100644 index 0000000..7afbee3 --- /dev/null +++ b/libs/migrate/m20250628_000073_create_label.rs @@ -0,0 +1,30 @@ +//! SeaORM migration: create label table + +use sea_orm_migration::prelude::*; + +pub struct Migration; + +impl MigrationName for Migration { + fn name(&self) -> &str { + "m20250628_000073_create_label" + } +} + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let sql = include_str!("sql/m20250628_000073_create_label.sql"); + super::execute_sql(manager, sql).await + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .get_connection() + .execute_raw(sea_orm::Statement::from_string( + sea_orm::DbBackend::Postgres, + format!("DROP TABLE IF EXISTS {};", "label"), + )) + .await?; + Ok(()) + } +} diff --git a/libs/migrate/m20250628_000074_create_notify.rs b/libs/migrate/m20250628_000074_create_notify.rs new file mode 100644 index 0000000..01be42e --- /dev/null +++ b/libs/migrate/m20250628_000074_create_notify.rs @@ -0,0 +1,30 @@ +//! 
SeaORM migration: create notify table + +use sea_orm_migration::prelude::*; + +pub struct Migration; + +impl MigrationName for Migration { + fn name(&self) -> &str { + "m20250628_000074_create_notify" + } +} + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let sql = include_str!("sql/m20250628_000074_create_notify.sql"); + super::execute_sql(manager, sql).await + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .get_connection() + .execute_raw(sea_orm::Statement::from_string( + sea_orm::DbBackend::Postgres, + format!("DROP TABLE IF EXISTS {};", "notify"), + )) + .await?; + Ok(()) + } +} diff --git a/libs/migrate/m20250628_000075_fix_column_types_and_names.rs b/libs/migrate/m20250628_000075_fix_column_types_and_names.rs new file mode 100644 index 0000000..585f306 --- /dev/null +++ b/libs/migrate/m20250628_000075_fix_column_types_and_names.rs @@ -0,0 +1,27 @@ +//! SeaORM migration: fix column types (TIMESTAMP -> TIMESTAMPTZ) and column names (*_uuid -> *) +//! +//! This migration corrects two categories of mismatches between the SQL schema and Rust models: +//! 1. Column type: `created_at` and other timestamp columns were created as `TIMESTAMP` instead of `TIMESTAMPTZ` +//! 2. Column names: foreign-key columns use `*_uuid` suffix in SQL but models expect `*` (e.g. 
`user_uuid` -> `user`) + +use sea_orm_migration::prelude::*; + +pub struct Migration; + +impl MigrationName for Migration { + fn name(&self) -> &str { + "m20250628_000075_fix_column_types_and_names" + } +} + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let sql = include_str!("sql/m20250628_000075_fix_column_types_and_names.sql"); + super::execute_sql(manager, sql).await + } + + async fn down(&self, _manager: &SchemaManager) -> Result<(), DbErr> { + Ok(()) + } +} diff --git a/libs/migrate/m20250628_000076_create_user_email_change.rs b/libs/migrate/m20250628_000076_create_user_email_change.rs new file mode 100644 index 0000000..4efb152 --- /dev/null +++ b/libs/migrate/m20250628_000076_create_user_email_change.rs @@ -0,0 +1,30 @@ +//! SeaORM migration: create user_email_change table + +use sea_orm_migration::prelude::*; + +pub struct Migration; + +impl MigrationName for Migration { + fn name(&self) -> &str { + "m20250628_000076_create_user_email_change" + } +} + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let sql = include_str!("sql/m20250628_000076_create_user_email_change.sql"); + super::execute_sql(manager, sql).await + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .get_connection() + .execute_raw(sea_orm::Statement::from_string( + sea_orm::DbBackend::Postgres, + "DROP TABLE IF EXISTS user_email_change;".to_string(), + )) + .await?; + Ok(()) + } +} diff --git a/libs/migrate/m20250628_000077_create_project_activity.rs b/libs/migrate/m20250628_000077_create_project_activity.rs new file mode 100644 index 0000000..9013ec1 --- /dev/null +++ b/libs/migrate/m20250628_000077_create_project_activity.rs @@ -0,0 +1,28 @@ +use sea_orm_migration::prelude::*; + +pub struct Migration; + +impl MigrationName for Migration { + fn name(&self) -> &str { + 
"m20250628_000077_create_project_activity" + } +} + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let sql = include_str!("sql/m20250628_000077_create_project_activity.sql"); + super::execute_sql(manager, sql).await + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .get_connection() + .execute_raw(sea_orm::Statement::from_string( + sea_orm::DbBackend::Postgres, + "DROP TABLE IF EXISTS project_activity;", + )) + .await?; + Ok(()) + } +} diff --git a/libs/migrate/m20250628_000078_add_room_member_do_not_disturb.rs b/libs/migrate/m20250628_000078_add_room_member_do_not_disturb.rs new file mode 100644 index 0000000..bb7d0d0 --- /dev/null +++ b/libs/migrate/m20250628_000078_add_room_member_do_not_disturb.rs @@ -0,0 +1,31 @@ +//! SeaORM migration: add DND columns to room_member + +use sea_orm_migration::prelude::*; + +pub struct Migration; + +impl MigrationName for Migration { + fn name(&self) -> &str { + "m20250628_000078_add_room_member_do_not_disturb" + } +} + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let sql = include_str!("sql/m20250628_000078_add_room_member_do_not_disturb.sql"); + super::execute_sql(manager, sql).await + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .get_connection() + .execute_raw(sea_orm::Statement::from_string( + sea_orm::DbBackend::Postgres, + "ALTER TABLE room_member DROP COLUMN IF EXISTS do_not_disturb, \ + DROP COLUMN IF EXISTS dnd_start_hour, DROP COLUMN IF EXISTS dnd_end_hour;", + )) + .await?; + Ok(()) + } +} diff --git a/libs/migrate/m20250628_000079_add_room_message_in_reply_to.rs b/libs/migrate/m20250628_000079_add_room_message_in_reply_to.rs new file mode 100644 index 0000000..5aecd8c --- /dev/null +++ b/libs/migrate/m20250628_000079_add_room_message_in_reply_to.rs @@ -0,0 
+1,30 @@ +//! SeaORM migration: add in_reply_to column to room_message + +use sea_orm_migration::prelude::*; + +pub struct Migration; + +impl MigrationName for Migration { + fn name(&self) -> &str { + "m20250628_000079_add_room_message_in_reply_to" + } +} + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let sql = include_str!("sql/m20250628_000079_add_room_message_in_reply_to.sql"); + super::execute_sql(manager, sql).await + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .get_connection() + .execute_raw(sea_orm::Statement::from_string( + sea_orm::DbBackend::Postgres, + "ALTER TABLE room_message DROP COLUMN IF EXISTS in_reply_to;", + )) + .await?; + Ok(()) + } +} diff --git a/libs/migrate/m20250628_000080_add_message_reactions_and_search.rs b/libs/migrate/m20250628_000080_add_message_reactions_and_search.rs new file mode 100644 index 0000000..cae6b57 --- /dev/null +++ b/libs/migrate/m20250628_000080_add_message_reactions_and_search.rs @@ -0,0 +1,28 @@ +use sea_orm_migration::prelude::*; + +#[derive(DeriveMigrationName)] +pub struct Migration; + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let sql = include_str!("sql/m20250628_000080_add_message_reactions_and_search.sql"); + super::execute_sql(manager, sql).await + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .drop_table( + Table::drop() + .table(Alias::new("room_message_reaction")) + .to_owned(), + ) + .await?; + + let sql = "DROP TRIGGER IF EXISTS room_message_tsv_update ON room_message; + DROP FUNCTION IF EXISTS room_message_tsv_trigger; + DROP INDEX IF EXISTS idx_room_message_content_tsv; + ALTER TABLE room_message DROP COLUMN IF EXISTS content_tsv;"; + super::execute_sql(manager, sql).await + } +} diff --git 
a/libs/migrate/m20250628_000081_add_message_edit_history.rs b/libs/migrate/m20250628_000081_add_message_edit_history.rs new file mode 100644 index 0000000..572f8fb --- /dev/null +++ b/libs/migrate/m20250628_000081_add_message_edit_history.rs @@ -0,0 +1,22 @@ +use sea_orm_migration::prelude::*; + +#[derive(DeriveMigrationName)] +pub struct Migration; + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let sql = include_str!("sql/m20250628_000081_add_message_edit_history.sql"); + super::execute_sql(manager, sql).await + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .drop_table( + Table::drop() + .table(Alias::new("room_message_edit_history")) + .to_owned(), + ) + .await + } +} diff --git a/libs/migrate/m20250628_000082_add_pr_review_comment_resolve.rs b/libs/migrate/m20250628_000082_add_pr_review_comment_resolve.rs new file mode 100644 index 0000000..484d7aa --- /dev/null +++ b/libs/migrate/m20250628_000082_add_pr_review_comment_resolve.rs @@ -0,0 +1,32 @@ +//! 
SeaORM migration: add resolved and in_reply_to columns to pull_request_review_comment + +use sea_orm_migration::prelude::*; + +pub struct Migration; + +impl MigrationName for Migration { + fn name(&self) -> &str { + "m20250628_000082_add_pr_review_comment_resolve" + } +} + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let sql = include_str!("sql/m20250628_000082_add_pr_review_comment_resolve.sql"); + super::execute_sql(manager, sql).await + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .get_connection() + .execute_raw(sea_orm::Statement::from_string( + sea_orm::DbBackend::Postgres, + "ALTER TABLE pull_request_review_comment \ + DROP COLUMN IF EXISTS resolved, \ + DROP COLUMN IF EXISTS in_reply_to;", + )) + .await?; + Ok(()) + } +} diff --git a/libs/migrate/m20250628_000083_add_pr_review_request.rs b/libs/migrate/m20250628_000083_add_pr_review_request.rs new file mode 100644 index 0000000..b0659e3 --- /dev/null +++ b/libs/migrate/m20250628_000083_add_pr_review_request.rs @@ -0,0 +1,30 @@ +//! 
SeaORM migration: create pull_request_review_request table + +use sea_orm_migration::prelude::*; + +pub struct Migration; + +impl MigrationName for Migration { + fn name(&self) -> &str { + "m20250628_000083_add_pr_review_request" + } +} + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let sql = include_str!("sql/m20250628_000083_add_pr_review_request.sql"); + super::execute_sql(manager, sql).await + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .get_connection() + .execute_raw(sea_orm::Statement::from_string( + sea_orm::DbBackend::Postgres, + "DROP TABLE IF EXISTS pull_request_review_request;", + )) + .await?; + Ok(()) + } +} diff --git a/libs/migrate/m20260407_000001_extend_repo_branch_protect.rs b/libs/migrate/m20260407_000001_extend_repo_branch_protect.rs new file mode 100644 index 0000000..a1254f6 --- /dev/null +++ b/libs/migrate/m20260407_000001_extend_repo_branch_protect.rs @@ -0,0 +1,30 @@ +//! 
SeaORM migration: extend repo_branch_protect with enhanced fields + +use sea_orm_migration::prelude::*; + +pub struct Migration; + +impl MigrationName for Migration { + fn name(&self) -> &str { + "m20260407_000001_extend_repo_branch_protect" + } +} + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let sql = include_str!("sql/m20260407_000001_extend_repo_branch_protect.sql"); + super::execute_sql(manager, sql).await + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .get_connection() + .execute_raw(sea_orm::Statement::from_string( + sea_orm::DbBackend::Postgres, + "ALTER TABLE repo_branch_protect DROP COLUMN IF EXISTS required_approvals, DROP COLUMN IF EXISTS dismiss_stale_reviews, DROP COLUMN IF EXISTS require_linear_history, DROP COLUMN IF EXISTS allow_fork_syncing;", + )) + .await?; + Ok(()) + } +} diff --git a/libs/migrate/m20260407_000002_create_project_board.rs b/libs/migrate/m20260407_000002_create_project_board.rs new file mode 100644 index 0000000..0a74e22 --- /dev/null +++ b/libs/migrate/m20260407_000002_create_project_board.rs @@ -0,0 +1,30 @@ +//! 
SeaORM migration: create project_board tables + +use sea_orm_migration::prelude::*; + +pub struct Migration; + +impl MigrationName for Migration { + fn name(&self) -> &str { + "m20260407_000002_create_project_board" + } +} + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let sql = include_str!("sql/m20260407_000002_create_project_board.sql"); + super::execute_sql(manager, sql).await + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .get_connection() + .execute_raw(sea_orm::Statement::from_string( + sea_orm::DbBackend::Postgres, + "DROP TABLE IF EXISTS project_board_card; DROP TABLE IF EXISTS project_board_column; DROP TABLE IF EXISTS project_board;", + )) + .await?; + Ok(()) + } +} diff --git a/libs/migrate/m20260407_000003_add_repo_ai_code_review.rs b/libs/migrate/m20260407_000003_add_repo_ai_code_review.rs new file mode 100644 index 0000000..8920091 --- /dev/null +++ b/libs/migrate/m20260407_000003_add_repo_ai_code_review.rs @@ -0,0 +1,30 @@ +//! 
SeaORM migration: add ai_code_review_enabled to repo table + +use sea_orm_migration::prelude::*; + +pub struct Migration; + +impl MigrationName for Migration { + fn name(&self) -> &str { + "m20260407_000003_add_repo_ai_code_review" + } +} + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let sql = include_str!("sql/m20260407_000003_add_repo_ai_code_review.sql"); + super::execute_sql(manager, sql).await + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .get_connection() + .execute_raw(sea_orm::Statement::from_string( + sea_orm::DbBackend::Postgres, + "ALTER TABLE repo DROP COLUMN IF EXISTS ai_code_review_enabled;", + )) + .await?; + Ok(()) + } +} diff --git a/libs/migrate/m20260411_000001_create_workspace.rs b/libs/migrate/m20260411_000001_create_workspace.rs new file mode 100644 index 0000000..55cd6cc --- /dev/null +++ b/libs/migrate/m20260411_000001_create_workspace.rs @@ -0,0 +1,30 @@ +//! 
SeaORM migration: create workspace table + +use sea_orm_migration::prelude::*; + +pub struct Migration; + +impl MigrationName for Migration { + fn name(&self) -> &str { + "m20260411_000001_create_workspace" + } +} + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let sql = include_str!("sql/m20260411_000001_create_workspace.sql"); + super::execute_sql(manager, sql).await + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .get_connection() + .execute_raw(sea_orm::Statement::from_string( + sea_orm::DbBackend::Postgres, + "DROP TABLE IF EXISTS workspace;", + )) + .await?; + Ok(()) + } +} diff --git a/libs/migrate/m20260411_000002_create_workspace_membership.rs b/libs/migrate/m20260411_000002_create_workspace_membership.rs new file mode 100644 index 0000000..79c0d12 --- /dev/null +++ b/libs/migrate/m20260411_000002_create_workspace_membership.rs @@ -0,0 +1,30 @@ +//! 
SeaORM migration: create workspace_membership table + +use sea_orm_migration::prelude::*; + +pub struct Migration; + +impl MigrationName for Migration { + fn name(&self) -> &str { + "m20260411_000002_create_workspace_membership" + } +} + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let sql = include_str!("sql/m20260411_000002_create_workspace_membership.sql"); + super::execute_sql(manager, sql).await + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .get_connection() + .execute_raw(sea_orm::Statement::from_string( + sea_orm::DbBackend::Postgres, + "DROP TABLE IF EXISTS workspace_membership;", + )) + .await?; + Ok(()) + } +} diff --git a/libs/migrate/m20260411_000003_add_workspace_id_to_project.rs b/libs/migrate/m20260411_000003_add_workspace_id_to_project.rs new file mode 100644 index 0000000..00d59a0 --- /dev/null +++ b/libs/migrate/m20260411_000003_add_workspace_id_to_project.rs @@ -0,0 +1,30 @@ +//! 
SeaORM migration: add workspace_id foreign key to project table + +use sea_orm_migration::prelude::*; + +pub struct Migration; + +impl MigrationName for Migration { + fn name(&self) -> &str { + "m20260411_000003_add_workspace_id_to_project" + } +} + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let sql = include_str!("sql/m20260411_000003_add_workspace_id_to_project.sql"); + super::execute_sql(manager, sql).await + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .get_connection() + .execute_raw(sea_orm::Statement::from_string( + sea_orm::DbBackend::Postgres, + "ALTER TABLE project DROP COLUMN IF EXISTS workspace_id;", + )) + .await?; + Ok(()) + } +} diff --git a/libs/migrate/m20260411_000004_add_invite_token_to_workspace_membership.rs b/libs/migrate/m20260411_000004_add_invite_token_to_workspace_membership.rs new file mode 100644 index 0000000..5808b6f --- /dev/null +++ b/libs/migrate/m20260411_000004_add_invite_token_to_workspace_membership.rs @@ -0,0 +1,32 @@ +//! 
SeaORM migration: add invite_token fields to workspace_membership + +use sea_orm_migration::prelude::*; + +pub struct Migration; + +impl MigrationName for Migration { + fn name(&self) -> &str { + "m20260411_000004_add_invite_token_to_workspace_membership" + } +} + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let sql = include_str!("sql/m20260411_000004_add_invite_token_to_workspace_membership.sql"); + super::execute_sql(manager, sql).await + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .get_connection() + .execute_raw( + sea_orm::Statement::from_string( + sea_orm::DbBackend::Postgres, + "ALTER TABLE workspace_membership DROP COLUMN IF EXISTS invite_token, DROP COLUMN IF EXISTS invite_expires_at;", + ), + ) + .await?; + Ok(()) + } +} diff --git a/libs/migrate/m20260412_000001_create_workspace_billing.rs b/libs/migrate/m20260412_000001_create_workspace_billing.rs new file mode 100644 index 0000000..0ad2710 --- /dev/null +++ b/libs/migrate/m20260412_000001_create_workspace_billing.rs @@ -0,0 +1,37 @@ +//! 
SeaORM migration: create workspace_billing table + +use sea_orm_migration::prelude::*; + +pub struct Migration; + +impl MigrationName for Migration { + fn name(&self) -> &str { + "m20260412_000001_create_workspace_billing" + } +} + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let sql = include_str!("sql/m20260412_000001_create_workspace_billing.sql"); + super::execute_sql(manager, sql).await + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .get_connection() + .execute_raw(sea_orm::Statement::from_string( + sea_orm::DbBackend::Postgres, + "DROP TABLE IF EXISTS workspace_billing_history CASCADE;", + )) + .await?; + manager + .get_connection() + .execute_raw(sea_orm::Statement::from_string( + sea_orm::DbBackend::Postgres, + "DROP TABLE IF EXISTS workspace_billing CASCADE;", + )) + .await?; + Ok(()) + } +} diff --git a/libs/migrate/m20260412_000002_create_workspace_billing_history.rs b/libs/migrate/m20260412_000002_create_workspace_billing_history.rs new file mode 100644 index 0000000..eeb69d1 --- /dev/null +++ b/libs/migrate/m20260412_000002_create_workspace_billing_history.rs @@ -0,0 +1,30 @@ +//! 
SeaORM migration: create workspace_billing_history table + +use sea_orm_migration::prelude::*; + +pub struct Migration; + +impl MigrationName for Migration { + fn name(&self) -> &str { + "m20260412_000002_create_workspace_billing_history" + } +} + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let sql = include_str!("sql/m20260412_000002_create_workspace_billing_history.sql"); + super::execute_sql(manager, sql).await + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .get_connection() + .execute_raw(sea_orm::Statement::from_string( + sea_orm::DbBackend::Postgres, + "DROP TABLE IF EXISTS workspace_billing_history;", + )) + .await?; + Ok(()) + } +} diff --git a/libs/migrate/m20260412_000003_create_project_skill.rs b/libs/migrate/m20260412_000003_create_project_skill.rs new file mode 100644 index 0000000..86f5e91 --- /dev/null +++ b/libs/migrate/m20260412_000003_create_project_skill.rs @@ -0,0 +1,23 @@ +//! SeaORM migration: create project_skill table + +use sea_orm_migration::prelude::*; + +pub struct Migration; + +impl MigrationName for Migration { + fn name(&self) -> &str { + "m20260412_000003_create_project_skill" + } +} + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let sql = include_str!("sql/m20260412_000003_create_project_skill.sql"); + super::execute_sql(manager, sql).await + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + super::execute_sql(manager, "DROP TABLE IF EXISTS project_skill;").await + } +} diff --git a/libs/migrate/m20260413_000001_add_skill_commit_blob.rs b/libs/migrate/m20260413_000001_add_skill_commit_blob.rs new file mode 100644 index 0000000..3e2bb14 --- /dev/null +++ b/libs/migrate/m20260413_000001_add_skill_commit_blob.rs @@ -0,0 +1,23 @@ +//! 
SeaORM migration: add commit_sha and blob_hash to project_skill + +use sea_orm_migration::prelude::*; + +pub struct Migration; + +impl MigrationName for Migration { + fn name(&self) -> &str { + "m20260413_000001_add_skill_commit_blob" + } +} + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let sql = include_str!("sql/m20260413_000001_add_skill_commit_blob.sql"); + super::execute_sql(manager, sql).await + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + super::execute_sql(manager, "ALTER TABLE project_skill DROP COLUMN IF EXISTS commit_sha, DROP COLUMN IF EXISTS blob_hash;").await + } +} diff --git a/libs/migrate/m20260414_000001_create_agent_task.rs b/libs/migrate/m20260414_000001_create_agent_task.rs new file mode 100644 index 0000000..51f7dea --- /dev/null +++ b/libs/migrate/m20260414_000001_create_agent_task.rs @@ -0,0 +1,23 @@ +//! SeaORM migration: create agent_task table + +use sea_orm_migration::prelude::*; + +pub struct Migration; + +impl MigrationName for Migration { + fn name(&self) -> &str { + "m20260414_000001_create_agent_task" + } +} + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let sql = include_str!("sql/m20260414_000001_create_agent_task.sql"); + super::execute_sql(manager, sql).await + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + super::execute_sql(manager, "DROP TABLE IF EXISTS agent_task;").await + } +} diff --git a/libs/migrate/sql/m20250628_000001_create_room_notifications.sql b/libs/migrate/sql/m20250628_000001_create_room_notifications.sql new file mode 100644 index 0000000..bf1f4a8 --- /dev/null +++ b/libs/migrate/sql/m20250628_000001_create_room_notifications.sql @@ -0,0 +1,31 @@ +CREATE TYPE notification_type AS ENUM ( + 'mention', + 'invitation', + 'role_change', + 'room_created', + 'room_deleted', 
+ 'system_announcement' +); + +CREATE TABLE IF NOT EXISTS room_notifications ( + id UUID PRIMARY KEY, + room UUID, + project UUID, + user_id UUID, + notification_type VARCHAR(255) NOT NULL, + related_message_id UUID, + related_user_id UUID, + related_room_id UUID, + title VARCHAR(255) NOT NULL, + content TEXT, + metadata JSONB, + is_read BOOLEAN NOT NULL DEFAULT false, + is_archived BOOLEAN NOT NULL DEFAULT false, + created_at TIMESTAMPTZ NOT NULL, + read_at TIMESTAMPTZ, + expires_at TIMESTAMPTZ +); + +CREATE INDEX idx_room_notifications_user_id_is_read ON room_notifications (user_id, is_read); +CREATE INDEX idx_room_notifications_user_id_created_at ON room_notifications (user_id, created_at); +CREATE INDEX idx_room_notifications_expires_at ON room_notifications (expires_at); diff --git a/libs/migrate/sql/m20250628_000002_create_user.sql b/libs/migrate/sql/m20250628_000002_create_user.sql new file mode 100644 index 0000000..e1d3b88 --- /dev/null +++ b/libs/migrate/sql/m20250628_000002_create_user.sql @@ -0,0 +1,13 @@ +CREATE TABLE IF NOT EXISTS "user" ( + uid UUID PRIMARY KEY, + username VARCHAR(255) NOT NULL, + display_name VARCHAR(255), + avatar_url VARCHAR(255), + website_url VARCHAR(255), + organization VARCHAR(255), + last_sign_in_at TIMESTAMPTZ, + created_at TIMESTAMPTZ NOT NULL, + updated_at TIMESTAMPTZ NOT NULL +); + +CREATE INDEX idx_user_username ON "user" (username); diff --git a/libs/migrate/sql/m20250628_000003_create_user_2fa.sql b/libs/migrate/sql/m20250628_000003_create_user_2fa.sql new file mode 100644 index 0000000..40ba790 --- /dev/null +++ b/libs/migrate/sql/m20250628_000003_create_user_2fa.sql @@ -0,0 +1,9 @@ +CREATE TABLE IF NOT EXISTS user_2fa ( + "user" UUID PRIMARY KEY, + method VARCHAR(255) NOT NULL, + secret VARCHAR(255), + backup_codes JSONB NOT NULL, + is_enabled BOOLEAN NOT NULL DEFAULT false, + created_at TIMESTAMPTZ NOT NULL, + updated_at TIMESTAMPTZ NOT NULL +); diff --git 
a/libs/migrate/sql/m20250628_000004_create_user_activity_log.sql b/libs/migrate/sql/m20250628_000004_create_user_activity_log.sql new file mode 100644 index 0000000..b1d63d8 --- /dev/null +++ b/libs/migrate/sql/m20250628_000004_create_user_activity_log.sql @@ -0,0 +1,12 @@ +CREATE TABLE IF NOT EXISTS user_activity_log ( + id BIGSERIAL PRIMARY KEY, + user_uid UUID, + action VARCHAR(255) NOT NULL, + ip_address VARCHAR(255), + user_agent VARCHAR(255), + details JSONB NOT NULL, + created_at TIMESTAMPTZ NOT NULL +); + +CREATE INDEX idx_user_activity_log_user_uid ON user_activity_log (user_uid); +CREATE INDEX idx_user_activity_log_created_at ON user_activity_log (created_at); diff --git a/libs/migrate/sql/m20250628_000005_create_user_email.sql b/libs/migrate/sql/m20250628_000005_create_user_email.sql new file mode 100644 index 0000000..8b36fd9 --- /dev/null +++ b/libs/migrate/sql/m20250628_000005_create_user_email.sql @@ -0,0 +1,7 @@ +CREATE TABLE IF NOT EXISTS user_email ( + "user" UUID PRIMARY KEY, + email VARCHAR(255) NOT NULL, + created_at TIMESTAMPTZ NOT NULL +); + +CREATE INDEX idx_user_email_email ON user_email (email); diff --git a/libs/migrate/sql/m20250628_000006_create_user_notification.sql b/libs/migrate/sql/m20250628_000006_create_user_notification.sql new file mode 100644 index 0000000..c6d553f --- /dev/null +++ b/libs/migrate/sql/m20250628_000006_create_user_notification.sql @@ -0,0 +1,15 @@ +CREATE TABLE IF NOT EXISTS user_notification ( + "user" UUID PRIMARY KEY, + email_enabled BOOLEAN NOT NULL DEFAULT false, + in_app_enabled BOOLEAN NOT NULL DEFAULT true, + push_enabled BOOLEAN NOT NULL DEFAULT false, + digest_mode VARCHAR(255) NOT NULL, + dnd_enabled BOOLEAN NOT NULL DEFAULT false, + dnd_start_minute INTEGER, + dnd_end_minute INTEGER, + marketing_enabled BOOLEAN NOT NULL DEFAULT true, + security_enabled BOOLEAN NOT NULL DEFAULT true, + product_enabled BOOLEAN NOT NULL DEFAULT true, + created_at TIMESTAMPTZ NOT NULL, + updated_at TIMESTAMPTZ NOT NULL 
+); diff --git a/libs/migrate/sql/m20250628_000007_create_user_password.sql b/libs/migrate/sql/m20250628_000007_create_user_password.sql new file mode 100644 index 0000000..5175234 --- /dev/null +++ b/libs/migrate/sql/m20250628_000007_create_user_password.sql @@ -0,0 +1,8 @@ +CREATE TABLE IF NOT EXISTS user_password ( + "user" UUID PRIMARY KEY, + password_hash VARCHAR(255) NOT NULL, + password_salt VARCHAR(255), + is_active BOOLEAN NOT NULL DEFAULT true, + created_at TIMESTAMPTZ NOT NULL, + updated_at TIMESTAMPTZ NOT NULL +); diff --git a/libs/migrate/sql/m20250628_000008_create_user_password_reset.sql b/libs/migrate/sql/m20250628_000008_create_user_password_reset.sql new file mode 100644 index 0000000..ba6e1cb --- /dev/null +++ b/libs/migrate/sql/m20250628_000008_create_user_password_reset.sql @@ -0,0 +1,9 @@ +CREATE TABLE IF NOT EXISTS user_password_reset ( + token VARCHAR(255) PRIMARY KEY, + user_uid UUID NOT NULL, + expires_at TIMESTAMPTZ NOT NULL, + used BOOLEAN NOT NULL DEFAULT false, + created_at TIMESTAMPTZ NOT NULL +); + +CREATE INDEX idx_user_password_reset_user_uid ON user_password_reset (user_uid); diff --git a/libs/migrate/sql/m20250628_000009_create_user_preferences.sql b/libs/migrate/sql/m20250628_000009_create_user_preferences.sql new file mode 100644 index 0000000..37b8bb4 --- /dev/null +++ b/libs/migrate/sql/m20250628_000009_create_user_preferences.sql @@ -0,0 +1,10 @@ +CREATE TABLE IF NOT EXISTS user_preferences ( + "user" UUID PRIMARY KEY, + language VARCHAR(255) NOT NULL, + theme VARCHAR(255) NOT NULL, + timezone VARCHAR(255) NOT NULL, + email_notifications BOOLEAN NOT NULL DEFAULT true, + in_app_notifications BOOLEAN NOT NULL DEFAULT true, + created_at TIMESTAMPTZ NOT NULL, + updated_at TIMESTAMPTZ NOT NULL +); diff --git a/libs/migrate/sql/m20250628_000010_create_user_relation.sql b/libs/migrate/sql/m20250628_000010_create_user_relation.sql new file mode 100644 index 0000000..c5aa0cc --- /dev/null +++ 
b/libs/migrate/sql/m20250628_000010_create_user_relation.sql @@ -0,0 +1,10 @@ +CREATE TABLE IF NOT EXISTS user_relation ( + id BIGSERIAL PRIMARY KEY, + "user" UUID NOT NULL, + target UUID NOT NULL, + relation_type VARCHAR(255) NOT NULL, + created_at TIMESTAMPTZ NOT NULL +); + +CREATE INDEX idx_user_relation_user ON user_relation ("user"); +CREATE INDEX idx_user_relation_target ON user_relation (target); diff --git a/libs/migrate/sql/m20250628_000011_create_user_ssh_key.sql b/libs/migrate/sql/m20250628_000011_create_user_ssh_key.sql new file mode 100644 index 0000000..4d4f54a --- /dev/null +++ b/libs/migrate/sql/m20250628_000011_create_user_ssh_key.sql @@ -0,0 +1,17 @@ +CREATE TABLE IF NOT EXISTS user_ssh_key ( + id BIGSERIAL PRIMARY KEY, + "user" UUID NOT NULL, + title VARCHAR(255) NOT NULL, + public_key TEXT NOT NULL, + fingerprint VARCHAR(255) NOT NULL, + key_type VARCHAR(255) NOT NULL, + key_bits INTEGER, + is_verified BOOLEAN NOT NULL DEFAULT false, + last_used_at TIMESTAMPTZ, + expires_at TIMESTAMPTZ, + is_revoked BOOLEAN NOT NULL DEFAULT false, + created_at TIMESTAMPTZ NOT NULL, + updated_at TIMESTAMPTZ NOT NULL +); + +CREATE INDEX idx_user_ssh_key_user ON user_ssh_key ("user"); diff --git a/libs/migrate/sql/m20250628_000012_create_user_token.sql b/libs/migrate/sql/m20250628_000012_create_user_token.sql new file mode 100644 index 0000000..9768d2a --- /dev/null +++ b/libs/migrate/sql/m20250628_000012_create_user_token.sql @@ -0,0 +1,13 @@ +CREATE TABLE IF NOT EXISTS user_token ( + id BIGSERIAL PRIMARY KEY, + "user" UUID NOT NULL, + name VARCHAR(255) NOT NULL, + token_hash VARCHAR(255) NOT NULL, + scopes JSONB NOT NULL, + expires_at TIMESTAMPTZ, + is_revoked BOOLEAN NOT NULL DEFAULT false, + created_at TIMESTAMPTZ NOT NULL, + updated_at TIMESTAMPTZ NOT NULL +); + +CREATE INDEX idx_user_token_user ON user_token ("user"); diff --git a/libs/migrate/sql/m20250628_000013_create_project.sql b/libs/migrate/sql/m20250628_000013_create_project.sql new file mode 100644 
index 0000000..e305003 --- /dev/null +++ b/libs/migrate/sql/m20250628_000013_create_project.sql @@ -0,0 +1,14 @@ +CREATE TABLE IF NOT EXISTS project ( + id UUID PRIMARY KEY, + name VARCHAR(255) NOT NULL, + display_name VARCHAR(255) NOT NULL, + avatar_url VARCHAR(255), + description TEXT, + is_public BOOLEAN NOT NULL DEFAULT false, + created_by UUID NOT NULL, + created_at TIMESTAMPTZ NOT NULL, + updated_at TIMESTAMPTZ NOT NULL +); + +CREATE INDEX idx_project_name ON project (name); +CREATE INDEX idx_project_created_by ON project (created_by); diff --git a/libs/migrate/sql/m20250628_000014_create_project_access_log.sql b/libs/migrate/sql/m20250628_000014_create_project_access_log.sql new file mode 100644 index 0000000..b255975 --- /dev/null +++ b/libs/migrate/sql/m20250628_000014_create_project_access_log.sql @@ -0,0 +1,12 @@ +CREATE TABLE IF NOT EXISTS project_access_log ( + id BIGSERIAL PRIMARY KEY, + project UUID NOT NULL, + actor_uid UUID, + action VARCHAR(255) NOT NULL, + ip_address VARCHAR(255), + user_agent VARCHAR(255), + created_at TIMESTAMPTZ NOT NULL +); + +CREATE INDEX idx_project_access_log_project ON project_access_log (project); +CREATE INDEX idx_project_access_log_created_at ON project_access_log (created_at); diff --git a/libs/migrate/sql/m20250628_000015_create_project_audit_log.sql b/libs/migrate/sql/m20250628_000015_create_project_audit_log.sql new file mode 100644 index 0000000..0c403bc --- /dev/null +++ b/libs/migrate/sql/m20250628_000015_create_project_audit_log.sql @@ -0,0 +1,13 @@ +CREATE TABLE IF NOT EXISTS project_audit_log ( + id BIGSERIAL PRIMARY KEY, + project UUID NOT NULL, + actor UUID NOT NULL, + action TEXT NOT NULL, + details JSONB, + ip_address VARCHAR(255), + user_agent VARCHAR(255), + created_at TIMESTAMPTZ NOT NULL +); + +CREATE INDEX idx_project_audit_log_project ON project_audit_log (project); +CREATE INDEX idx_project_audit_log_created_at ON project_audit_log (created_at); diff --git 
a/libs/migrate/sql/m20250628_000016_create_project_billing.sql b/libs/migrate/sql/m20250628_000016_create_project_billing.sql new file mode 100644 index 0000000..88362cd --- /dev/null +++ b/libs/migrate/sql/m20250628_000016_create_project_billing.sql @@ -0,0 +1,8 @@ +CREATE TABLE IF NOT EXISTS project_billing ( + project_uuid UUID PRIMARY KEY, + balance NUMERIC NOT NULL DEFAULT 0.0, + currency TEXT NOT NULL, + user_uuid UUID, + updated_at TIMESTAMPTZ NOT NULL, + created_at TIMESTAMPTZ NOT NULL +); diff --git a/libs/migrate/sql/m20250628_000017_create_project_billing_history.sql b/libs/migrate/sql/m20250628_000017_create_project_billing_history.sql new file mode 100644 index 0000000..b9b89a8 --- /dev/null +++ b/libs/migrate/sql/m20250628_000017_create_project_billing_history.sql @@ -0,0 +1,12 @@ +CREATE TABLE IF NOT EXISTS project_billing_history ( + uid UUID PRIMARY KEY, + project UUID NOT NULL, + "user" UUID, + amount NUMERIC NOT NULL, + currency TEXT NOT NULL, + reason TEXT NOT NULL, + extra JSONB, + created_at TIMESTAMPTZ NOT NULL +); + +CREATE INDEX idx_project_billing_history_project ON project_billing_history (project); diff --git a/libs/migrate/sql/m20250628_000018_create_project_follow.sql b/libs/migrate/sql/m20250628_000018_create_project_follow.sql new file mode 100644 index 0000000..304bec3 --- /dev/null +++ b/libs/migrate/sql/m20250628_000018_create_project_follow.sql @@ -0,0 +1,9 @@ +CREATE TABLE IF NOT EXISTS project_follow ( + id BIGSERIAL PRIMARY KEY, + project UUID NOT NULL, + "user" UUID NOT NULL, + created_at TIMESTAMPTZ NOT NULL, + UNIQUE (project, "user") +); + +CREATE UNIQUE INDEX idx_project_follow_project_user ON project_follow (project, "user"); diff --git a/libs/migrate/sql/m20250628_000019_create_project_history_name.sql b/libs/migrate/sql/m20250628_000019_create_project_history_name.sql new file mode 100644 index 0000000..4a611c9 --- /dev/null +++ b/libs/migrate/sql/m20250628_000019_create_project_history_name.sql @@ -0,0 +1,8 @@ +CREATE 
TABLE IF NOT EXISTS project_history_name ( + id BIGSERIAL PRIMARY KEY, + project_uid UUID NOT NULL, + history_name VARCHAR(255) NOT NULL, + changed_at TIMESTAMPTZ NOT NULL +); + +CREATE INDEX idx_project_history_name_project_uid ON project_history_name (project_uid); diff --git a/libs/migrate/sql/m20250628_000020_create_project_label.sql b/libs/migrate/sql/m20250628_000020_create_project_label.sql new file mode 100644 index 0000000..7ced3d8 --- /dev/null +++ b/libs/migrate/sql/m20250628_000020_create_project_label.sql @@ -0,0 +1,8 @@ +CREATE TABLE IF NOT EXISTS project_label ( + id BIGSERIAL PRIMARY KEY, + project_uuid UUID NOT NULL, + label_id BIGINT NOT NULL, + relation_at TIMESTAMPTZ NOT NULL +); + +CREATE INDEX idx_project_label_project ON project_label (project_uuid); diff --git a/libs/migrate/sql/m20250628_000021_create_project_like.sql b/libs/migrate/sql/m20250628_000021_create_project_like.sql new file mode 100644 index 0000000..7c89cd3 --- /dev/null +++ b/libs/migrate/sql/m20250628_000021_create_project_like.sql @@ -0,0 +1,6 @@ +CREATE TABLE IF NOT EXISTS project_like ( + project UUID NOT NULL, + "user" UUID NOT NULL, + created_at TIMESTAMPTZ NOT NULL, + PRIMARY KEY (project, "user") +); diff --git a/libs/migrate/sql/m20250628_000022_create_project_member_invitations.sql b/libs/migrate/sql/m20250628_000022_create_project_member_invitations.sql new file mode 100644 index 0000000..b3ce559 --- /dev/null +++ b/libs/migrate/sql/m20250628_000022_create_project_member_invitations.sql @@ -0,0 +1,14 @@ +CREATE TABLE IF NOT EXISTS project_member_invitations ( + id BIGSERIAL PRIMARY KEY, + project UUID NOT NULL, + "user" UUID NOT NULL, + invited_by UUID NOT NULL, + scope VARCHAR(255) NOT NULL, + accepted BOOLEAN NOT NULL DEFAULT false, + accepted_at TIMESTAMPTZ, + rejected BOOLEAN NOT NULL DEFAULT false, + rejected_at TIMESTAMPTZ, + created_at TIMESTAMPTZ NOT NULL +); + +CREATE INDEX idx_project_member_invitations_project_user ON project_member_invitations (project, 
"user"); diff --git a/libs/migrate/sql/m20250628_000023_create_project_member_join_answers.sql b/libs/migrate/sql/m20250628_000023_create_project_member_join_answers.sql new file mode 100644 index 0000000..89a7cc0 --- /dev/null +++ b/libs/migrate/sql/m20250628_000023_create_project_member_join_answers.sql @@ -0,0 +1,11 @@ +CREATE TABLE IF NOT EXISTS project_member_join_answers ( + id BIGSERIAL PRIMARY KEY, + project UUID NOT NULL, + "user" UUID NOT NULL, + request_id BIGINT NOT NULL, + question VARCHAR(255) NOT NULL, + answer VARCHAR(255) NOT NULL, + created_at TIMESTAMPTZ NOT NULL +); + +CREATE INDEX idx_project_member_join_answers_request_id ON project_member_join_answers (request_id); diff --git a/libs/migrate/sql/m20250628_000024_create_project_member_join_request.sql b/libs/migrate/sql/m20250628_000024_create_project_member_join_request.sql new file mode 100644 index 0000000..cebc133 --- /dev/null +++ b/libs/migrate/sql/m20250628_000024_create_project_member_join_request.sql @@ -0,0 +1,15 @@ +CREATE TABLE IF NOT EXISTS project_member_join_request ( + id BIGSERIAL PRIMARY KEY, + project UUID NOT NULL, + "user" UUID NOT NULL, + status VARCHAR(255) NOT NULL, + message TEXT, + processed_by UUID, + processed_at TIMESTAMPTZ, + reject_reason TEXT, + created_at TIMESTAMPTZ NOT NULL, + updated_at TIMESTAMPTZ NOT NULL +); + +CREATE INDEX idx_project_member_join_request_project_user ON project_member_join_request (project, "user"); +CREATE INDEX idx_project_member_join_request_status ON project_member_join_request (status); diff --git a/libs/migrate/sql/m20250628_000025_create_project_member_join_settings.sql b/libs/migrate/sql/m20250628_000025_create_project_member_join_settings.sql new file mode 100644 index 0000000..6aaf7f2 --- /dev/null +++ b/libs/migrate/sql/m20250628_000025_create_project_member_join_settings.sql @@ -0,0 +1,9 @@ +CREATE TABLE IF NOT EXISTS project_member_join_settings ( + id BIGSERIAL PRIMARY KEY, + project UUID NOT NULL, + require_approval BOOLEAN 
NOT NULL DEFAULT false, + require_questions BOOLEAN NOT NULL DEFAULT false, + questions JSONB NOT NULL, + created_at TIMESTAMPTZ NOT NULL, + updated_at TIMESTAMPTZ NOT NULL +); diff --git a/libs/migrate/sql/m20250628_000026_create_project_members.sql b/libs/migrate/sql/m20250628_000026_create_project_members.sql new file mode 100644 index 0000000..73f0a11 --- /dev/null +++ b/libs/migrate/sql/m20250628_000026_create_project_members.sql @@ -0,0 +1,10 @@ +CREATE TABLE IF NOT EXISTS project_members ( + id BIGSERIAL PRIMARY KEY, + project_uuid UUID NOT NULL, + user_uuid UUID NOT NULL, + scope VARCHAR(255) NOT NULL, + joined_at TIMESTAMPTZ NOT NULL, + UNIQUE (project_uuid, user_uuid) +); + +CREATE UNIQUE INDEX idx_project_members_project_user ON project_members (project_uuid, user_uuid); diff --git a/libs/migrate/sql/m20250628_000027_create_project_watch.sql b/libs/migrate/sql/m20250628_000027_create_project_watch.sql new file mode 100644 index 0000000..b549aa7 --- /dev/null +++ b/libs/migrate/sql/m20250628_000027_create_project_watch.sql @@ -0,0 +1,11 @@ +CREATE TABLE IF NOT EXISTS project_watch ( + id BIGSERIAL PRIMARY KEY, + project UUID NOT NULL, + "user" UUID NOT NULL, + notifications_enabled BOOLEAN NOT NULL DEFAULT true, + created_at TIMESTAMPTZ NOT NULL, + updated_at TIMESTAMPTZ NOT NULL, + UNIQUE (project, "user") +); + +CREATE UNIQUE INDEX idx_project_watch_project_user ON project_watch (project, "user"); diff --git a/libs/migrate/sql/m20250628_000028_create_repo.sql b/libs/migrate/sql/m20250628_000028_create_repo.sql new file mode 100644 index 0000000..2f5bf59 --- /dev/null +++ b/libs/migrate/sql/m20250628_000028_create_repo.sql @@ -0,0 +1,15 @@ +CREATE TABLE IF NOT EXISTS repo ( + id UUID PRIMARY KEY, + repo_name VARCHAR(255) NOT NULL, + project UUID NOT NULL, + description TEXT, + default_branch VARCHAR(255) NOT NULL, + is_private BOOLEAN NOT NULL DEFAULT false, + storage_path VARCHAR(255) NOT NULL, + created_by UUID NOT NULL, + created_at TIMESTAMPTZ NOT 
NULL, + updated_at TIMESTAMPTZ NOT NULL +); + +CREATE INDEX idx_repo_project ON repo (project); +CREATE INDEX idx_repo_repo_name ON repo (repo_name); diff --git a/libs/migrate/sql/m20250628_000029_create_repo_branch.sql b/libs/migrate/sql/m20250628_000029_create_repo_branch.sql new file mode 100644 index 0000000..a3f8c94 --- /dev/null +++ b/libs/migrate/sql/m20250628_000029_create_repo_branch.sql @@ -0,0 +1,12 @@ +CREATE TABLE IF NOT EXISTS repo_branch ( + repo UUID NOT NULL, + name VARCHAR(255) NOT NULL, + oid VARCHAR(255) NOT NULL, + upstream VARCHAR(255), + head BOOLEAN NOT NULL DEFAULT false, + created_at TIMESTAMPTZ NOT NULL, + updated_at TIMESTAMPTZ NOT NULL, + PRIMARY KEY (repo, name) +); + +CREATE INDEX idx_repo_branch_repo ON repo_branch (repo); diff --git a/libs/migrate/sql/m20250628_000030_create_repo_branch_protect.sql b/libs/migrate/sql/m20250628_000030_create_repo_branch_protect.sql new file mode 100644 index 0000000..924977b --- /dev/null +++ b/libs/migrate/sql/m20250628_000030_create_repo_branch_protect.sql @@ -0,0 +1,14 @@ +CREATE TABLE IF NOT EXISTS repo_branch_protect ( + id BIGSERIAL PRIMARY KEY, + repo_uuid UUID NOT NULL, + branch VARCHAR(255) NOT NULL, + forbid_push BOOLEAN NOT NULL DEFAULT false, + forbid_pull BOOLEAN NOT NULL DEFAULT false, + forbid_merge BOOLEAN NOT NULL DEFAULT false, + forbid_deletion BOOLEAN NOT NULL DEFAULT false, + forbid_force_push BOOLEAN NOT NULL DEFAULT false, + forbid_tag_push BOOLEAN NOT NULL DEFAULT false, + UNIQUE (repo_uuid, branch) +); + +CREATE UNIQUE INDEX idx_repo_branch_protect_repo_branch ON repo_branch_protect (repo_uuid, branch); diff --git a/libs/migrate/sql/m20250628_000031_create_repo_collaborator.sql b/libs/migrate/sql/m20250628_000031_create_repo_collaborator.sql new file mode 100644 index 0000000..85d2f62 --- /dev/null +++ b/libs/migrate/sql/m20250628_000031_create_repo_collaborator.sql @@ -0,0 +1,7 @@ +CREATE TABLE IF NOT EXISTS repo_collaborator ( + repo UUID NOT NULL, + "user" UUID NOT NULL, + 
scope VARCHAR(255) NOT NULL, + created_at TIMESTAMPTZ NOT NULL, + PRIMARY KEY (repo, "user") +); diff --git a/libs/migrate/sql/m20250628_000032_create_repo_commit.sql b/libs/migrate/sql/m20250628_000032_create_repo_commit.sql new file mode 100644 index 0000000..7ee3531 --- /dev/null +++ b/libs/migrate/sql/m20250628_000032_create_repo_commit.sql @@ -0,0 +1,17 @@ +CREATE TABLE IF NOT EXISTS repo_commit ( + id BIGSERIAL PRIMARY KEY, + repo UUID NOT NULL, + oid VARCHAR(255) NOT NULL, + author_name VARCHAR(255) NOT NULL, + author_email VARCHAR(255) NOT NULL, + author UUID, + commiter_name VARCHAR(255) NOT NULL, + commiter_email VARCHAR(255) NOT NULL, + commiter UUID, + message TEXT NOT NULL, + parent JSONB NOT NULL, + created_at TIMESTAMPTZ NOT NULL +); + +CREATE INDEX idx_repo_commit_repo ON repo_commit (repo); +CREATE INDEX idx_repo_commit_oid ON repo_commit (oid); diff --git a/libs/migrate/sql/m20250628_000033_create_repo_fork.sql b/libs/migrate/sql/m20250628_000033_create_repo_fork.sql new file mode 100644 index 0000000..1908f83 --- /dev/null +++ b/libs/migrate/sql/m20250628_000033_create_repo_fork.sql @@ -0,0 +1,10 @@ +CREATE TABLE IF NOT EXISTS repo_fork ( + id BIGSERIAL PRIMARY KEY, + parent_repo UUID NOT NULL, + forked_repo UUID NOT NULL, + forked_by UUID NOT NULL, + forked_at TIMESTAMPTZ NOT NULL +); + +CREATE INDEX idx_repo_fork_parent_repo ON repo_fork (parent_repo); +CREATE UNIQUE INDEX idx_repo_fork_forked_repo ON repo_fork (forked_repo); diff --git a/libs/migrate/sql/m20250628_000034_create_repo_history_name.sql b/libs/migrate/sql/m20250628_000034_create_repo_history_name.sql new file mode 100644 index 0000000..92cd36e --- /dev/null +++ b/libs/migrate/sql/m20250628_000034_create_repo_history_name.sql @@ -0,0 +1,9 @@ +CREATE TABLE IF NOT EXISTS repo_history_name ( + id BIGSERIAL PRIMARY KEY, + repo_uuid UUID NOT NULL, + project_uid UUID NOT NULL, + name VARCHAR(255) NOT NULL, + change_at TIMESTAMPTZ NOT NULL +); + +CREATE INDEX idx_repo_history_name_repo ON 
repo_history_name (repo_uuid); diff --git a/libs/migrate/sql/m20250628_000035_create_repo_hook.sql b/libs/migrate/sql/m20250628_000035_create_repo_hook.sql new file mode 100644 index 0000000..d465849 --- /dev/null +++ b/libs/migrate/sql/m20250628_000035_create_repo_hook.sql @@ -0,0 +1,9 @@ +CREATE TABLE IF NOT EXISTS repo_hook ( + id BIGSERIAL PRIMARY KEY, + repo_uuid UUID NOT NULL, + event JSONB NOT NULL, + script TEXT NOT NULL, + created_at TIMESTAMPTZ NOT NULL +); + +CREATE INDEX idx_repo_hook_repo ON repo_hook (repo_uuid); diff --git a/libs/migrate/sql/m20250628_000036_create_repo_lfs_lock.sql b/libs/migrate/sql/m20250628_000036_create_repo_lfs_lock.sql new file mode 100644 index 0000000..5203ef1 --- /dev/null +++ b/libs/migrate/sql/m20250628_000036_create_repo_lfs_lock.sql @@ -0,0 +1,9 @@ +CREATE TABLE IF NOT EXISTS repo_lfs_lock ( + repo_uuid UUID NOT NULL, + path VARCHAR(255) NOT NULL, + lock_type VARCHAR(255) NOT NULL, + locked_by UUID NOT NULL, + locked_at TIMESTAMPTZ NOT NULL, + unlocked_at TIMESTAMPTZ, + PRIMARY KEY (repo_uuid, path) +); diff --git a/libs/migrate/sql/m20250628_000037_create_repo_lfs_object.sql b/libs/migrate/sql/m20250628_000037_create_repo_lfs_object.sql new file mode 100644 index 0000000..9571a82 --- /dev/null +++ b/libs/migrate/sql/m20250628_000037_create_repo_lfs_object.sql @@ -0,0 +1,11 @@ +CREATE TABLE IF NOT EXISTS repo_lfs_object ( + id BIGSERIAL PRIMARY KEY, + oid VARCHAR(255) NOT NULL, + repo_uuid UUID NOT NULL, + size BIGINT NOT NULL, + storage_path VARCHAR(255) NOT NULL, + uploaded_by UUID, + uploaded_at TIMESTAMPTZ NOT NULL +); + +CREATE INDEX idx_repo_lfs_object_repo_oid ON repo_lfs_object (repo_uuid, oid); diff --git a/libs/migrate/sql/m20250628_000038_create_repo_lock.sql b/libs/migrate/sql/m20250628_000038_create_repo_lock.sql new file mode 100644 index 0000000..fe1ec35 --- /dev/null +++ b/libs/migrate/sql/m20250628_000038_create_repo_lock.sql @@ -0,0 +1,9 @@ +CREATE TABLE IF NOT EXISTS repo_lock ( + repo_uuid UUID NOT 
NULL, + path VARCHAR(255) NOT NULL, + lock_type VARCHAR(255) NOT NULL, + locked_by UUID NOT NULL, + acquired_at TIMESTAMPTZ NOT NULL, + released_at TIMESTAMPTZ, + PRIMARY KEY (repo_uuid, path) +); diff --git a/libs/migrate/sql/m20250628_000039_create_repo_star.sql b/libs/migrate/sql/m20250628_000039_create_repo_star.sql new file mode 100644 index 0000000..c6433c1 --- /dev/null +++ b/libs/migrate/sql/m20250628_000039_create_repo_star.sql @@ -0,0 +1,9 @@ +CREATE TABLE IF NOT EXISTS repo_star ( + id BIGSERIAL PRIMARY KEY, + repo_uuid UUID NOT NULL, + user_uuid UUID NOT NULL, + created_at TIMESTAMPTZ NOT NULL, + UNIQUE (repo_uuid, user_uuid) +); + +CREATE UNIQUE INDEX idx_repo_star_repo_user ON repo_star (repo_uuid, user_uuid); diff --git a/libs/migrate/sql/m20250628_000040_create_repo_tag.sql b/libs/migrate/sql/m20250628_000040_create_repo_tag.sql new file mode 100644 index 0000000..99fd637 --- /dev/null +++ b/libs/migrate/sql/m20250628_000040_create_repo_tag.sql @@ -0,0 +1,12 @@ +CREATE TABLE IF NOT EXISTS repo_tag ( + repo_uuid UUID NOT NULL, + name VARCHAR(255) NOT NULL, + oid VARCHAR(255) NOT NULL, + color VARCHAR(255), + description TEXT, + created_at TIMESTAMPTZ NOT NULL, + tagger_name VARCHAR(255) NOT NULL, + tagger_email VARCHAR(255) NOT NULL, + tagger_uuid UUID, + PRIMARY KEY (repo_uuid, name) +); diff --git a/libs/migrate/sql/m20250628_000041_create_repo_upstream.sql b/libs/migrate/sql/m20250628_000041_create_repo_upstream.sql new file mode 100644 index 0000000..559f17e --- /dev/null +++ b/libs/migrate/sql/m20250628_000041_create_repo_upstream.sql @@ -0,0 +1,15 @@ +CREATE TABLE IF NOT EXISTS repo_upstream ( + id BIGSERIAL PRIMARY KEY, + repo_uuid UUID NOT NULL, + source_url VARCHAR(255) NOT NULL, + direction VARCHAR(255) NOT NULL, + schedule_cron VARCHAR(255), + last_run_at TIMESTAMPTZ, + next_run_at TIMESTAMPTZ, + status VARCHAR(255) NOT NULL, + created_at TIMESTAMPTZ NOT NULL, + updated_at TIMESTAMPTZ NOT NULL, + UNIQUE (repo_uuid) +); + +CREATE UNIQUE 
INDEX idx_repo_upstream_repo ON repo_upstream (repo_uuid); diff --git a/libs/migrate/sql/m20250628_000042_create_repo_watch.sql b/libs/migrate/sql/m20250628_000042_create_repo_watch.sql new file mode 100644 index 0000000..ba67dda --- /dev/null +++ b/libs/migrate/sql/m20250628_000042_create_repo_watch.sql @@ -0,0 +1,12 @@ +CREATE TABLE IF NOT EXISTS repo_watch ( + id BIGSERIAL PRIMARY KEY, + user_uuid UUID NOT NULL, + repo_uuid UUID NOT NULL, + show_dashboard BOOLEAN NOT NULL DEFAULT false, + notify_email BOOLEAN NOT NULL DEFAULT false, + created_at TIMESTAMPTZ NOT NULL, + updated_at TIMESTAMPTZ NOT NULL, + UNIQUE (user_uuid, repo_uuid) +); + +CREATE UNIQUE INDEX idx_repo_watch_user_repo ON repo_watch (user_uuid, repo_uuid); diff --git a/libs/migrate/sql/m20250628_000043_create_repo_webhook.sql b/libs/migrate/sql/m20250628_000043_create_repo_webhook.sql new file mode 100644 index 0000000..10a2231 --- /dev/null +++ b/libs/migrate/sql/m20250628_000043_create_repo_webhook.sql @@ -0,0 +1,13 @@ +CREATE TABLE IF NOT EXISTS repo_webhook ( + id BIGSERIAL PRIMARY KEY, + repo_uuid UUID NOT NULL, + event JSONB NOT NULL, + url VARCHAR(255), + access_key VARCHAR(255), + secret_key VARCHAR(255), + created_at TIMESTAMPTZ NOT NULL, + last_delivered_at TIMESTAMPTZ, + touch_count BIGINT NOT NULL DEFAULT 0 +); + +CREATE INDEX idx_repo_webhook_repo ON repo_webhook (repo_uuid); diff --git a/libs/migrate/sql/m20250628_000044_create_issue.sql b/libs/migrate/sql/m20250628_000044_create_issue.sql new file mode 100644 index 0000000..ef4efb4 --- /dev/null +++ b/libs/migrate/sql/m20250628_000044_create_issue.sql @@ -0,0 +1,18 @@ +CREATE TABLE IF NOT EXISTS issue ( + id UUID PRIMARY KEY, + project UUID NOT NULL, + number BIGINT NOT NULL, + title VARCHAR(255) NOT NULL, + body TEXT, + state VARCHAR(255) NOT NULL, + author UUID NOT NULL, + milestone VARCHAR(255), + created_at TIMESTAMPTZ NOT NULL, + updated_at TIMESTAMPTZ NOT NULL, + closed_at TIMESTAMPTZ, + created_by_ai BOOLEAN NOT NULL DEFAULT 
false +); + +CREATE INDEX idx_issue_project ON issue (project); +CREATE INDEX idx_issue_author ON issue (author); +CREATE INDEX idx_issue_state ON issue (state); diff --git a/libs/migrate/sql/m20250628_000045_create_issue_assignee.sql b/libs/migrate/sql/m20250628_000045_create_issue_assignee.sql new file mode 100644 index 0000000..4dd98cc --- /dev/null +++ b/libs/migrate/sql/m20250628_000045_create_issue_assignee.sql @@ -0,0 +1,6 @@ +CREATE TABLE IF NOT EXISTS issue_assignee ( + issue UUID NOT NULL, + "user" UUID NOT NULL, + assigned_at TIMESTAMPTZ NOT NULL, + PRIMARY KEY (issue, "user") +); diff --git a/libs/migrate/sql/m20250628_000046_create_issue_comment.sql b/libs/migrate/sql/m20250628_000046_create_issue_comment.sql new file mode 100644 index 0000000..54b34e5 --- /dev/null +++ b/libs/migrate/sql/m20250628_000046_create_issue_comment.sql @@ -0,0 +1,10 @@ +CREATE TABLE IF NOT EXISTS issue_comment ( + id BIGSERIAL PRIMARY KEY, + issue UUID NOT NULL, + author UUID NOT NULL, + body TEXT NOT NULL, + created_at TIMESTAMPTZ NOT NULL, + updated_at TIMESTAMPTZ NOT NULL +); + +CREATE INDEX idx_issue_comment_issue ON issue_comment (issue); diff --git a/libs/migrate/sql/m20250628_000047_create_issue_comment_reaction.sql b/libs/migrate/sql/m20250628_000047_create_issue_comment_reaction.sql new file mode 100644 index 0000000..7a5959f --- /dev/null +++ b/libs/migrate/sql/m20250628_000047_create_issue_comment_reaction.sql @@ -0,0 +1,7 @@ +CREATE TABLE IF NOT EXISTS issue_comment_reaction ( + comment_id BIGINT NOT NULL, + user_uuid UUID NOT NULL, + reaction VARCHAR(255) NOT NULL, + created_at TIMESTAMPTZ NOT NULL, + PRIMARY KEY (comment_id, user_uuid, reaction) +); diff --git a/libs/migrate/sql/m20250628_000048_create_issue_label.sql b/libs/migrate/sql/m20250628_000048_create_issue_label.sql new file mode 100644 index 0000000..c8f6ea7 --- /dev/null +++ b/libs/migrate/sql/m20250628_000048_create_issue_label.sql @@ -0,0 +1,6 @@ +CREATE TABLE IF NOT EXISTS issue_label ( + issue 
UUID NOT NULL, + label BIGINT NOT NULL, + relation_at TIMESTAMPTZ NOT NULL, + PRIMARY KEY (issue, label) +); diff --git a/libs/migrate/sql/m20250628_000049_create_issue_pull_request.sql b/libs/migrate/sql/m20250628_000049_create_issue_pull_request.sql new file mode 100644 index 0000000..b607897 --- /dev/null +++ b/libs/migrate/sql/m20250628_000049_create_issue_pull_request.sql @@ -0,0 +1,7 @@ +CREATE TABLE IF NOT EXISTS issue_pull_request ( + issue UUID NOT NULL, + repo UUID NOT NULL, + number BIGINT NOT NULL, + relation_at TIMESTAMPTZ NOT NULL, + PRIMARY KEY (issue, repo, number) +); diff --git a/libs/migrate/sql/m20250628_000050_create_issue_reaction.sql b/libs/migrate/sql/m20250628_000050_create_issue_reaction.sql new file mode 100644 index 0000000..726c2b7 --- /dev/null +++ b/libs/migrate/sql/m20250628_000050_create_issue_reaction.sql @@ -0,0 +1,7 @@ +CREATE TABLE IF NOT EXISTS issue_reaction ( + issue_uuid UUID NOT NULL, + user_uuid UUID NOT NULL, + reaction VARCHAR(255) NOT NULL, + created_at TIMESTAMPTZ NOT NULL, + PRIMARY KEY (issue_uuid, user_uuid, reaction) +); diff --git a/libs/migrate/sql/m20250628_000051_create_issue_repo.sql b/libs/migrate/sql/m20250628_000051_create_issue_repo.sql new file mode 100644 index 0000000..0d0e3f7 --- /dev/null +++ b/libs/migrate/sql/m20250628_000051_create_issue_repo.sql @@ -0,0 +1,6 @@ +CREATE TABLE IF NOT EXISTS issue_repo ( + issue UUID NOT NULL, + repo UUID NOT NULL, + relation_at TIMESTAMPTZ NOT NULL, + PRIMARY KEY (issue, repo) +); diff --git a/libs/migrate/sql/m20250628_000052_create_issue_subscriber.sql b/libs/migrate/sql/m20250628_000052_create_issue_subscriber.sql new file mode 100644 index 0000000..faab070 --- /dev/null +++ b/libs/migrate/sql/m20250628_000052_create_issue_subscriber.sql @@ -0,0 +1,7 @@ +CREATE TABLE IF NOT EXISTS issue_subscriber ( + issue UUID NOT NULL, + "user" UUID NOT NULL, + subscribed BOOLEAN NOT NULL DEFAULT true, + created_at TIMESTAMPTZ NOT NULL, + PRIMARY KEY (issue, "user") +); diff 
--git a/libs/migrate/sql/m20250628_000053_create_pull_request.sql b/libs/migrate/sql/m20250628_000053_create_pull_request.sql new file mode 100644 index 0000000..a41b1c5 --- /dev/null +++ b/libs/migrate/sql/m20250628_000053_create_pull_request.sql @@ -0,0 +1,21 @@ +CREATE TABLE IF NOT EXISTS pull_request ( + repo UUID NOT NULL, + number BIGINT NOT NULL, + issue UUID NOT NULL, + title VARCHAR(255) NOT NULL, + body TEXT, + author UUID NOT NULL, + base VARCHAR(255) NOT NULL, + head VARCHAR(255) NOT NULL, + status VARCHAR(255) NOT NULL, + merged_by UUID, + created_at TIMESTAMPTZ NOT NULL, + updated_at TIMESTAMPTZ NOT NULL, + merged_at TIMESTAMPTZ, + created_by_ai BOOLEAN NOT NULL DEFAULT false, + PRIMARY KEY (repo, number) +); + +CREATE INDEX idx_pull_request_repo ON pull_request (repo); +CREATE INDEX idx_pull_request_author ON pull_request (author); +CREATE INDEX idx_pull_request_status ON pull_request (status); diff --git a/libs/migrate/sql/m20250628_000054_create_pull_request_commit.sql b/libs/migrate/sql/m20250628_000054_create_pull_request_commit.sql new file mode 100644 index 0000000..0faf312 --- /dev/null +++ b/libs/migrate/sql/m20250628_000054_create_pull_request_commit.sql @@ -0,0 +1,14 @@ +CREATE TABLE IF NOT EXISTS pull_request_commit ( + repo UUID NOT NULL, + number BIGINT NOT NULL, + commit VARCHAR(255) NOT NULL, + message TEXT NOT NULL, + author_name VARCHAR(255) NOT NULL, + author_email VARCHAR(255) NOT NULL, + authored_at TIMESTAMPTZ NOT NULL, + committer_name VARCHAR(255) NOT NULL, + committer_email VARCHAR(255) NOT NULL, + committed_at TIMESTAMPTZ NOT NULL, + created_at TIMESTAMPTZ NOT NULL, + PRIMARY KEY (repo, number, commit) +); diff --git a/libs/migrate/sql/m20250628_000055_create_pull_request_review.sql b/libs/migrate/sql/m20250628_000055_create_pull_request_review.sql new file mode 100644 index 0000000..b6dbf40 --- /dev/null +++ b/libs/migrate/sql/m20250628_000055_create_pull_request_review.sql @@ -0,0 +1,11 @@ +CREATE TABLE IF NOT EXISTS 
pull_request_review ( + repo UUID NOT NULL, + number BIGINT NOT NULL, + reviewer UUID NOT NULL, + state VARCHAR(255) NOT NULL, + body TEXT, + submitted_at TIMESTAMPTZ, + created_at TIMESTAMPTZ NOT NULL, + updated_at TIMESTAMPTZ NOT NULL, + PRIMARY KEY (repo, number, reviewer) +); diff --git a/libs/migrate/sql/m20250628_000056_create_pull_request_review_comment.sql b/libs/migrate/sql/m20250628_000056_create_pull_request_review_comment.sql new file mode 100644 index 0000000..8b6a9ad --- /dev/null +++ b/libs/migrate/sql/m20250628_000056_create_pull_request_review_comment.sql @@ -0,0 +1,15 @@ +CREATE TABLE IF NOT EXISTS pull_request_review_comment ( + repo UUID NOT NULL, + number BIGINT NOT NULL, + id BIGINT NOT NULL, + review UUID, + path TEXT, + side VARCHAR(255), + line BIGINT, + old_line BIGINT, + body TEXT NOT NULL, + author UUID NOT NULL, + created_at TIMESTAMPTZ NOT NULL, + updated_at TIMESTAMPTZ NOT NULL, + PRIMARY KEY (repo, number, id) +); diff --git a/libs/migrate/sql/m20250628_000057_create_room_category.sql b/libs/migrate/sql/m20250628_000057_create_room_category.sql new file mode 100644 index 0000000..d22ef24 --- /dev/null +++ b/libs/migrate/sql/m20250628_000057_create_room_category.sql @@ -0,0 +1,10 @@ +CREATE TABLE IF NOT EXISTS room_category ( + id UUID PRIMARY KEY, + project_uuid UUID NOT NULL, + name VARCHAR(255) NOT NULL, + position INTEGER NOT NULL, + created_by UUID NOT NULL, + created_at TIMESTAMPTZ NOT NULL +); + +CREATE INDEX idx_room_category_project ON room_category (project_uuid); diff --git a/libs/migrate/sql/m20250628_000058_create_room.sql b/libs/migrate/sql/m20250628_000058_create_room.sql new file mode 100644 index 0000000..17fc6d2 --- /dev/null +++ b/libs/migrate/sql/m20250628_000058_create_room.sql @@ -0,0 +1,13 @@ +CREATE TABLE IF NOT EXISTS room ( + id UUID PRIMARY KEY, + project UUID NOT NULL, + room_name VARCHAR(255) NOT NULL, + public BOOLEAN NOT NULL DEFAULT false, + category UUID, + created_by UUID NOT NULL, + created_at 
TIMESTAMPTZ NOT NULL, + last_msg_at TIMESTAMPTZ NOT NULL +); + +CREATE INDEX idx_room_project ON room (project); +CREATE INDEX idx_room_category ON room (category); diff --git a/libs/migrate/sql/m20250628_000059_create_room_ai.sql b/libs/migrate/sql/m20250628_000059_create_room_ai.sql new file mode 100644 index 0000000..75d7bef --- /dev/null +++ b/libs/migrate/sql/m20250628_000059_create_room_ai.sql @@ -0,0 +1,17 @@ +CREATE TABLE IF NOT EXISTS room_ai ( + room UUID NOT NULL, + model UUID NOT NULL, + version UUID, + call_count BIGINT NOT NULL DEFAULT 0, + last_call_at TIMESTAMPTZ, + history_limit BIGINT, + system_prompt TEXT, + temperature DOUBLE PRECISION, + max_tokens BIGINT, + use_exact BOOLEAN NOT NULL DEFAULT false, + think BOOLEAN NOT NULL DEFAULT false, + min_score REAL, + created_at TIMESTAMPTZ NOT NULL, + updated_at TIMESTAMPTZ NOT NULL, + PRIMARY KEY (room, model) +); diff --git a/libs/migrate/sql/m20250628_000060_create_room_member.sql b/libs/migrate/sql/m20250628_000060_create_room_member.sql new file mode 100644 index 0000000..efcc517 --- /dev/null +++ b/libs/migrate/sql/m20250628_000060_create_room_member.sql @@ -0,0 +1,9 @@ +CREATE TABLE IF NOT EXISTS room_member ( + room UUID NOT NULL, + "user" UUID NOT NULL, + role VARCHAR(255) NOT NULL, + first_msg_in TIMESTAMPTZ, + joined_at TIMESTAMPTZ, + last_read_seq BIGINT, + PRIMARY KEY (room, "user") +); diff --git a/libs/migrate/sql/m20250628_000061_create_room_message.sql b/libs/migrate/sql/m20250628_000061_create_room_message.sql new file mode 100644 index 0000000..07e8572 --- /dev/null +++ b/libs/migrate/sql/m20250628_000061_create_room_message.sql @@ -0,0 +1,18 @@ +CREATE TABLE IF NOT EXISTS room_message ( + id UUID PRIMARY KEY, + seq BIGINT NOT NULL, + room UUID NOT NULL, + sender_type VARCHAR(255) NOT NULL, + sender_id UUID, + thread UUID, + content TEXT NOT NULL, + content_type VARCHAR(255) NOT NULL, + edited_at TIMESTAMPTZ, + send_at TIMESTAMPTZ NOT NULL, + revoked TIMESTAMPTZ, + revoked_by UUID +); 
+ +CREATE INDEX idx_room_message_room_seq ON room_message (room, seq); +CREATE INDEX idx_room_message_thread ON room_message (thread); +CREATE INDEX idx_room_message_send_at ON room_message (send_at); diff --git a/libs/migrate/sql/m20250628_000062_create_room_pin.sql b/libs/migrate/sql/m20250628_000062_create_room_pin.sql new file mode 100644 index 0000000..2544167 --- /dev/null +++ b/libs/migrate/sql/m20250628_000062_create_room_pin.sql @@ -0,0 +1,7 @@ +CREATE TABLE IF NOT EXISTS room_pin ( + room UUID NOT NULL, + message UUID NOT NULL, + pinned_by UUID NOT NULL, + pinned_at TIMESTAMPTZ NOT NULL, + PRIMARY KEY (room, message) +); diff --git a/libs/migrate/sql/m20250628_000063_create_room_thread.sql b/libs/migrate/sql/m20250628_000063_create_room_thread.sql new file mode 100644 index 0000000..ed44e1b --- /dev/null +++ b/libs/migrate/sql/m20250628_000063_create_room_thread.sql @@ -0,0 +1,13 @@ +CREATE TABLE IF NOT EXISTS room_thread ( + id UUID PRIMARY KEY, + room UUID NOT NULL, + parent BIGINT NOT NULL, + created_by UUID NOT NULL, + participants JSONB NOT NULL, + last_message_at TIMESTAMPTZ NOT NULL, + last_message_preview TEXT, + created_at TIMESTAMPTZ NOT NULL, + updated_at TIMESTAMPTZ NOT NULL +); + +CREATE INDEX idx_room_thread_room ON room_thread (room); diff --git a/libs/migrate/sql/m20250628_000064_create_ai_model.sql b/libs/migrate/sql/m20250628_000064_create_ai_model.sql new file mode 100644 index 0000000..1589dad --- /dev/null +++ b/libs/migrate/sql/m20250628_000064_create_ai_model.sql @@ -0,0 +1,16 @@ +CREATE TABLE IF NOT EXISTS ai_model ( + id UUID PRIMARY KEY, + provider_id UUID NOT NULL, + name VARCHAR(255) NOT NULL, + modality VARCHAR(255) NOT NULL, + capability VARCHAR(255) NOT NULL, + context_length BIGINT NOT NULL, + max_output_tokens BIGINT, + training_cutoff TIMESTAMPTZ, + is_open_source BOOLEAN NOT NULL DEFAULT false, + status VARCHAR(255) NOT NULL, + created_at TIMESTAMPTZ NOT NULL, + updated_at TIMESTAMPTZ NOT NULL +); + +CREATE INDEX 
idx_ai_model_provider_id ON ai_model (provider_id); diff --git a/libs/migrate/sql/m20250628_000065_create_ai_model_capability.sql b/libs/migrate/sql/m20250628_000065_create_ai_model_capability.sql new file mode 100644 index 0000000..5d26478 --- /dev/null +++ b/libs/migrate/sql/m20250628_000065_create_ai_model_capability.sql @@ -0,0 +1,9 @@ +CREATE TABLE IF NOT EXISTS ai_model_capability ( + id BIGSERIAL PRIMARY KEY, + model_version_id UUID NOT NULL, + capability VARCHAR(255) NOT NULL, + is_supported BOOLEAN NOT NULL DEFAULT false, + created_at TIMESTAMPTZ NOT NULL +); + +CREATE INDEX idx_ai_model_capability_model_version_id ON ai_model_capability (model_version_id); diff --git a/libs/migrate/sql/m20250628_000066_create_ai_model_parameter_profile.sql b/libs/migrate/sql/m20250628_000066_create_ai_model_parameter_profile.sql new file mode 100644 index 0000000..0a993b9 --- /dev/null +++ b/libs/migrate/sql/m20250628_000066_create_ai_model_parameter_profile.sql @@ -0,0 +1,13 @@ +CREATE TABLE IF NOT EXISTS ai_model_parameter_profile ( + id BIGSERIAL PRIMARY KEY, + model_version_id UUID NOT NULL, + temperature_min DOUBLE PRECISION NOT NULL, + temperature_max DOUBLE PRECISION NOT NULL, + top_p_min DOUBLE PRECISION NOT NULL, + top_p_max DOUBLE PRECISION NOT NULL, + frequency_penalty_supported BOOLEAN NOT NULL DEFAULT false, + presence_penalty_supported BOOLEAN NOT NULL DEFAULT false, + UNIQUE (model_version_id) +); + +CREATE UNIQUE INDEX idx_ai_model_parameter_profile_model_version_id ON ai_model_parameter_profile (model_version_id); diff --git a/libs/migrate/sql/m20250628_000067_create_ai_model_pricing.sql b/libs/migrate/sql/m20250628_000067_create_ai_model_pricing.sql new file mode 100644 index 0000000..e2fe96d --- /dev/null +++ b/libs/migrate/sql/m20250628_000067_create_ai_model_pricing.sql @@ -0,0 +1,10 @@ +CREATE TABLE IF NOT EXISTS ai_model_pricing ( + id BIGSERIAL PRIMARY KEY, + model_version_id UUID NOT NULL, + input_price_per_1k_tokens VARCHAR(255) NOT NULL, + 
output_price_per_1k_tokens VARCHAR(255) NOT NULL, + currency VARCHAR(255) NOT NULL, + effective_from TIMESTAMPTZ NOT NULL +); + +CREATE INDEX idx_ai_model_pricing_model_version_id ON ai_model_pricing (model_version_id); diff --git a/libs/migrate/sql/m20250628_000068_create_ai_model_provider.sql b/libs/migrate/sql/m20250628_000068_create_ai_model_provider.sql new file mode 100644 index 0000000..c504d48 --- /dev/null +++ b/libs/migrate/sql/m20250628_000068_create_ai_model_provider.sql @@ -0,0 +1,9 @@ +CREATE TABLE IF NOT EXISTS ai_model_provider ( + id UUID PRIMARY KEY, + name VARCHAR(255) NOT NULL, + display_name VARCHAR(255) NOT NULL, + website VARCHAR(255), + status VARCHAR(255) NOT NULL, + created_at TIMESTAMPTZ NOT NULL, + updated_at TIMESTAMPTZ NOT NULL +); diff --git a/libs/migrate/sql/m20250628_000069_create_ai_model_version.sql b/libs/migrate/sql/m20250628_000069_create_ai_model_version.sql new file mode 100644 index 0000000..00bd82b --- /dev/null +++ b/libs/migrate/sql/m20250628_000069_create_ai_model_version.sql @@ -0,0 +1,12 @@ +CREATE TABLE IF NOT EXISTS ai_model_version ( + id UUID PRIMARY KEY, + model_id UUID NOT NULL, + version VARCHAR(255) NOT NULL, + release_date TIMESTAMPTZ, + change_log TEXT, + is_default BOOLEAN NOT NULL DEFAULT false, + status VARCHAR(255) NOT NULL, + created_at TIMESTAMPTZ NOT NULL +); + +CREATE INDEX idx_ai_model_version_model_id ON ai_model_version (model_id); diff --git a/libs/migrate/sql/m20250628_000070_create_ai_session.sql b/libs/migrate/sql/m20250628_000070_create_ai_session.sql new file mode 100644 index 0000000..8402761 --- /dev/null +++ b/libs/migrate/sql/m20250628_000070_create_ai_session.sql @@ -0,0 +1,16 @@ +CREATE TABLE IF NOT EXISTS ai_session ( + id UUID PRIMARY KEY, + room UUID NOT NULL, + model UUID NOT NULL, + version UUID NOT NULL, + token_input BIGINT NOT NULL DEFAULT 0, + token_output BIGINT NOT NULL DEFAULT 0, + latency_ms BIGINT, + cost DOUBLE PRECISION, + currency VARCHAR(255), + error_message TEXT, + 
error_code VARCHAR(255), + created_at TIMESTAMPTZ NOT NULL +); + +CREATE INDEX idx_ai_session_room ON ai_session (room); diff --git a/libs/migrate/sql/m20250628_000071_create_ai_tool_auth.sql b/libs/migrate/sql/m20250628_000071_create_ai_tool_auth.sql new file mode 100644 index 0000000..0df0702 --- /dev/null +++ b/libs/migrate/sql/m20250628_000071_create_ai_tool_auth.sql @@ -0,0 +1,16 @@ +CREATE TABLE IF NOT EXISTS ai_tool_auth ( + session UUID NOT NULL, + tool_call_id VARCHAR(255) NOT NULL, + method VARCHAR(255) NOT NULL, + arguments TEXT NOT NULL, + decision BOOLEAN NOT NULL DEFAULT false, + reason VARCHAR(255) NOT NULL, + decision_by UUID NOT NULL, + decision_comment TEXT, + logs JSONB NOT NULL, + expires_at TIMESTAMPTZ, + authorized_at TIMESTAMPTZ, + created_at TIMESTAMPTZ NOT NULL, + updated_at TIMESTAMPTZ NOT NULL, + PRIMARY KEY (session, tool_call_id) +); diff --git a/libs/migrate/sql/m20250628_000072_create_ai_tool_call.sql b/libs/migrate/sql/m20250628_000072_create_ai_tool_call.sql new file mode 100644 index 0000000..9904e82 --- /dev/null +++ b/libs/migrate/sql/m20250628_000072_create_ai_tool_call.sql @@ -0,0 +1,20 @@ +CREATE TABLE IF NOT EXISTS ai_tool_call ( + tool_call_id VARCHAR(255) NOT NULL, + session UUID NOT NULL, + tool_name VARCHAR(255) NOT NULL, + caller UUID NOT NULL, + arguments JSONB NOT NULL, + result JSONB NOT NULL, + status VARCHAR(255) NOT NULL, + execution_time_ms BIGINT, + error_message TEXT, + error_stack TEXT, + retry_count INTEGER NOT NULL DEFAULT 0, + created_at TIMESTAMPTZ NOT NULL, + completed_at TIMESTAMPTZ, + updated_at TIMESTAMPTZ NOT NULL, + PRIMARY KEY (tool_call_id, session) +); + +CREATE INDEX idx_ai_tool_call_session ON ai_tool_call (session); +CREATE INDEX idx_ai_tool_call_status ON ai_tool_call (status); diff --git a/libs/migrate/sql/m20250628_000073_create_label.sql b/libs/migrate/sql/m20250628_000073_create_label.sql new file mode 100644 index 0000000..44be8c4 --- /dev/null +++ 
b/libs/migrate/sql/m20250628_000073_create_label.sql @@ -0,0 +1,8 @@ +CREATE TABLE IF NOT EXISTS label ( + id BIGSERIAL PRIMARY KEY, + project_uuid UUID NOT NULL, + name VARCHAR(255) NOT NULL, + color VARCHAR(255) NOT NULL +); + +CREATE INDEX idx_label_project ON label (project_uuid); diff --git a/libs/migrate/sql/m20250628_000074_create_notify.sql b/libs/migrate/sql/m20250628_000074_create_notify.sql new file mode 100644 index 0000000..f9c1021 --- /dev/null +++ b/libs/migrate/sql/m20250628_000074_create_notify.sql @@ -0,0 +1,15 @@ +CREATE TABLE IF NOT EXISTS notify ( + id BIGSERIAL PRIMARY KEY, + user_uuid UUID NOT NULL, + title VARCHAR(255) NOT NULL, + description TEXT, + content TEXT NOT NULL, + url VARCHAR(255), + kind INTEGER NOT NULL, + read_at TIMESTAMPTZ, + deleted_at TIMESTAMPTZ, + created_at TIMESTAMPTZ NOT NULL +); + +CREATE INDEX idx_notify_user ON notify (user_uuid); +CREATE INDEX idx_notify_created_at ON notify (created_at); diff --git a/libs/migrate/sql/m20250628_000075_fix_column_types_and_names.sql b/libs/migrate/sql/m20250628_000075_fix_column_types_and_names.sql new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/libs/migrate/sql/m20250628_000075_fix_column_types_and_names.sql @@ -0,0 +1 @@ + diff --git a/libs/migrate/sql/m20250628_000076_create_user_email_change.sql b/libs/migrate/sql/m20250628_000076_create_user_email_change.sql new file mode 100644 index 0000000..58b87fd --- /dev/null +++ b/libs/migrate/sql/m20250628_000076_create_user_email_change.sql @@ -0,0 +1,10 @@ +CREATE TABLE IF NOT EXISTS user_email_change ( + token VARCHAR(255) PRIMARY KEY, + user_uid UUID NOT NULL, + new_email VARCHAR(255) NOT NULL, + expires_at TIMESTAMPTZ NOT NULL, + used BOOLEAN NOT NULL DEFAULT false, + created_at TIMESTAMPTZ NOT NULL +); + +CREATE INDEX idx_user_email_change_user_uid ON user_email_change (user_uid); diff --git a/libs/migrate/sql/m20250628_000077_create_project_activity.sql 
b/libs/migrate/sql/m20250628_000077_create_project_activity.sql new file mode 100644 index 0000000..f996ee9 --- /dev/null +++ b/libs/migrate/sql/m20250628_000077_create_project_activity.sql @@ -0,0 +1,18 @@ +CREATE TABLE IF NOT EXISTS project_activity ( + id BIGSERIAL PRIMARY KEY, + project UUID NOT NULL, + repo UUID, + actor UUID NOT NULL, + event_type VARCHAR(50) NOT NULL, + event_id UUID, + event_sub_id BIGINT, + title VARCHAR(500) NOT NULL, + content TEXT, + metadata JSONB, + is_private BOOLEAN NOT NULL DEFAULT false, + created_at TIMESTAMPTZ NOT NULL +); + +CREATE INDEX idx_project_activity_project ON project_activity (project); +CREATE INDEX idx_project_activity_created_at ON project_activity (created_at DESC); +CREATE INDEX idx_project_activity_event_type ON project_activity (event_type); diff --git a/libs/migrate/sql/m20250628_000078_add_room_member_do_not_disturb.sql b/libs/migrate/sql/m20250628_000078_add_room_member_do_not_disturb.sql new file mode 100644 index 0000000..a1cd640 --- /dev/null +++ b/libs/migrate/sql/m20250628_000078_add_room_member_do_not_disturb.sql @@ -0,0 +1,9 @@ +-- Add DND (Do Not Disturb) columns to room_member table +-- These columns are referenced by the SeaORM model but were missing from the migration. 
+ +ALTER TABLE room_member + ADD COLUMN IF NOT EXISTS do_not_disturb BOOLEAN NOT NULL DEFAULT FALSE; + +ALTER TABLE room_member + ADD COLUMN IF NOT EXISTS dnd_start_hour SMALLINT, + ADD COLUMN IF NOT EXISTS dnd_end_hour SMALLINT; diff --git a/libs/migrate/sql/m20250628_000079_add_room_message_in_reply_to.sql b/libs/migrate/sql/m20250628_000079_add_room_message_in_reply_to.sql new file mode 100644 index 0000000..0e827b7 --- /dev/null +++ b/libs/migrate/sql/m20250628_000079_add_room_message_in_reply_to.sql @@ -0,0 +1,2 @@ +ALTER TABLE room_message + ADD COLUMN IF NOT EXISTS in_reply_to UUID; diff --git a/libs/migrate/sql/m20250628_000080_add_message_reactions_and_search.sql b/libs/migrate/sql/m20250628_000080_add_message_reactions_and_search.sql new file mode 100644 index 0000000..8a69341 --- /dev/null +++ b/libs/migrate/sql/m20250628_000080_add_message_reactions_and_search.sql @@ -0,0 +1,48 @@ +-- Create room_message_reaction table +CREATE TABLE IF NOT EXISTS room_message_reaction +( + id UUID PRIMARY KEY, + room UUID NOT NULL REFERENCES room (id) ON DELETE CASCADE, + message UUID NOT NULL REFERENCES room_message (id) ON DELETE CASCADE, + "user" UUID NOT NULL REFERENCES "user" (uid) ON DELETE CASCADE, + emoji VARCHAR(50) NOT NULL, + created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(), + UNIQUE (message, "user", emoji) +); + +-- Index for fast reaction lookups +CREATE INDEX IF NOT EXISTS idx_room_message_reaction_message ON room_message_reaction (message); +CREATE INDEX IF NOT EXISTS idx_room_message_reaction_user ON room_message_reaction ("user"); +CREATE INDEX IF NOT EXISTS idx_room_message_reaction_room ON room_message_reaction (room); + +-- Add full-text search index for message content (PostgreSQL tsvector) +ALTER TABLE room_message + ADD COLUMN IF NOT EXISTS content_tsv TSVECTOR; + +-- Update existing messages with tsvector +UPDATE room_message +SET content_tsv = to_tsvector('english', content) +WHERE content_tsv IS NULL; + +-- Create index for full-text 
search +CREATE INDEX IF NOT EXISTS idx_room_message_content_tsv ON room_message USING GIN (content_tsv); + +-- Trigger to auto-update content_tsv on insert/update +CREATE OR REPLACE FUNCTION room_message_tsv_trigger() RETURNS TRIGGER AS +' + BEGIN + NEW.content_tsv := to_tsvector(''simple'', COALESCE(NEW.content, '''')); + RETURN NEW; + END; +' LANGUAGE plpgsql; + +CREATE TRIGGER room_message_tsv_update + BEFORE INSERT OR UPDATE + ON room_message + FOR EACH ROW + EXECUTE FUNCTION room_message_tsv_trigger(); + +-- Backfill existing rows +UPDATE room_message +SET content_tsv = to_tsvector('simple', content) +WHERE content_tsv IS NULL; diff --git a/libs/migrate/sql/m20250628_000081_add_message_edit_history.sql b/libs/migrate/sql/m20250628_000081_add_message_edit_history.sql new file mode 100644 index 0000000..5462334 --- /dev/null +++ b/libs/migrate/sql/m20250628_000081_add_message_edit_history.sql @@ -0,0 +1,12 @@ +CREATE TABLE IF NOT EXISTS room_message_edit_history +( + id UUID PRIMARY KEY, + message UUID NOT NULL REFERENCES room_message (id) ON DELETE CASCADE, + "user" UUID NOT NULL REFERENCES "user" (uid) ON DELETE CASCADE, + old_content TEXT NOT NULL, + new_content TEXT NOT NULL, + edited_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW() +); + +CREATE INDEX IF NOT EXISTS idx_room_message_edit_history_message ON room_message_edit_history (message); +CREATE INDEX IF NOT EXISTS idx_room_message_edit_history_user ON room_message_edit_history ("user"); diff --git a/libs/migrate/sql/m20250628_000082_add_pr_review_comment_resolve.sql b/libs/migrate/sql/m20250628_000082_add_pr_review_comment_resolve.sql new file mode 100644 index 0000000..445afbd --- /dev/null +++ b/libs/migrate/sql/m20250628_000082_add_pr_review_comment_resolve.sql @@ -0,0 +1,6 @@ +-- Add resolved and in_reply_to columns to pull_request_review_comment +ALTER TABLE pull_request_review_comment + ADD COLUMN resolved BOOLEAN NOT NULL DEFAULT FALSE, + ADD COLUMN in_reply_to BIGINT NULL; + +-- \ No newline 
at end of file diff --git a/libs/migrate/sql/m20250628_000083_add_pr_review_request.sql b/libs/migrate/sql/m20250628_000083_add_pr_review_request.sql new file mode 100644 index 0000000..35641cb --- /dev/null +++ b/libs/migrate/sql/m20250628_000083_add_pr_review_request.sql @@ -0,0 +1,11 @@ +CREATE TABLE IF NOT EXISTS pull_request_review_request +( + repo UUID NOT NULL, + number BIGINT NOT NULL, + reviewer UUID NOT NULL, + requested_by UUID NOT NULL, + requested_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + dismissed_at TIMESTAMPTZ NULL, + dismissed_by UUID NULL, + PRIMARY KEY (repo, number, reviewer) +); diff --git a/libs/migrate/sql/m20260407_000001_extend_repo_branch_protect.sql b/libs/migrate/sql/m20260407_000001_extend_repo_branch_protect.sql new file mode 100644 index 0000000..088b51e --- /dev/null +++ b/libs/migrate/sql/m20260407_000001_extend_repo_branch_protect.sql @@ -0,0 +1,5 @@ +-- Extend repo_branch_protect with enhanced branch protection fields +ALTER TABLE repo_branch_protect ADD COLUMN IF NOT EXISTS required_approvals INTEGER NOT NULL DEFAULT 0; +ALTER TABLE repo_branch_protect ADD COLUMN IF NOT EXISTS dismiss_stale_reviews BOOLEAN NOT NULL DEFAULT false; +ALTER TABLE repo_branch_protect ADD COLUMN IF NOT EXISTS require_linear_history BOOLEAN NOT NULL DEFAULT false; +ALTER TABLE repo_branch_protect ADD COLUMN IF NOT EXISTS allow_fork_syncing BOOLEAN NOT NULL DEFAULT true; diff --git a/libs/migrate/sql/m20260407_000002_create_project_board.sql b/libs/migrate/sql/m20260407_000002_create_project_board.sql new file mode 100644 index 0000000..9c209b1 --- /dev/null +++ b/libs/migrate/sql/m20260407_000002_create_project_board.sql @@ -0,0 +1,44 @@ +CREATE TABLE IF NOT EXISTS project_board +( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + project_uuid UUID NOT NULL, + name VARCHAR(255) NOT NULL, + description TEXT, + created_by UUID NOT NULL, + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW() +); + +CREATE INDEX IF 
NOT EXISTS idx_project_board_project ON project_board (project_uuid); + +CREATE TABLE IF NOT EXISTS project_board_column +( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + board_uuid UUID NOT NULL REFERENCES project_board (id) ON DELETE CASCADE, + name VARCHAR(255) NOT NULL, + position INTEGER NOT NULL DEFAULT 0, + wip_limit INTEGER, + color VARCHAR(20) +); + +CREATE INDEX IF NOT EXISTS idx_project_board_column_board ON project_board_column (board_uuid); + +CREATE TABLE IF NOT EXISTS project_board_card +( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + column_uuid UUID NOT NULL REFERENCES project_board_column (id) ON DELETE CASCADE, + issue_id BIGINT, + project UUID, + title VARCHAR(500) NOT NULL, + description TEXT, + position INTEGER NOT NULL DEFAULT 0, + assignee_id UUID, + due_date TIMESTAMPTZ, + priority VARCHAR(10), + created_by UUID NOT NULL, + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW() +); + +CREATE INDEX IF NOT EXISTS idx_project_board_card_column ON project_board_card (column_uuid); +CREATE INDEX IF NOT EXISTS idx_project_board_card_issue ON project_board_card (issue_id) WHERE issue_id IS NOT NULL; diff --git a/libs/migrate/sql/m20260407_000003_add_repo_ai_code_review.sql b/libs/migrate/sql/m20260407_000003_add_repo_ai_code_review.sql new file mode 100644 index 0000000..bbf4ee1 --- /dev/null +++ b/libs/migrate/sql/m20260407_000003_add_repo_ai_code_review.sql @@ -0,0 +1 @@ +ALTER TABLE repo ADD COLUMN IF NOT EXISTS ai_code_review_enabled BOOLEAN NOT NULL DEFAULT false; diff --git a/libs/migrate/sql/m20260411_000001_create_workspace.sql b/libs/migrate/sql/m20260411_000001_create_workspace.sql new file mode 100644 index 0000000..7418576 --- /dev/null +++ b/libs/migrate/sql/m20260411_000001_create_workspace.sql @@ -0,0 +1,18 @@ +CREATE TABLE IF NOT EXISTS workspace ( + id UUID PRIMARY KEY, + slug VARCHAR(255) NOT NULL, + name VARCHAR(255) NOT NULL, + description TEXT, + avatar_url VARCHAR(255), + plan 
VARCHAR(50) NOT NULL DEFAULT 'free', + billing_email VARCHAR(255), + stripe_customer_id VARCHAR(255), + stripe_subscription_id VARCHAR(255), + plan_expires_at TIMESTAMPTZ, + deleted_at TIMESTAMPTZ, + created_at TIMESTAMPTZ NOT NULL, + updated_at TIMESTAMPTZ NOT NULL +); + +CREATE UNIQUE INDEX idx_workspace_slug ON workspace (slug); +CREATE INDEX idx_workspace_deleted_at ON workspace (deleted_at); diff --git a/libs/migrate/sql/m20260411_000002_create_workspace_membership.sql b/libs/migrate/sql/m20260411_000002_create_workspace_membership.sql new file mode 100644 index 0000000..05c75e8 --- /dev/null +++ b/libs/migrate/sql/m20260411_000002_create_workspace_membership.sql @@ -0,0 +1,13 @@ +CREATE TABLE IF NOT EXISTS workspace_membership ( + id BIGSERIAL PRIMARY KEY, + workspace_id UUID NOT NULL, + user_id UUID NOT NULL, + role VARCHAR(50) NOT NULL DEFAULT 'member', + status VARCHAR(50) NOT NULL DEFAULT 'active', + invited_by UUID, + joined_at TIMESTAMPTZ NOT NULL, + UNIQUE (workspace_id, user_id) +); + +CREATE UNIQUE INDEX idx_workspace_membership_ws_user ON workspace_membership (workspace_id, user_id); +CREATE INDEX idx_workspace_membership_user ON workspace_membership (user_id); diff --git a/libs/migrate/sql/m20260411_000003_add_workspace_id_to_project.sql b/libs/migrate/sql/m20260411_000003_add_workspace_id_to_project.sql new file mode 100644 index 0000000..7881544 --- /dev/null +++ b/libs/migrate/sql/m20260411_000003_add_workspace_id_to_project.sql @@ -0,0 +1,4 @@ +ALTER TABLE project + ADD COLUMN workspace_id UUID REFERENCES workspace(id) ON DELETE SET NULL; + +CREATE INDEX idx_project_workspace_id ON project (workspace_id) WHERE workspace_id IS NOT NULL; diff --git a/libs/migrate/sql/m20260411_000004_add_invite_token_to_workspace_membership.sql b/libs/migrate/sql/m20260411_000004_add_invite_token_to_workspace_membership.sql new file mode 100644 index 0000000..f35fedc --- /dev/null +++ 
b/libs/migrate/sql/m20260411_000004_add_invite_token_to_workspace_membership.sql @@ -0,0 +1,5 @@ +ALTER TABLE workspace_membership + ADD COLUMN invite_token VARCHAR(255), + ADD COLUMN invite_expires_at TIMESTAMPTZ; + +CREATE INDEX idx_workspace_membership_invite_token ON workspace_membership (invite_token) WHERE invite_token IS NOT NULL; diff --git a/libs/migrate/sql/m20260412_000001_create_workspace_billing.sql b/libs/migrate/sql/m20260412_000001_create_workspace_billing.sql new file mode 100644 index 0000000..40e757a --- /dev/null +++ b/libs/migrate/sql/m20260412_000001_create_workspace_billing.sql @@ -0,0 +1,9 @@ +CREATE TABLE IF NOT EXISTS workspace_billing ( + workspace_id UUID PRIMARY KEY REFERENCES workspace(id) ON DELETE CASCADE, + balance DECIMAL(20, 4) NOT NULL DEFAULT 0, + currency VARCHAR(10) NOT NULL DEFAULT 'USD', + monthly_quota DECIMAL(20, 4) NOT NULL DEFAULT 0, + total_spent DECIMAL(20, 4) NOT NULL DEFAULT 0, + updated_at TIMESTAMPTZ NOT NULL, + created_at TIMESTAMPTZ NOT NULL +); diff --git a/libs/migrate/sql/m20260412_000002_create_workspace_billing_history.sql b/libs/migrate/sql/m20260412_000002_create_workspace_billing_history.sql new file mode 100644 index 0000000..e15b62b --- /dev/null +++ b/libs/migrate/sql/m20260412_000002_create_workspace_billing_history.sql @@ -0,0 +1,14 @@ +CREATE TABLE IF NOT EXISTS workspace_billing_history +( + uid UUID PRIMARY KEY, + workspace_id UUID NOT NULL REFERENCES workspace (id) ON DELETE CASCADE, + user_id UUID, + amount DECIMAL(20, 4) NOT NULL, + currency VARCHAR(10) NOT NULL DEFAULT 'USD', + reason VARCHAR(100) NOT NULL, + extra JSONB, + created_at TIMESTAMPTZ NOT NULL +); + +CREATE INDEX IF NOT EXISTS idx_wsbh_workspace_id ON workspace_billing_history (workspace_id); +CREATE INDEX IF NOT EXISTS idx_wsbh_created_at ON workspace_billing_history (created_at DESC); diff --git a/libs/migrate/sql/m20260412_000003_create_project_skill.sql b/libs/migrate/sql/m20260412_000003_create_project_skill.sql new file mode 
100644 index 0000000..c04a642 --- /dev/null +++ b/libs/migrate/sql/m20260412_000003_create_project_skill.sql @@ -0,0 +1,21 @@ +CREATE TABLE IF NOT EXISTS project_skill +( + id BIGSERIAL PRIMARY KEY, + project_uuid UUID NOT NULL, + slug VARCHAR(255) NOT NULL, + name VARCHAR(255) NOT NULL, + description TEXT, + source VARCHAR(20) NOT NULL DEFAULT 'manual', + repo_id UUID, + content TEXT NOT NULL DEFAULT '', + metadata JSONB NOT NULL DEFAULT '{}', + enabled BOOLEAN NOT NULL DEFAULT true, + created_by UUID, + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + UNIQUE (project_uuid, slug) +); + +CREATE INDEX IF NOT EXISTS idx_project_skill_project ON project_skill (project_uuid); +CREATE INDEX IF NOT EXISTS idx_project_skill_slug ON project_skill (slug); +CREATE INDEX IF NOT EXISTS idx_project_skill_source ON project_skill (source); diff --git a/libs/migrate/sql/m20260413_000001_add_skill_commit_blob.sql b/libs/migrate/sql/m20260413_000001_add_skill_commit_blob.sql new file mode 100644 index 0000000..ce12284 --- /dev/null +++ b/libs/migrate/sql/m20260413_000001_add_skill_commit_blob.sql @@ -0,0 +1,7 @@ +-- Add commit_sha and blob_hash columns for repo-sourced skills +ALTER TABLE project_skill + ADD COLUMN IF NOT EXISTS commit_sha VARCHAR(40), + ADD COLUMN IF NOT EXISTS blob_hash VARCHAR(40); + +CREATE INDEX IF NOT EXISTS idx_project_skill_commit_sha ON project_skill (commit_sha); +CREATE INDEX IF NOT EXISTS idx_project_skill_blob_hash ON project_skill (blob_hash); diff --git a/libs/migrate/sql/m20260414_000001_create_agent_task.sql b/libs/migrate/sql/m20260414_000001_create_agent_task.sql new file mode 100644 index 0000000..0c8acbf --- /dev/null +++ b/libs/migrate/sql/m20260414_000001_create_agent_task.sql @@ -0,0 +1,34 @@ +CREATE TABLE IF NOT EXISTS agent_task +( + id BIGSERIAL PRIMARY KEY, + project_uuid UUID NOT NULL, + -- Root task vs sub-task: parent_id NULL = root task + parent_id BIGINT, + -- The AI that owns this 
task (optional, for sub-agent spawned tasks) + agent_type VARCHAR(20) NOT NULL DEFAULT 'react', + -- Status: pending / running / done / failed + status VARCHAR(20) NOT NULL DEFAULT 'pending', + -- Task description / goal + title VARCHAR(255), + input TEXT NOT NULL, + -- Final output (populated when status = done) + output TEXT, + -- Error message (populated when status = failed) + error TEXT, + -- Who initiated this task + created_by UUID, + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + started_at TIMESTAMPTZ, + done_at TIMESTAMPTZ, + -- Progress info (step count, current status text) + progress VARCHAR(255), + -- Parent task FK + CONSTRAINT fk_agent_task_parent FOREIGN KEY (parent_id) REFERENCES agent_task(id) ON DELETE SET NULL +); + +CREATE INDEX IF NOT EXISTS idx_agent_task_project ON agent_task (project_uuid); +CREATE INDEX IF NOT EXISTS idx_agent_task_parent ON agent_task (parent_id); +CREATE INDEX IF NOT EXISTS idx_agent_task_status ON agent_task (status); +CREATE INDEX IF NOT EXISTS idx_agent_task_created_by ON agent_task (created_by); +CREATE INDEX IF NOT EXISTS idx_agent_task_created_at ON agent_task (created_at); diff --git a/libs/models/Cargo.toml b/libs/models/Cargo.toml new file mode 100644 index 0000000..cccd42f --- /dev/null +++ b/libs/models/Cargo.toml @@ -0,0 +1,28 @@ +[package] +name = "models" +version.workspace = true +edition.workspace = true +authors.workspace = true +description.workspace = true +repository.workspace = true +readme.workspace = true +homepage.workspace = true +license.workspace = true +keywords.workspace = true +categories.workspace = true +documentation.workspace = true +[lib] +path = "lib.rs" +name = "models" +[dependencies] +sea-orm = { workspace = true, features = ["with-chrono", "with-uuid", "with-rust_decimal", "with-json", "rust_decimal"] } +serde = { workspace = true, features = ["derive"] } +chrono = { workspace = true, features = ["serde"] } +uuid = { workspace = 
true, features = ["serde"] } +rust_decimal = { workspace = true, features = ["serde"] } +serde_json = { workspace = true } +utoipa = { workspace = true, features = ["chrono", "uuid"] } + + +[lints] +workspace = true diff --git a/libs/models/agent_task/mod.rs b/libs/models/agent_task/mod.rs new file mode 100644 index 0000000..abf2205 --- /dev/null +++ b/libs/models/agent_task/mod.rs @@ -0,0 +1,154 @@ +//! Agent task model — sub-agents and sub-tasks as a unified concept. +//! +//! An `agent_task` represents either: +//! - A **root task** (parent_id = NULL): initiated by a user or system event. +//! The parent agent (Supervisor) spawns sub-tasks and coordinates their results. +//! - A **sub-task** (parent_id = set): a unit of work executed by a sub-agent. +//! +//! Status lifecycle: `pending` → `running` → `done` | `failed` +//! +//! Sub-agents are represented as `agent_task` records with a parent reference, +//! allowing hierarchical task trees and result aggregation. + +use crate::{DateTimeUtc, ProjectId, UserId}; +use sea_orm::entity::prelude::*; +use serde::{Deserialize, Serialize}; + +/// Agent task type — the kind of agent that executes this task. +#[derive( + Clone, Debug, PartialEq, Eq, EnumIter, Serialize, Deserialize, sea_orm::DeriveActiveEnum, +)] +#[sea_orm(rs_type = "String", db_type = "String(StringLen::None)")] +pub enum AgentType { + #[sea_orm(string_value = "React")] + React, + #[sea_orm(string_value = "Chat")] + Chat, +} + +impl Default for AgentType { + fn default() -> Self { + AgentType::React + } +} + +impl std::fmt::Display for AgentType { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + AgentType::React => write!(f, "React"), + AgentType::Chat => write!(f, "Chat"), + } + } +} + +/// Task status lifecycle. 
+#[derive( + Clone, Debug, PartialEq, Eq, EnumIter, Serialize, Deserialize, sea_orm::DeriveActiveEnum, +)] +#[sea_orm(rs_type = "String", db_type = "String(StringLen::None)")] +pub enum TaskStatus { + #[sea_orm(string_value = "Pending")] + Pending, + #[sea_orm(string_value = "Running")] + Running, + #[sea_orm(string_value = "Done")] + Done, + #[sea_orm(string_value = "Failed")] + Failed, +} + +impl Default for TaskStatus { + fn default() -> Self { + TaskStatus::Pending + } +} + +impl std::fmt::Display for TaskStatus { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + TaskStatus::Pending => write!(f, "Pending"), + TaskStatus::Running => write!(f, "Running"), + TaskStatus::Done => write!(f, "Done"), + TaskStatus::Failed => write!(f, "Failed"), + } + } +} + +/// Agent task record — represents both root tasks and sub-tasks. +#[derive(Clone, Debug, DeriveEntityModel, Serialize, Deserialize)] +#[sea_orm(table_name = "agent_task")] +pub struct Model { + #[sea_orm(primary_key)] + pub id: i64, + + /// Project this task belongs to. + pub project_uuid: ProjectId, + + /// Parent task (NULL for root tasks, set for sub-tasks). + #[sea_orm(nullable)] + pub parent_id: Option<i64>, + + /// Agent type that executes this task. + #[sea_orm(column_type = "String(StringLen::None)", default = "React")] + pub agent_type: AgentType, + + /// Current task status. + #[sea_orm(column_type = "String(StringLen::None)", default = "Pending")] + pub status: TaskStatus, + + /// Human-readable task title / goal description. + #[sea_orm(nullable)] + pub title: Option<String>, + + /// Task input — the prompt or goal text. + pub input: String, + + /// Task output — populated when status = done. + #[sea_orm(nullable)] + pub output: Option<String>, + + /// Error message — populated when status = failed. + #[sea_orm(nullable)] + pub error: Option<String>, + + /// User who initiated this task.
+ #[sea_orm(nullable)] + pub created_by: Option<UserId>, + + pub created_at: DateTimeUtc, + pub updated_at: DateTimeUtc, + + /// When execution started (status → running). + #[sea_orm(nullable)] + pub started_at: Option<DateTimeUtc>, + + /// When execution completed (status → done | failed). + #[sea_orm(nullable)] + pub done_at: Option<DateTimeUtc>, + + /// Current progress description (e.g., "step 2/5: analyzing code"). + #[sea_orm(nullable)] + pub progress: Option<String>, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation { + #[sea_orm( + belongs_to = "Entity", + from = "Column::ParentId", + to = "Column::Id" + )] + ParentTask, +} + +impl ActiveModelBehavior for ActiveModel {} + +impl Model { + pub fn is_root_task(&self) -> bool { + self.parent_id.is_none() + } + + pub fn is_done(&self) -> bool { + matches!(self.status, TaskStatus::Done | TaskStatus::Failed) + } +} diff --git a/libs/models/agents/mod.rs b/libs/models/agents/mod.rs new file mode 100644 index 0000000..dbd3eee --- /dev/null +++ b/libs/models/agents/mod.rs @@ -0,0 +1,171 @@ +use serde::{Deserialize, Serialize}; + +/// Model modality. Stored as `"text"`, `"image"`, `"audio"`, or `"multimodal"`. +#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] +pub enum ModelModality { + Text, + Image, + Audio, + Multimodal, +} + +impl std::fmt::Display for ModelModality { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + ModelModality::Text => write!(f, "text"), + ModelModality::Image => write!(f, "image"), + ModelModality::Audio => write!(f, "audio"), + ModelModality::Multimodal => write!(f, "multimodal"), + } + } +} + +impl std::str::FromStr for ModelModality { + type Err = &'static str; + fn from_str(s: &str) -> Result<Self, Self::Err> { + match s { + "text" => Ok(ModelModality::Text), + "image" => Ok(ModelModality::Image), + "audio" => Ok(ModelModality::Audio), + "multimodal" => Ok(ModelModality::Multimodal), + _ => Err("unknown model modality"), + } + } +} + +/// Primary model capability.
Stored as `"chat"`, `"completion"`, `"embedding"`, +/// or `"code"`. +#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] +pub enum ModelCapability { + Chat, + Completion, + Embedding, + Code, +} + +impl std::fmt::Display for ModelCapability { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + ModelCapability::Chat => write!(f, "chat"), + ModelCapability::Completion => write!(f, "completion"), + ModelCapability::Embedding => write!(f, "embedding"), + ModelCapability::Code => write!(f, "code"), + } + } +} + +impl std::str::FromStr for ModelCapability { + type Err = &'static str; + fn from_str(s: &str) -> Result<Self, Self::Err> { + match s { + "chat" => Ok(ModelCapability::Chat), + "completion" => Ok(ModelCapability::Completion), + "embedding" => Ok(ModelCapability::Embedding), + "code" => Ok(ModelCapability::Code), + _ => Err("unknown model capability"), + } + } +} + +/// Model or model-version availability status. Stored as `"active"` or +/// `"deprecated"`. +#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] +pub enum ModelStatus { + Active, + Deprecated, +} + +impl std::fmt::Display for ModelStatus { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + ModelStatus::Active => write!(f, "active"), + ModelStatus::Deprecated => write!(f, "deprecated"), + } + } +} + +impl std::str::FromStr for ModelStatus { + type Err = &'static str; + fn from_str(s: &str) -> Result<Self, Self::Err> { + match s { + "active" => Ok(ModelStatus::Active), + "deprecated" => Ok(ModelStatus::Deprecated), + _ => Err("unknown model status"), + } + } +} + +/// Capability type for per-version capability records. Stored as +/// `"function_call"`, `"tool_use"`, `"vision"`, or `"reasoning"`.
+#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] +pub enum CapabilityType { + FunctionCall, + ToolUse, + Vision, + Reasoning, +} + +impl std::fmt::Display for CapabilityType { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + CapabilityType::FunctionCall => write!(f, "function_call"), + CapabilityType::ToolUse => write!(f, "tool_use"), + CapabilityType::Vision => write!(f, "vision"), + CapabilityType::Reasoning => write!(f, "reasoning"), + } + } +} + +impl std::str::FromStr for CapabilityType { + type Err = &'static str; + fn from_str(s: &str) -> Result<Self, Self::Err> { + match s { + "function_call" => Ok(CapabilityType::FunctionCall), + "tool_use" => Ok(CapabilityType::ToolUse), + "vision" => Ok(CapabilityType::Vision), + "reasoning" => Ok(CapabilityType::Reasoning), + _ => Err("unknown capability type"), + } + } +} + +/// Pricing currency. Stored as `"USD"` or `"CNY"`. +#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] +pub enum PricingCurrency { + Usd, + Cny, +} + +impl std::fmt::Display for PricingCurrency { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + PricingCurrency::Usd => write!(f, "USD"), + PricingCurrency::Cny => write!(f, "CNY"), + } + } +} + +impl std::str::FromStr for PricingCurrency { + type Err = &'static str; + fn from_str(s: &str) -> Result<Self, Self::Err> { + match s { + "USD" => Ok(PricingCurrency::Usd), + "CNY" => Ok(PricingCurrency::Cny), + _ => Err("unknown pricing currency"), + } + } +} + +pub use model::Entity as Model; +pub use model_capability::Entity as ModelCapabilityRecord; +pub use model_parameter_profile::Entity as ModelParameterProfile; +pub use model_pricing::Entity as ModelPricing; +pub use model_provider::Entity as ModelProvider; +pub use model_version::Entity as ModelVersion; + +pub mod model; +pub mod model_capability; +pub mod model_parameter_profile; +pub mod model_pricing; +pub mod model_provider; +pub mod model_version; diff --git 
a/libs/models/agents/model.rs b/libs/models/agents/model.rs new file mode 100644 index 0000000..05f3d18 --- /dev/null +++ b/libs/models/agents/model.rs @@ -0,0 +1,42 @@ +use crate::{DateTimeUtc, ModelId, ModelProviderId}; +use sea_orm::entity::prelude::*; +use serde::{Deserialize, Serialize}; + +use super::{ModelCapability, ModelModality, ModelStatus}; + +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)] +#[sea_orm(table_name = "ai_model")] +pub struct Model { + #[sea_orm(primary_key)] + pub id: ModelId, + pub provider_id: ModelProviderId, + pub name: String, + pub modality: String, + pub capability: String, + pub context_length: i64, + pub max_output_tokens: Option, + pub training_cutoff: Option, + pub is_open_source: bool, + pub status: String, + pub created_at: DateTimeUtc, + pub updated_at: DateTimeUtc, +} + +impl Model { + pub fn modality_enum(&self) -> Result { + self.modality.parse() + } + + pub fn capability_enum(&self) -> Result { + self.capability.parse() + } + + pub fn status_enum(&self) -> Result { + self.status.parse() + } +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation {} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/libs/models/agents/model_capability.rs b/libs/models/agents/model_capability.rs new file mode 100644 index 0000000..6071144 --- /dev/null +++ b/libs/models/agents/model_capability.rs @@ -0,0 +1,27 @@ +use crate::DateTimeUtc; +use sea_orm::entity::prelude::*; +use serde::{Deserialize, Serialize}; + +use super::CapabilityType; + +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)] +#[sea_orm(table_name = "ai_model_capability")] +pub struct Model { + #[sea_orm(primary_key)] + pub id: i64, + pub model_version_id: i64, + pub capability: String, + pub is_supported: bool, + pub created_at: DateTimeUtc, +} + +impl Model { + pub fn capability_enum(&self) -> Result { + self.capability.parse() + } +} + +#[derive(Copy, Clone, Debug, EnumIter, 
DeriveRelation)] +pub enum Relation {} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/libs/models/agents/model_parameter_profile.rs b/libs/models/agents/model_parameter_profile.rs new file mode 100644 index 0000000..105a1d9 --- /dev/null +++ b/libs/models/agents/model_parameter_profile.rs @@ -0,0 +1,22 @@ +use crate::ModelVersionId; +use sea_orm::entity::prelude::*; +use serde::{Deserialize, Serialize}; + +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)] +#[sea_orm(table_name = "ai_model_parameter_profile")] +pub struct Model { + #[sea_orm(primary_key)] + pub id: i64, + pub model_version_id: ModelVersionId, + pub temperature_min: f64, + pub temperature_max: f64, + pub top_p_min: f64, + pub top_p_max: f64, + pub frequency_penalty_supported: bool, + pub presence_penalty_supported: bool, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation {} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/libs/models/agents/model_pricing.rs b/libs/models/agents/model_pricing.rs new file mode 100644 index 0000000..a8b4ce1 --- /dev/null +++ b/libs/models/agents/model_pricing.rs @@ -0,0 +1,28 @@ +use crate::{DateTimeUtc, ModelVersionId}; +use sea_orm::entity::prelude::*; +use serde::{Deserialize, Serialize}; + +use super::PricingCurrency; + +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)] +#[sea_orm(table_name = "ai_model_pricing")] +pub struct Model { + #[sea_orm(primary_key)] + pub id: i64, + pub model_version_id: ModelVersionId, + pub input_price_per_1k_tokens: String, + pub output_price_per_1k_tokens: String, + pub currency: String, + pub effective_from: DateTimeUtc, +} + +impl Model { + pub fn currency_enum(&self) -> Result { + self.currency.parse() + } +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation {} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/libs/models/agents/model_provider.rs b/libs/models/agents/model_provider.rs 
new file mode 100644 index 0000000..aa167a4 --- /dev/null +++ b/libs/models/agents/model_provider.rs @@ -0,0 +1,29 @@ +use crate::{DateTimeUtc, ModelProviderId}; +use sea_orm::entity::prelude::*; +use serde::{Deserialize, Serialize}; + +use super::ModelStatus; + +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)] +#[sea_orm(table_name = "ai_model_provider")] +pub struct Model { + #[sea_orm(primary_key)] + pub id: ModelProviderId, + pub name: String, + pub display_name: String, + pub website: Option, + pub status: String, + pub created_at: DateTimeUtc, + pub updated_at: DateTimeUtc, +} + +impl Model { + pub fn status_enum(&self) -> Result { + self.status.parse() + } +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation {} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/libs/models/agents/model_version.rs b/libs/models/agents/model_version.rs new file mode 100644 index 0000000..72bea36 --- /dev/null +++ b/libs/models/agents/model_version.rs @@ -0,0 +1,30 @@ +use crate::{DateTimeUtc, ModelId, ModelVersionId}; +use sea_orm::entity::prelude::*; +use serde::{Deserialize, Serialize}; + +use super::ModelStatus; + +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)] +#[sea_orm(table_name = "ai_model_version")] +pub struct Model { + #[sea_orm(primary_key)] + pub id: ModelVersionId, + pub model_id: ModelId, + pub version: String, + pub release_date: Option, + pub change_log: Option, + pub is_default: bool, + pub status: String, + pub created_at: DateTimeUtc, +} + +impl Model { + pub fn status_enum(&self) -> Result { + self.status.parse() + } +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation {} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/libs/models/ai/ai_session.rs b/libs/models/ai/ai_session.rs new file mode 100644 index 0000000..e3dd0fc --- /dev/null +++ b/libs/models/ai/ai_session.rs @@ -0,0 +1,26 @@ +use crate::{AiSessionId, 
DateTimeUtc, ModelId, ModelVersionId, RoomId}; +use sea_orm::entity::prelude::*; +use serde::{Deserialize, Serialize}; + +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)] +#[sea_orm(table_name = "ai_session")] +pub struct Model { + #[sea_orm(primary_key)] + pub id: AiSessionId, + pub room: RoomId, + pub model: ModelId, + pub version: ModelVersionId, + pub token_input: i64, + pub token_output: i64, + pub latency_ms: Option, + pub cost: Option, + pub currency: Option, + pub error_message: Option, + pub error_code: Option, + pub created_at: DateTimeUtc, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation {} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/libs/models/ai/ai_tool_auth.rs b/libs/models/ai/ai_tool_auth.rs new file mode 100644 index 0000000..19d7f62 --- /dev/null +++ b/libs/models/ai/ai_tool_auth.rs @@ -0,0 +1,28 @@ +use crate::{AiSessionId, DateTimeUtc, UserId}; +use sea_orm::entity::prelude::*; +use serde::{Deserialize, Serialize}; + +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)] +#[sea_orm(table_name = "ai_tool_auth")] +pub struct Model { + #[sea_orm(primary_key)] + pub session: AiSessionId, + #[sea_orm(primary_key)] + pub tool_call_id: String, + pub method: String, + pub arguments: String, + pub decision: bool, + pub reason: String, + pub decision_by: UserId, + pub decision_comment: Option, + pub logs: sea_orm::JsonValue, + pub expires_at: Option, + pub authorized_at: Option, + pub created_at: DateTimeUtc, + pub updated_at: DateTimeUtc, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation {} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/libs/models/ai/ai_tool_call.rs b/libs/models/ai/ai_tool_call.rs new file mode 100644 index 0000000..4cf78b7 --- /dev/null +++ b/libs/models/ai/ai_tool_call.rs @@ -0,0 +1,37 @@ +use crate::{AiSessionId, DateTimeUtc, UserId}; +use sea_orm::entity::prelude::*; +use 
serde::{Deserialize, Serialize}; + +use super::ToolCallStatus; + +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)] +#[sea_orm(table_name = "ai_tool_call")] +pub struct Model { + #[sea_orm(primary_key)] + pub tool_call_id: String, + #[sea_orm(primary_key)] + pub session: AiSessionId, + pub tool_name: String, + pub caller: UserId, + pub arguments: sea_orm::JsonValue, + pub result: sea_orm::JsonValue, + pub status: String, + pub execution_time_ms: Option, + pub error_message: Option, + pub error_stack: Option, + pub retry_count: i32, + pub created_at: DateTimeUtc, + pub completed_at: Option, + pub updated_at: DateTimeUtc, +} + +impl Model { + pub fn status_enum(&self) -> Result { + self.status.parse() + } +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation {} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/libs/models/ai/mod.rs b/libs/models/ai/mod.rs new file mode 100644 index 0000000..d327084 --- /dev/null +++ b/libs/models/ai/mod.rs @@ -0,0 +1,46 @@ +use serde::{Deserialize, Serialize}; + +/// AI tool call execution status. Stored as `"pending"`, `"running"`, `"success"`, +/// `"failed"`, or `"retrying"`. 
+#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] +pub enum ToolCallStatus { + Pending, + Running, + Success, + Failed, + Retrying, +} + +impl std::fmt::Display for ToolCallStatus { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + ToolCallStatus::Pending => write!(f, "pending"), + ToolCallStatus::Running => write!(f, "running"), + ToolCallStatus::Success => write!(f, "success"), + ToolCallStatus::Failed => write!(f, "failed"), + ToolCallStatus::Retrying => write!(f, "retrying"), + } + } +} + +impl std::str::FromStr for ToolCallStatus { + type Err = &'static str; + fn from_str(s: &str) -> Result { + match s { + "pending" => Ok(ToolCallStatus::Pending), + "running" => Ok(ToolCallStatus::Running), + "success" => Ok(ToolCallStatus::Success), + "failed" => Ok(ToolCallStatus::Failed), + "retrying" => Ok(ToolCallStatus::Retrying), + _ => Err("unknown tool call status"), + } + } +} + +pub use ai_session::Entity as AiSession; +pub use ai_tool_auth::Entity as AiToolAuth; +pub use ai_tool_call::Entity as AiToolCall; + +pub mod ai_session; +pub mod ai_tool_auth; +pub mod ai_tool_call; diff --git a/libs/models/issues/issue.rs b/libs/models/issues/issue.rs new file mode 100644 index 0000000..8313b49 --- /dev/null +++ b/libs/models/issues/issue.rs @@ -0,0 +1,39 @@ +use crate::{DateTimeUtc, IssueId, ProjectId, UserId}; +use sea_orm::entity::prelude::*; +use serde::{Deserialize, Serialize}; + +use super::IssueState; + +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)] +#[sea_orm(table_name = "issue")] +pub struct Model { + /// UUID for global uniqueness / API exposure. + #[sea_orm(primary_key)] + pub id: IssueId, + /// Project this issue belongs to. + pub project: ProjectId, + /// Sequential issue number within the project. Composite with `project` for uniqueness. + #[sea_orm(primary_key)] + pub number: i64, + pub title: String, + pub body: Option, + /// `"open"` or `"closed"`. 
+ pub state: String, + pub author: UserId, + pub milestone: Option, + pub created_at: DateTimeUtc, + pub updated_at: DateTimeUtc, + pub closed_at: Option, + pub created_by_ai: bool, +} + +impl Model { + pub fn state_enum(&self) -> Result { + self.state.parse() + } +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation {} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/libs/models/issues/issue_assignee.rs b/libs/models/issues/issue_assignee.rs new file mode 100644 index 0000000..a50ac10 --- /dev/null +++ b/libs/models/issues/issue_assignee.rs @@ -0,0 +1,18 @@ +use crate::{DateTimeUtc, IssueId, UserId}; +use sea_orm::entity::prelude::*; +use serde::{Deserialize, Serialize}; + +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)] +#[sea_orm(table_name = "issue_assignee")] +pub struct Model { + #[sea_orm(primary_key)] + pub issue: IssueId, + #[sea_orm(primary_key)] + pub user: UserId, + pub assigned_at: DateTimeUtc, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation {} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/libs/models/issues/issue_comment.rs b/libs/models/issues/issue_comment.rs new file mode 100644 index 0000000..4b06993 --- /dev/null +++ b/libs/models/issues/issue_comment.rs @@ -0,0 +1,20 @@ +use crate::{DateTimeUtc, IssueId, UserId}; +use sea_orm::entity::prelude::*; +use serde::{Deserialize, Serialize}; + +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)] +#[sea_orm(table_name = "issue_comment")] +pub struct Model { + #[sea_orm(primary_key)] + pub id: i64, + pub issue: IssueId, + pub author: UserId, + pub body: String, + pub created_at: DateTimeUtc, + pub updated_at: DateTimeUtc, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation {} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/libs/models/issues/issue_comment_reaction.rs b/libs/models/issues/issue_comment_reaction.rs new file mode 
100644 index 0000000..b5d4bb3 --- /dev/null +++ b/libs/models/issues/issue_comment_reaction.rs @@ -0,0 +1,28 @@ +use crate::{DateTimeUtc, UserId}; +use sea_orm::entity::prelude::*; +use serde::{Deserialize, Serialize}; + +use super::ReactionType; + +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)] +#[sea_orm(table_name = "issue_comment_reaction")] +pub struct Model { + #[sea_orm(primary_key)] + pub comment: i64, + #[sea_orm(primary_key, column_name = "user_uuid")] + pub user: UserId, + #[sea_orm(primary_key)] + pub reaction: String, + pub created_at: DateTimeUtc, +} + +impl Model { + pub fn reaction_enum(&self) -> Result { + self.reaction.parse() + } +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation {} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/libs/models/issues/issue_label.rs b/libs/models/issues/issue_label.rs new file mode 100644 index 0000000..eda090a --- /dev/null +++ b/libs/models/issues/issue_label.rs @@ -0,0 +1,18 @@ +use crate::{DateTimeUtc, IssueId}; +use sea_orm::entity::prelude::*; +use serde::{Deserialize, Serialize}; + +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)] +#[sea_orm(table_name = "issue_label")] +pub struct Model { + #[sea_orm(primary_key)] + pub issue: IssueId, + #[sea_orm(primary_key)] + pub label: i64, + pub relation_at: DateTimeUtc, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation {} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/libs/models/issues/issue_pull_request.rs b/libs/models/issues/issue_pull_request.rs new file mode 100644 index 0000000..f5c5e87 --- /dev/null +++ b/libs/models/issues/issue_pull_request.rs @@ -0,0 +1,20 @@ +use crate::{DateTimeUtc, IssueId, RepoId}; +use sea_orm::entity::prelude::*; +use serde::{Deserialize, Serialize}; + +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)] +#[sea_orm(table_name = "issue_pull_request")] +pub struct 
Model { + #[sea_orm(primary_key)] + pub issue: IssueId, + #[sea_orm(primary_key)] + pub repo: RepoId, + #[sea_orm(primary_key)] + pub number: i64, + pub relation_at: DateTimeUtc, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation {} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/libs/models/issues/issue_reaction.rs b/libs/models/issues/issue_reaction.rs new file mode 100644 index 0000000..58f90de --- /dev/null +++ b/libs/models/issues/issue_reaction.rs @@ -0,0 +1,28 @@ +use crate::{DateTimeUtc, IssueId, UserId}; +use sea_orm::entity::prelude::*; +use serde::{Deserialize, Serialize}; + +use super::ReactionType; + +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)] +#[sea_orm(table_name = "issue_reaction")] +pub struct Model { + #[sea_orm(primary_key, column_name = "issue_uuid")] + pub issue: IssueId, + #[sea_orm(primary_key, column_name = "user_uuid")] + pub user: UserId, + #[sea_orm(primary_key)] + pub reaction: String, + pub created_at: DateTimeUtc, +} + +impl Model { + pub fn reaction_enum(&self) -> Result { + self.reaction.parse() + } +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation {} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/libs/models/issues/issue_repo.rs b/libs/models/issues/issue_repo.rs new file mode 100644 index 0000000..77881f1 --- /dev/null +++ b/libs/models/issues/issue_repo.rs @@ -0,0 +1,18 @@ +use crate::{DateTimeUtc, IssueId, RepoId}; +use sea_orm::entity::prelude::*; +use serde::{Deserialize, Serialize}; + +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)] +#[sea_orm(table_name = "issue_repo")] +pub struct Model { + #[sea_orm(primary_key)] + pub issue: IssueId, + #[sea_orm(primary_key)] + pub repo: RepoId, + pub relation_at: DateTimeUtc, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation {} + +impl ActiveModelBehavior for ActiveModel {} diff --git 
a/libs/models/issues/issue_subscriber.rs b/libs/models/issues/issue_subscriber.rs new file mode 100644 index 0000000..d615cdb --- /dev/null +++ b/libs/models/issues/issue_subscriber.rs @@ -0,0 +1,19 @@ +use crate::{DateTimeUtc, IssueId, UserId}; +use sea_orm::entity::prelude::*; +use serde::{Deserialize, Serialize}; + +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)] +#[sea_orm(table_name = "issue_subscriber")] +pub struct Model { + #[sea_orm(primary_key)] + pub issue: IssueId, + #[sea_orm(primary_key)] + pub user: UserId, + pub subscribed: bool, + pub created_at: DateTimeUtc, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation {} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/libs/models/issues/mod.rs b/libs/models/issues/mod.rs new file mode 100644 index 0000000..64d4c65 --- /dev/null +++ b/libs/models/issues/mod.rs @@ -0,0 +1,81 @@ +use serde::{Deserialize, Serialize}; + +/// Issue state. Stored as `"open"` or `"closed"`. +#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] +pub enum IssueState { + Open, + Closed, +} + +impl std::fmt::Display for IssueState { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + IssueState::Open => write!(f, "open"), + IssueState::Closed => write!(f, "closed"), + } + } +} + +impl std::str::FromStr for IssueState { + type Err = &'static str; + fn from_str(s: &str) -> Result { + match s { + "open" => Ok(IssueState::Open), + "closed" => Ok(IssueState::Closed), + _ => Err("unknown issue state"), + } + } +} + +/// Reaction / emoji type. Stored as `"thumbs_up"`, `"eyes"`, `"heart"`, `"party"`. 
+#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] +pub enum ReactionType { + ThumbsUp, + Eyes, + Heart, + Party, +} + +impl std::fmt::Display for ReactionType { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + ReactionType::ThumbsUp => write!(f, "thumbs_up"), + ReactionType::Eyes => write!(f, "eyes"), + ReactionType::Heart => write!(f, "heart"), + ReactionType::Party => write!(f, "party"), + } + } +} + +impl std::str::FromStr for ReactionType { + type Err = &'static str; + fn from_str(s: &str) -> Result { + match s { + "thumbs_up" => Ok(ReactionType::ThumbsUp), + "eyes" => Ok(ReactionType::Eyes), + "heart" => Ok(ReactionType::Heart), + "party" => Ok(ReactionType::Party), + _ => Err("unknown reaction type"), + } + } +} + +pub use issue::Entity as Issue; +pub use issue_assignee::Entity as IssueAssignee; +pub use issue_comment::Entity as IssueComment; +pub use issue_comment_reaction::Entity as IssueCommentReaction; +pub use issue_label::Entity as IssueLabel; +pub use issue_pull_request::Entity as IssuePullRequest; +pub use issue_reaction::Entity as IssueReaction; +pub use issue_repo::Entity as IssueRepo; +pub use issue_subscriber::Entity as IssueSubscriber; + +pub mod issue; +pub mod issue_assignee; +pub mod issue_comment; +pub mod issue_comment_reaction; +pub mod issue_label; +pub mod issue_pull_request; +pub mod issue_reaction; +pub mod issue_repo; +pub mod issue_subscriber; diff --git a/libs/models/lib.rs b/libs/models/lib.rs new file mode 100644 index 0000000..94b9838 --- /dev/null +++ b/libs/models/lib.rs @@ -0,0 +1,37 @@ +pub use sea_orm::entity::prelude::*; + +pub mod agents; +pub mod agent_task; +pub mod ai; +pub mod issues; +pub mod projects; +pub mod pull_request; +pub mod repos; +pub mod rooms; +pub mod system; +pub mod users; +pub mod workspaces; +pub use chrono::Utc as UtcClock; + +pub use workspaces::{Workspace, WorkspaceRole}; +pub type WorkspaceId = Uuid; + +pub use agent_task::{AgentType, 
TaskStatus}; +pub type AgentTaskId = i64; +pub type UserId = Uuid; +pub type ProjectId = Uuid; +pub type RepoId = Uuid; +pub type RepoUpStreamId = Uuid; +pub type IssueId = Uuid; +pub type PullRequestId = Uuid; +pub type ModelId = Uuid; +pub type ModelVersionId = Uuid; +pub type ModelProviderId = Uuid; +pub type RoomId = Uuid; +pub type RoomCategoryId = Uuid; +pub type MessageId = Uuid; +pub type RoomThreadId = Uuid; +pub type AiSessionId = Uuid; +pub type Seq = i64; +pub type LabelId = i64; +pub type DateTimeUtc = chrono::DateTime; diff --git a/libs/models/projects/mod.rs b/libs/models/projects/mod.rs new file mode 100644 index 0000000..76fee56 --- /dev/null +++ b/libs/models/projects/mod.rs @@ -0,0 +1,73 @@ +use serde::{Deserialize, Serialize}; +use utoipa::ToSchema; + +/// Project member role. Stored as `"owner"`, `"admin"`, or `"member"` in the database. +#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, ToSchema)] +pub enum MemberRole { + Owner, + Admin, + Member, +} + +impl std::fmt::Display for MemberRole { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + MemberRole::Owner => write!(f, "owner"), + MemberRole::Admin => write!(f, "admin"), + MemberRole::Member => write!(f, "member"), + } + } +} + +impl std::str::FromStr for MemberRole { + type Err = &'static str; + fn from_str(s: &str) -> Result { + match s { + "owner" => Ok(MemberRole::Owner), + "admin" => Ok(MemberRole::Admin), + "member" => Ok(MemberRole::Member), + _ => Err("unknown member role"), + } + } +} + +pub use project::Entity as Project; +pub use project_access_log::Entity as ProjectAccessLog; +pub use project_activity::Entity as ProjectActivity; +pub use project_audit_log::Entity as ProjectAuditLog; +pub use project_billing::Entity as ProjectBilling; +pub use project_billing_history::Entity as ProjectBillingHistory; +pub use project_board::Entity as ProjectBoard; +pub use project_board_card::Entity as ProjectBoardCard; +pub use 
project_board_column::Entity as ProjectBoardColumn; +pub use project_follow::Entity as ProjectFollow; +pub use project_history_name::Entity as ProjectHistoryName; +pub use project_label::Entity as ProjectLabel; +pub use project_skill::{Entity as ProjectSkill, SkillMetadata, SkillSource}; +pub mod project_skill; +pub use project_member_invitations::Entity as ProjectMemberInvitations; +pub use project_member_join_answers::Entity as ProjectMemberJoinAnswers; +pub use project_member_join_request::Entity as ProjectMemberJoinRequest; +pub use project_member_join_settings::Entity as ProjectMemberJoinSettings; +pub use project_members::Entity as ProjectMember; +pub use project_watch::Entity as ProjectWatch; + +pub mod project; +pub mod project_access_log; +pub mod project_activity; +pub mod project_audit_log; +pub mod project_billing; +pub mod project_billing_history; +pub mod project_board; +pub mod project_board_card; +pub mod project_board_column; +pub mod project_follow; +pub mod project_history_name; +pub mod project_label; +pub mod project_like; +pub mod project_member_invitations; +pub mod project_member_join_answers; +pub mod project_member_join_request; +pub mod project_member_join_settings; +pub mod project_members; +pub mod project_watch; diff --git a/libs/models/projects/project.rs b/libs/models/projects/project.rs new file mode 100644 index 0000000..a6371c4 --- /dev/null +++ b/libs/models/projects/project.rs @@ -0,0 +1,24 @@ +use crate::{DateTimeUtc, ProjectId, UserId, WorkspaceId}; +use sea_orm::entity::prelude::*; +use serde::{Deserialize, Serialize}; + +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)] +#[sea_orm(table_name = "project")] +pub struct Model { + #[sea_orm(primary_key)] + pub id: ProjectId, + pub name: String, + pub display_name: String, + pub avatar_url: Option, + pub description: Option, + pub is_public: bool, + pub created_by: UserId, + pub workspace_id: Option, + pub created_at: DateTimeUtc, + pub updated_at: 
DateTimeUtc, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation {} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/libs/models/projects/project_access_log.rs b/libs/models/projects/project_access_log.rs new file mode 100644 index 0000000..9dba021 --- /dev/null +++ b/libs/models/projects/project_access_log.rs @@ -0,0 +1,63 @@ +use crate::{DateTimeUtc, ProjectId, UserId}; +use sea_orm::entity::prelude::*; +use serde::{Deserialize, Serialize}; + +/// Stored as `"create"`, `"read"`, `"update"`, `"delete"`, `"transfer"` etc. +#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] +pub enum AccessAction { + Create, + Read, + Update, + Delete, + Transfer, + Invite, + RemoveMember, +} + +impl std::fmt::Display for AccessAction { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + AccessAction::Create => write!(f, "create"), + AccessAction::Read => write!(f, "read"), + AccessAction::Update => write!(f, "update"), + AccessAction::Delete => write!(f, "delete"), + AccessAction::Transfer => write!(f, "transfer"), + AccessAction::Invite => write!(f, "invite"), + AccessAction::RemoveMember => write!(f, "remove_member"), + } + } +} + +impl std::str::FromStr for AccessAction { + type Err = &'static str; + fn from_str(s: &str) -> Result { + match s { + "create" => Ok(AccessAction::Create), + "read" => Ok(AccessAction::Read), + "update" => Ok(AccessAction::Update), + "delete" => Ok(AccessAction::Delete), + "transfer" => Ok(AccessAction::Transfer), + "invite" => Ok(AccessAction::Invite), + "remove_member" => Ok(AccessAction::RemoveMember), + _ => Err("unknown access action"), + } + } +} + +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)] +#[sea_orm(table_name = "project_access_log")] +pub struct Model { + #[sea_orm(primary_key)] + pub id: i64, + pub project: ProjectId, + pub actor_uid: Option, + pub action: String, + pub ip_address: Option, + pub user_agent: Option, + pub 
created_at: DateTimeUtc, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation {} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/libs/models/projects/project_activity.rs b/libs/models/projects/project_activity.rs new file mode 100644 index 0000000..ed0db8f --- /dev/null +++ b/libs/models/projects/project_activity.rs @@ -0,0 +1,195 @@ +use crate::{DateTimeUtc, ProjectId, RepoId, UserId}; +use sea_orm::entity::prelude::*; +use serde::{Deserialize, Serialize}; + +/// All possible event types for project activity feed. +#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] +pub enum ActivityEventType { + // Git / Repo + CommitPush, + BranchCreate, + BranchDelete, + BranchRename, + TagCreate, + TagDelete, + RepoCreate, + RepoUpdate, + RepoDelete, + RepoStar, + RepoUnstar, + RepoWatch, + RepoUnwatch, + // Issues + IssueOpen, + IssueClose, + IssueReopen, + IssueUpdate, + IssueDelete, + IssueComment, + IssueLabelAdd, + IssueLabelRemove, + IssueAssigneeAdd, + IssueAssigneeRemove, + // Pull Requests + PrOpen, + PrMerge, + PrClose, + PrUpdate, + PrReview, + PrReviewComment, + // Room + RoomMessage, + RoomCreate, + RoomDelete, + RoomUpdate, + RoomPin, + RoomThread, + // Project + ProjectStar, + ProjectUnstar, + ProjectWatch, + ProjectUnwatch, + MemberAdd, + MemberRemove, + MemberRoleChange, + LabelCreate, + LabelUpdate, + LabelDelete, +} + +impl std::fmt::Display for ActivityEventType { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + let s = match self { + ActivityEventType::CommitPush => "commit_push", + ActivityEventType::BranchCreate => "branch_create", + ActivityEventType::BranchDelete => "branch_delete", + ActivityEventType::BranchRename => "branch_rename", + ActivityEventType::TagCreate => "tag_create", + ActivityEventType::TagDelete => "tag_delete", + ActivityEventType::RepoCreate => "repo_create", + ActivityEventType::RepoUpdate => "repo_update", + ActivityEventType::RepoDelete => "repo_delete", 
+ ActivityEventType::RepoStar => "repo_star", + ActivityEventType::RepoUnstar => "repo_unstar", + ActivityEventType::RepoWatch => "repo_watch", + ActivityEventType::RepoUnwatch => "repo_unwatch", + ActivityEventType::IssueOpen => "issue_open", + ActivityEventType::IssueClose => "issue_close", + ActivityEventType::IssueReopen => "issue_reopen", + ActivityEventType::IssueUpdate => "issue_update", + ActivityEventType::IssueDelete => "issue_delete", + ActivityEventType::IssueComment => "issue_comment", + ActivityEventType::IssueLabelAdd => "issue_label_add", + ActivityEventType::IssueLabelRemove => "issue_label_remove", + ActivityEventType::IssueAssigneeAdd => "issue_assignee_add", + ActivityEventType::IssueAssigneeRemove => "issue_assignee_remove", + ActivityEventType::PrOpen => "pr_open", + ActivityEventType::PrMerge => "pr_merge", + ActivityEventType::PrClose => "pr_close", + ActivityEventType::PrUpdate => "pr_update", + ActivityEventType::PrReview => "pr_review", + ActivityEventType::PrReviewComment => "pr_review_comment", + ActivityEventType::RoomMessage => "room_message", + ActivityEventType::RoomCreate => "room_create", + ActivityEventType::RoomDelete => "room_delete", + ActivityEventType::RoomUpdate => "room_update", + ActivityEventType::RoomPin => "room_pin", + ActivityEventType::RoomThread => "room_thread", + ActivityEventType::ProjectStar => "project_star", + ActivityEventType::ProjectUnstar => "project_unstar", + ActivityEventType::ProjectWatch => "project_watch", + ActivityEventType::ProjectUnwatch => "project_unwatch", + ActivityEventType::MemberAdd => "member_add", + ActivityEventType::MemberRemove => "member_remove", + ActivityEventType::MemberRoleChange => "member_role_change", + ActivityEventType::LabelCreate => "label_create", + ActivityEventType::LabelUpdate => "label_update", + ActivityEventType::LabelDelete => "label_delete", + }; + write!(f, "{}", s) + } +} + +impl std::str::FromStr for ActivityEventType { + type Err = &'static str; + fn 
from_str(s: &str) -> Result { + match s { + "commit_push" => Ok(ActivityEventType::CommitPush), + "branch_create" => Ok(ActivityEventType::BranchCreate), + "branch_delete" => Ok(ActivityEventType::BranchDelete), + "branch_rename" => Ok(ActivityEventType::BranchRename), + "tag_create" => Ok(ActivityEventType::TagCreate), + "tag_delete" => Ok(ActivityEventType::TagDelete), + "repo_create" => Ok(ActivityEventType::RepoCreate), + "repo_update" => Ok(ActivityEventType::RepoUpdate), + "repo_delete" => Ok(ActivityEventType::RepoDelete), + "repo_star" => Ok(ActivityEventType::RepoStar), + "repo_unstar" => Ok(ActivityEventType::RepoUnstar), + "repo_watch" => Ok(ActivityEventType::RepoWatch), + "repo_unwatch" => Ok(ActivityEventType::RepoUnwatch), + "issue_open" => Ok(ActivityEventType::IssueOpen), + "issue_close" => Ok(ActivityEventType::IssueClose), + "issue_reopen" => Ok(ActivityEventType::IssueReopen), + "issue_update" => Ok(ActivityEventType::IssueUpdate), + "issue_delete" => Ok(ActivityEventType::IssueDelete), + "issue_comment" => Ok(ActivityEventType::IssueComment), + "issue_label_add" => Ok(ActivityEventType::IssueLabelAdd), + "issue_label_remove" => Ok(ActivityEventType::IssueLabelRemove), + "issue_assignee_add" => Ok(ActivityEventType::IssueAssigneeAdd), + "issue_assignee_remove" => Ok(ActivityEventType::IssueAssigneeRemove), + "pr_open" => Ok(ActivityEventType::PrOpen), + "pr_merge" => Ok(ActivityEventType::PrMerge), + "pr_close" => Ok(ActivityEventType::PrClose), + "pr_update" => Ok(ActivityEventType::PrUpdate), + "pr_review" => Ok(ActivityEventType::PrReview), + "pr_review_comment" => Ok(ActivityEventType::PrReviewComment), + "room_message" => Ok(ActivityEventType::RoomMessage), + "room_create" => Ok(ActivityEventType::RoomCreate), + "room_delete" => Ok(ActivityEventType::RoomDelete), + "room_update" => Ok(ActivityEventType::RoomUpdate), + "room_pin" => Ok(ActivityEventType::RoomPin), + "room_thread" => Ok(ActivityEventType::RoomThread), + "project_star" => 
Ok(ActivityEventType::ProjectStar), + "project_unstar" => Ok(ActivityEventType::ProjectUnstar), + "project_watch" => Ok(ActivityEventType::ProjectWatch), + "project_unwatch" => Ok(ActivityEventType::ProjectUnwatch), + "member_add" => Ok(ActivityEventType::MemberAdd), + "member_remove" => Ok(ActivityEventType::MemberRemove), + "member_role_change" => Ok(ActivityEventType::MemberRoleChange), + "label_create" => Ok(ActivityEventType::LabelCreate), + "label_update" => Ok(ActivityEventType::LabelUpdate), + "label_delete" => Ok(ActivityEventType::LabelDelete), + _ => Err("unknown activity event type"), + } + } +} + +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)] +#[sea_orm(table_name = "project_activity")] +pub struct Model { + #[sea_orm(primary_key, auto_increment = true)] + pub id: i64, + pub project: ProjectId, + #[sea_orm(nullable)] + pub repo: Option, + pub actor: UserId, + #[sea_orm(column_type = "Text")] + pub event_type: String, + #[sea_orm(nullable)] + pub event_id: Option, + #[sea_orm(nullable)] + pub event_sub_id: Option, + #[sea_orm(column_type = "Text")] + pub title: String, + #[sea_orm(nullable, column_type = "Text")] + pub content: Option, + #[sea_orm(nullable, column_type = "JsonBinary")] + pub metadata: Option, + pub is_private: bool, + pub created_at: DateTimeUtc, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation {} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/libs/models/projects/project_audit_log.rs b/libs/models/projects/project_audit_log.rs new file mode 100644 index 0000000..2bbe1e3 --- /dev/null +++ b/libs/models/projects/project_audit_log.rs @@ -0,0 +1,63 @@ +use crate::{DateTimeUtc, ProjectId, UserId}; +use sea_orm::entity::prelude::*; +use serde::{Deserialize, Serialize}; + +/// Audit action types, stored as strings like `"create"`, `"update"`, `"delete"`, `"transfer"`. 
+#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] +pub enum AuditAction { + Create, + Update, + Delete, + Transfer, + Rename, + SettingsChange, +} + +impl std::fmt::Display for AuditAction { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + AuditAction::Create => write!(f, "create"), + AuditAction::Update => write!(f, "update"), + AuditAction::Delete => write!(f, "delete"), + AuditAction::Transfer => write!(f, "transfer"), + AuditAction::Rename => write!(f, "rename"), + AuditAction::SettingsChange => write!(f, "settings_change"), + } + } +} + +impl std::str::FromStr for AuditAction { + type Err = &'static str; + fn from_str(s: &str) -> Result { + match s { + "create" => Ok(AuditAction::Create), + "update" => Ok(AuditAction::Update), + "delete" => Ok(AuditAction::Delete), + "transfer" => Ok(AuditAction::Transfer), + "rename" => Ok(AuditAction::Rename), + "settings_change" => Ok(AuditAction::SettingsChange), + _ => Err("unknown audit action"), + } + } +} + +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)] +#[sea_orm(table_name = "project_audit_log")] +pub struct Model { + #[sea_orm(primary_key, auto_increment = true)] + pub id: i64, + pub project: ProjectId, + pub actor: UserId, + #[sea_orm(column_type = "Text")] + pub action: String, + #[sea_orm(column_type = "JsonBinary", nullable)] + pub details: Option, + pub ip_address: Option, + pub user_agent: Option, + pub created_at: DateTimeUtc, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation {} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/libs/models/projects/project_billing.rs b/libs/models/projects/project_billing.rs new file mode 100644 index 0000000..3d3e5e2 --- /dev/null +++ b/libs/models/projects/project_billing.rs @@ -0,0 +1,25 @@ +use crate::{DateTimeUtc, ProjectId, UserId}; +use sea_orm::entity::prelude::*; +use serde::{Deserialize, Serialize}; + +/// Per-project billing account holding 
the current balance. +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)] +#[sea_orm(table_name = "project_billing")] +pub struct Model { + #[sea_orm(primary_key)] + #[sea_orm(column_name = "project_uuid")] + pub project: ProjectId, + #[sea_orm(column_type = "Decimal(Some((20, 4)))")] + pub balance: Decimal, + #[sea_orm(column_type = "Text")] + pub currency: String, + #[sea_orm(column_name = "user_uuid")] + pub user: Option, + pub updated_at: DateTimeUtc, + pub created_at: DateTimeUtc, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation {} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/libs/models/projects/project_billing_history.rs b/libs/models/projects/project_billing_history.rs new file mode 100644 index 0000000..c456aee --- /dev/null +++ b/libs/models/projects/project_billing_history.rs @@ -0,0 +1,27 @@ +use crate::{DateTimeUtc, ProjectId, UserId}; +use sea_orm::entity::prelude::*; +use serde::{Deserialize, Serialize}; + +/// Billing transaction history for a project. 
+#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)] +#[sea_orm(table_name = "project_billing_history")] +pub struct Model { + #[sea_orm(primary_key)] + pub uid: Uuid, + pub project: ProjectId, + pub user: Option, + #[sea_orm(column_type = "Decimal(Some((20, 4)))")] + pub amount: Decimal, + #[sea_orm(column_type = "Text")] + pub currency: String, + #[sea_orm(column_type = "Text")] + pub reason: String, + #[sea_orm(column_type = "JsonBinary", nullable)] + pub extra: Option, + pub created_at: DateTimeUtc, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation {} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/libs/models/projects/project_board.rs b/libs/models/projects/project_board.rs new file mode 100644 index 0000000..7ceff66 --- /dev/null +++ b/libs/models/projects/project_board.rs @@ -0,0 +1,22 @@ +use crate::{DateTimeUtc, ProjectId, UserId}; +use sea_orm::entity::prelude::*; +use serde::{Deserialize, Serialize}; + +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)] +#[sea_orm(table_name = "project_board")] +pub struct Model { + #[sea_orm(primary_key)] + pub id: Uuid, + #[sea_orm(column_name = "project_uuid")] + pub project: ProjectId, + pub name: String, + pub description: Option, + pub created_by: UserId, + pub created_at: DateTimeUtc, + pub updated_at: DateTimeUtc, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation {} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/libs/models/projects/project_board_card.rs b/libs/models/projects/project_board_card.rs new file mode 100644 index 0000000..7351f77 --- /dev/null +++ b/libs/models/projects/project_board_card.rs @@ -0,0 +1,28 @@ +use crate::{DateTimeUtc, ProjectId, UserId}; +use sea_orm::entity::prelude::*; +use serde::{Deserialize, Serialize}; + +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)] +#[sea_orm(table_name = "project_board_card")] +pub struct 
Model { + #[sea_orm(primary_key)] + pub id: Uuid, + #[sea_orm(column_name = "column_uuid")] + pub column: Uuid, + pub issue_id: Option, + pub project: Option, + pub title: String, + pub description: Option, + pub position: i32, + pub assignee_id: Option, + pub due_date: Option, + pub priority: Option, + pub created_by: UserId, + pub created_at: DateTimeUtc, + pub updated_at: DateTimeUtc, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation {} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/libs/models/projects/project_board_column.rs b/libs/models/projects/project_board_column.rs new file mode 100644 index 0000000..75259a9 --- /dev/null +++ b/libs/models/projects/project_board_column.rs @@ -0,0 +1,20 @@ +use sea_orm::entity::prelude::*; +use serde::{Deserialize, Serialize}; + +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)] +#[sea_orm(table_name = "project_board_column")] +pub struct Model { + #[sea_orm(primary_key)] + pub id: Uuid, + #[sea_orm(column_name = "board_uuid")] + pub board: Uuid, + pub name: String, + pub position: i32, + pub wip_limit: Option, + pub color: Option, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation {} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/libs/models/projects/project_follow.rs b/libs/models/projects/project_follow.rs new file mode 100644 index 0000000..047fb79 --- /dev/null +++ b/libs/models/projects/project_follow.rs @@ -0,0 +1,18 @@ +use crate::{DateTimeUtc, ProjectId, UserId}; +use sea_orm::entity::prelude::*; +use serde::{Deserialize, Serialize}; + +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)] +#[sea_orm(table_name = "project_follow")] +pub struct Model { + #[sea_orm(primary_key)] + pub id: i64, + pub project: ProjectId, + pub user: UserId, + pub created_at: DateTimeUtc, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation {} + +impl ActiveModelBehavior 
for ActiveModel {} diff --git a/libs/models/projects/project_history_name.rs b/libs/models/projects/project_history_name.rs new file mode 100644 index 0000000..5745dab --- /dev/null +++ b/libs/models/projects/project_history_name.rs @@ -0,0 +1,18 @@ +use crate::{DateTimeUtc, ProjectId}; +use sea_orm::entity::prelude::*; +use serde::{Deserialize, Serialize}; + +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)] +#[sea_orm(table_name = "project_history_name")] +pub struct Model { + #[sea_orm(primary_key)] + pub id: i64, + pub project_uid: ProjectId, + pub history_name: String, + pub changed_at: DateTimeUtc, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation {} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/libs/models/projects/project_label.rs b/libs/models/projects/project_label.rs new file mode 100644 index 0000000..52c8eb9 --- /dev/null +++ b/libs/models/projects/project_label.rs @@ -0,0 +1,20 @@ +use crate::{DateTimeUtc, ProjectId}; +use sea_orm::entity::prelude::*; +use serde::{Deserialize, Serialize}; + +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)] +#[sea_orm(table_name = "project_label")] +pub struct Model { + #[sea_orm(primary_key)] + pub id: i64, + #[sea_orm(column_name = "project_uuid")] + pub project: ProjectId, + #[sea_orm(column_name = "label_id")] + pub label: i64, + pub relation_at: DateTimeUtc, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation {} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/libs/models/projects/project_like.rs b/libs/models/projects/project_like.rs new file mode 100644 index 0000000..dd6f94f --- /dev/null +++ b/libs/models/projects/project_like.rs @@ -0,0 +1,16 @@ +use crate::{ProjectId, UserId}; +use sea_orm::entity::prelude::*; +use serde::{Deserialize, Serialize}; + +#[sea_orm::model] +#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel, Serialize, Deserialize)] 
+#[sea_orm(table_name = "project_like")] +pub struct Model { + #[sea_orm(primary_key, auto_increment = false)] + pub project: ProjectId, + #[sea_orm(primary_key, auto_increment = false)] + pub user: UserId, + pub created_at: DateTimeUtc, +} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/libs/models/projects/project_member_invitations.rs b/libs/models/projects/project_member_invitations.rs new file mode 100644 index 0000000..5a63966 --- /dev/null +++ b/libs/models/projects/project_member_invitations.rs @@ -0,0 +1,32 @@ +use crate::{DateTimeUtc, ProjectId, UserId}; +use sea_orm::entity::prelude::*; +use serde::{Deserialize, Serialize}; + +use super::MemberRole; + +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)] +#[sea_orm(table_name = "project_member_invitations")] +pub struct Model { + #[sea_orm(primary_key)] + pub id: i64, + pub project: ProjectId, + pub user: UserId, + pub invited_by: UserId, + pub scope: String, + pub accepted: bool, + pub accepted_at: Option, + pub rejected: bool, + pub rejected_at: Option, + pub created_at: DateTimeUtc, +} + +impl Model { + pub fn scope_role(&self) -> Result { + self.scope.parse() + } +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation {} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/libs/models/projects/project_member_join_answers.rs b/libs/models/projects/project_member_join_answers.rs new file mode 100644 index 0000000..8314fc1 --- /dev/null +++ b/libs/models/projects/project_member_join_answers.rs @@ -0,0 +1,21 @@ +use crate::{DateTimeUtc, ProjectId, UserId}; +use sea_orm::entity::prelude::*; +use serde::{Deserialize, Serialize}; + +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)] +#[sea_orm(table_name = "project_member_join_answers")] +pub struct Model { + #[sea_orm(primary_key)] + pub id: i64, + pub project: ProjectId, + pub user: UserId, + pub request_id: i64, + pub question: String, + pub answer: String, + 
pub created_at: DateTimeUtc, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation {} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/libs/models/projects/project_member_join_request.rs b/libs/models/projects/project_member_join_request.rs new file mode 100644 index 0000000..3a8536f --- /dev/null +++ b/libs/models/projects/project_member_join_request.rs @@ -0,0 +1,57 @@ +use crate::{DateTimeUtc, ProjectId, UserId}; +use sea_orm::entity::prelude::*; +use serde::{Deserialize, Serialize}; + +/// Stored as `"pending"`, `"approved"`, `"rejected"`, or `"cancelled"`. +#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] +pub enum JoinRequestStatus { + Pending, + Approved, + Rejected, + Cancelled, +} + +impl std::fmt::Display for JoinRequestStatus { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + JoinRequestStatus::Pending => write!(f, "pending"), + JoinRequestStatus::Approved => write!(f, "approved"), + JoinRequestStatus::Rejected => write!(f, "rejected"), + JoinRequestStatus::Cancelled => write!(f, "cancelled"), + } + } +} + +impl std::str::FromStr for JoinRequestStatus { + type Err = &'static str; + fn from_str(s: &str) -> Result { + match s { + "pending" => Ok(JoinRequestStatus::Pending), + "approved" => Ok(JoinRequestStatus::Approved), + "rejected" => Ok(JoinRequestStatus::Rejected), + "cancelled" => Ok(JoinRequestStatus::Cancelled), + _ => Err("unknown join request status"), + } + } +} + +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)] +#[sea_orm(table_name = "project_member_join_request")] +pub struct Model { + #[sea_orm(primary_key)] + pub id: i64, + pub project: ProjectId, + pub user: UserId, + pub status: String, + pub message: Option, + pub processed_by: Option, + pub processed_at: Option, + pub reject_reason: Option, + pub created_at: DateTimeUtc, + pub updated_at: DateTimeUtc, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub 
enum Relation {} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/libs/models/projects/project_member_join_settings.rs b/libs/models/projects/project_member_join_settings.rs new file mode 100644 index 0000000..352ca4e --- /dev/null +++ b/libs/models/projects/project_member_join_settings.rs @@ -0,0 +1,22 @@ +use crate::{DateTimeUtc, ProjectId}; +use sea_orm::JsonValue; +use sea_orm::entity::prelude::*; +use serde::{Deserialize, Serialize}; + +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)] +#[sea_orm(table_name = "project_member_join_settings")] +pub struct Model { + #[sea_orm(primary_key)] + pub id: i64, + pub project: ProjectId, + pub require_approval: bool, + pub require_questions: bool, + pub questions: JsonValue, + pub created_at: DateTimeUtc, + pub updated_at: DateTimeUtc, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation {} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/libs/models/projects/project_members.rs b/libs/models/projects/project_members.rs new file mode 100644 index 0000000..32b7ce7 --- /dev/null +++ b/libs/models/projects/project_members.rs @@ -0,0 +1,29 @@ +use crate::{DateTimeUtc, ProjectId, UserId}; +use sea_orm::entity::prelude::*; +use serde::{Deserialize, Serialize}; + +use super::MemberRole; + +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)] +#[sea_orm(table_name = "project_members")] +pub struct Model { + #[sea_orm(primary_key)] + pub id: i64, + #[sea_orm(column_name = "project_uuid")] + pub project: ProjectId, + #[sea_orm(column_name = "user_uuid")] + pub user: UserId, + pub scope: String, + pub joined_at: DateTimeUtc, +} + +impl Model { + pub fn scope_role(&self) -> Result { + self.scope.parse() + } +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation {} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/libs/models/projects/project_skill.rs b/libs/models/projects/project_skill.rs new 
file mode 100644 index 0000000..0a8c004 --- /dev/null +++ b/libs/models/projects/project_skill.rs @@ -0,0 +1,115 @@ +//! Skill registered to a project. +//! +//! Skills can be sourced manually (by project admin) or auto-discovered from +//! repositories within the project (via `.claude/skills/` directory scanning). + +use crate::{DateTimeUtc, ProjectId, RepoId, UserId}; +use sea_orm::entity::prelude::*; +use serde::{Deserialize, Serialize}; + +/// Skill source: `"manual"` or `"repo"`. +#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] +pub enum SkillSource { + Manual, + Repo, +} + +impl std::fmt::Display for SkillSource { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + SkillSource::Manual => write!(f, "manual"), + SkillSource::Repo => write!(f, "repo"), + } + } +} + +impl std::str::FromStr for SkillSource { + type Err = &'static str; + fn from_str(s: &str) -> Result { + match s { + "manual" => Ok(SkillSource::Manual), + "repo" => Ok(SkillSource::Repo), + _ => Err("unknown skill source"), + } + } +} + +/// Parsed frontmatter from a skill's SKILL.md file. +#[derive(Clone, Debug, Default, Serialize, Deserialize)] +pub struct SkillMetadata { + /// Human-readable name (falls back to slug/folder name). + #[serde(default)] + pub name: Option, + /// Short description of what the skill does. + #[serde(default)] + pub description: Option, + /// SPDX license identifier. + #[serde(default)] + pub license: Option, + /// Compatibility notes (e.g. "Requires openspec CLI"). + #[serde(default)] + pub compatibility: Option, + /// Free-form metadata from the frontmatter. + #[serde(default)] + pub metadata: serde_json::Value, +} + +impl From for SkillMetadata { + fn from(v: serde_json::Value) -> Self { + serde_json::from_value(v).unwrap_or_default() + } +} + +/// Skill record persisted in the `project_skill` table. 
+#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)] +#[sea_orm(table_name = "project_skill")] +pub struct Model { + #[sea_orm(primary_key)] + pub id: i64, + /// Project this skill belongs to. + pub project_uuid: ProjectId, + /// URL-safe identifier, unique within a project. + pub slug: String, + /// Display name (extracted from frontmatter or folder name). + pub name: String, + /// Optional short description. + pub description: Option, + /// `"manual"` or `"repo"`. + pub source: String, + /// If source=repo, the repo this skill was discovered from. + #[sea_orm(nullable)] + pub repo_id: Option, + /// If source=repo, the commit SHA where the skill was found. + #[sea_orm(nullable)] + pub commit_sha: Option, + /// If source=repo, the blob SHA of the SKILL.md file. + #[sea_orm(nullable)] + pub blob_hash: Option, + /// Raw markdown content (SKILL.md body after frontmatter). + pub content: String, + /// Full frontmatter as JSON. + #[sea_orm(column_type = "JsonBinary")] + pub metadata: serde_json::Value, + /// Whether this skill is currently active. + pub enabled: bool, + /// Who added this skill (null for repo-sourced skills). 
+ #[sea_orm(nullable)] + pub created_by: Option, + pub created_at: DateTimeUtc, + pub updated_at: DateTimeUtc, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation {} + +impl ActiveModelBehavior for ActiveModel {} + +impl Model { + pub fn source_enum(&self) -> Result { + self.source.parse() + } + + pub fn metadata_parsed(&self) -> SkillMetadata { + SkillMetadata::from(self.metadata.clone()) + } +} diff --git a/libs/models/projects/project_watch.rs b/libs/models/projects/project_watch.rs new file mode 100644 index 0000000..ca2794f --- /dev/null +++ b/libs/models/projects/project_watch.rs @@ -0,0 +1,20 @@ +use crate::{DateTimeUtc, ProjectId, UserId}; +use sea_orm::entity::prelude::*; +use serde::{Deserialize, Serialize}; + +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)] +#[sea_orm(table_name = "project_watch")] +pub struct Model { + #[sea_orm(primary_key)] + pub id: i64, + pub project: ProjectId, + pub user: UserId, + pub notifications_enabled: bool, + pub created_at: DateTimeUtc, + pub updated_at: DateTimeUtc, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation {} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/libs/models/pull_request/mod.rs b/libs/models/pull_request/mod.rs new file mode 100644 index 0000000..3d3ea48 --- /dev/null +++ b/libs/models/pull_request/mod.rs @@ -0,0 +1,93 @@ +use serde::{Deserialize, Serialize}; + +/// Overall PR status. Combines review decision and merge readiness into a +/// single enum for quick querying. +/// +/// Stored as `"draft"`, `"open"`, `"approved"`, `"changes_requested"`, +/// `"conflict"`, `"merged"`, or `"closed"`. 
+#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] +pub enum PrStatus { + Draft, + Open, + Approved, + ChangesRequested, + Conflict, + Merged, + Closed, +} + +impl std::fmt::Display for PrStatus { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + PrStatus::Draft => write!(f, "draft"), + PrStatus::Open => write!(f, "open"), + PrStatus::Approved => write!(f, "approved"), + PrStatus::ChangesRequested => write!(f, "changes_requested"), + PrStatus::Conflict => write!(f, "conflict"), + PrStatus::Merged => write!(f, "merged"), + PrStatus::Closed => write!(f, "closed"), + } + } +} + +impl std::str::FromStr for PrStatus { + type Err = &'static str; + fn from_str(s: &str) -> Result { + match s { + "draft" => Ok(PrStatus::Draft), + "open" => Ok(PrStatus::Open), + "approved" => Ok(PrStatus::Approved), + "changes_requested" => Ok(PrStatus::ChangesRequested), + "conflict" => Ok(PrStatus::Conflict), + "merged" => Ok(PrStatus::Merged), + "closed" => Ok(PrStatus::Closed), + _ => Err("unknown PR status"), + } + } +} + +/// Overall review decision from a single reviewer. Stored as `"pending"`, +/// `"approved"`, `"changes_requested"`, or `"comment"`. 
+#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] +pub enum ReviewState { + Pending, + Approved, + ChangesRequested, + Comment, +} + +impl std::fmt::Display for ReviewState { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + ReviewState::Pending => write!(f, "pending"), + ReviewState::Approved => write!(f, "approved"), + ReviewState::ChangesRequested => write!(f, "changes_requested"), + ReviewState::Comment => write!(f, "comment"), + } + } +} + +impl std::str::FromStr for ReviewState { + type Err = &'static str; + fn from_str(s: &str) -> Result { + match s { + "pending" => Ok(ReviewState::Pending), + "approved" => Ok(ReviewState::Approved), + "changes_requested" => Ok(ReviewState::ChangesRequested), + "comment" => Ok(ReviewState::Comment), + _ => Err("unknown review state"), + } + } +} + +pub use pull_request::Entity as PullRequest; +pub use pull_request_commit::Entity as PullRequestCommit; +pub use pull_request_review::Entity as PullRequestReview; +pub use pull_request_review_comment::Entity as PullRequestReviewComment; +pub use pull_request_review_request::Entity as PullRequestReviewRequest; + +pub mod pull_request; +pub mod pull_request_commit; +pub mod pull_request_review; +pub mod pull_request_review_comment; +pub mod pull_request_review_request; diff --git a/libs/models/pull_request/pull_request.rs b/libs/models/pull_request/pull_request.rs new file mode 100644 index 0000000..f878069 --- /dev/null +++ b/libs/models/pull_request/pull_request.rs @@ -0,0 +1,37 @@ +use crate::{DateTimeUtc, IssueId, RepoId, UserId}; +use sea_orm::entity::prelude::*; +use serde::{Deserialize, Serialize}; + +use super::PrStatus; + +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)] +#[sea_orm(table_name = "pull_request")] +pub struct Model { + #[sea_orm(primary_key)] + pub repo: RepoId, + #[sea_orm(primary_key)] + pub number: i64, + pub issue: IssueId, + pub title: String, + pub body: Option, + pub 
author: UserId, + pub base: String, + pub head: String, + pub status: String, + pub merged_by: Option, + pub created_at: DateTimeUtc, + pub updated_at: DateTimeUtc, + pub merged_at: Option, + pub created_by_ai: bool, +} + +impl Model { + pub fn status_enum(&self) -> Result { + self.status.parse() + } +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation {} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/libs/models/pull_request/pull_request_commit.rs b/libs/models/pull_request/pull_request_commit.rs new file mode 100644 index 0000000..95b6ccd --- /dev/null +++ b/libs/models/pull_request/pull_request_commit.rs @@ -0,0 +1,27 @@ +use crate::{DateTimeUtc, RepoId}; +use sea_orm::entity::prelude::*; +use serde::{Deserialize, Serialize}; + +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)] +#[sea_orm(table_name = "pull_request_commit")] +pub struct Model { + #[sea_orm(primary_key)] + pub repo: RepoId, + #[sea_orm(primary_key)] + pub number: i64, + #[sea_orm(primary_key)] + pub commit: String, + pub message: String, + pub author_name: String, + pub author_email: String, + pub authored_at: DateTimeUtc, + pub committer_name: String, + pub committer_email: String, + pub committed_at: DateTimeUtc, + pub created_at: DateTimeUtc, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation {} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/libs/models/pull_request/pull_request_review.rs b/libs/models/pull_request/pull_request_review.rs new file mode 100644 index 0000000..58af978 --- /dev/null +++ b/libs/models/pull_request/pull_request_review.rs @@ -0,0 +1,32 @@ +use crate::{DateTimeUtc, RepoId, UserId}; +use sea_orm::entity::prelude::*; +use serde::{Deserialize, Serialize}; + +use super::ReviewState; + +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)] +#[sea_orm(table_name = "pull_request_review")] +pub struct Model { + #[sea_orm(primary_key)] + pub 
repo: RepoId, + #[sea_orm(primary_key)] + pub number: i64, + #[sea_orm(primary_key)] + pub reviewer: UserId, + pub state: String, + pub body: Option, + pub submitted_at: Option, + pub created_at: DateTimeUtc, + pub updated_at: DateTimeUtc, +} + +impl Model { + pub fn state_enum(&self) -> Result { + self.state.parse() + } +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation {} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/libs/models/pull_request/pull_request_review_comment.rs b/libs/models/pull_request/pull_request_review_comment.rs new file mode 100644 index 0000000..e6659d5 --- /dev/null +++ b/libs/models/pull_request/pull_request_review_comment.rs @@ -0,0 +1,37 @@ +use crate::{DateTimeUtc, RepoId, UserId}; +use sea_orm::entity::prelude::*; +use serde::{Deserialize, Serialize}; + +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)] +#[sea_orm(table_name = "pull_request_review_comment")] +pub struct Model { + #[sea_orm(primary_key)] + pub repo: RepoId, + #[sea_orm(primary_key)] + pub number: i64, + #[sea_orm(primary_key)] + pub id: i64, + /// Optional reviewer association. + pub review: Option, + /// File path for inline comments. + pub path: Option, + /// "LEFT" or "RIGHT" for side-by-side positioning. + pub side: Option, + /// Line number in the new (right) file. + pub line: Option, + /// Line number in the old (left) file. + pub old_line: Option, + pub body: String, + pub author: UserId, + /// Whether this comment thread has been resolved. + pub resolved: bool, + /// ID of the parent comment this replies to (null = root comment). 
+ pub in_reply_to: Option, + pub created_at: DateTimeUtc, + pub updated_at: DateTimeUtc, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation {} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/libs/models/pull_request/pull_request_review_request.rs b/libs/models/pull_request/pull_request_review_request.rs new file mode 100644 index 0000000..a60f229 --- /dev/null +++ b/libs/models/pull_request/pull_request_review_request.rs @@ -0,0 +1,27 @@ +use crate::{DateTimeUtc, RepoId, UserId}; +use sea_orm::entity::prelude::*; +use serde::{Deserialize, Serialize}; + +/// Tracks a review request: a PR author has asked a specific user to review. +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)] +#[sea_orm(table_name = "pull_request_review_request")] +pub struct Model { + #[sea_orm(primary_key)] + pub repo: RepoId, + #[sea_orm(primary_key)] + pub number: i64, + #[sea_orm(primary_key)] + pub reviewer: UserId, + /// Who requested this review. + pub requested_by: UserId, + pub requested_at: DateTimeUtc, + /// When the reviewer was dismissed (null = still pending). + pub dismissed_at: Option, + /// Who dismissed the request (null if not dismissed). + pub dismissed_by: Option, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation {} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/libs/models/repos/mod.rs b/libs/models/repos/mod.rs new file mode 100644 index 0000000..20fdd90 --- /dev/null +++ b/libs/models/repos/mod.rs @@ -0,0 +1,155 @@ +use serde::{Deserialize, Serialize}; + +/// Repository collaborator role. Stored as `"read"`, `"write"`, or `"admin"`. 
+#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] +pub enum CollabRole { + Read, + Write, + Admin, +} + +impl std::fmt::Display for CollabRole { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + CollabRole::Read => write!(f, "read"), + CollabRole::Write => write!(f, "write"), + CollabRole::Admin => write!(f, "admin"), + } + } +} + +impl std::str::FromStr for CollabRole { + type Err = &'static str; + fn from_str(s: &str) -> Result { + match s { + "read" => Ok(CollabRole::Read), + "write" => Ok(CollabRole::Write), + "admin" => Ok(CollabRole::Admin), + _ => Err("unknown collaborator role"), + } + } +} + +/// LFS lock type. Stored as `"rd"`, `"rn"` or `"rw"`. +#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] +pub enum LockType { + Rd, + Rn, + Rw, +} + +impl std::fmt::Display for LockType { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + LockType::Rd => write!(f, "rd"), + LockType::Rn => write!(f, "rn"), + LockType::Rw => write!(f, "rw"), + } + } +} + +impl std::str::FromStr for LockType { + type Err = &'static str; + fn from_str(s: &str) -> Result { + match s { + "rd" => Ok(LockType::Rd), + "rn" => Ok(LockType::Rn), + "rw" => Ok(LockType::Rw), + _ => Err("unknown lock type"), + } + } +} + +/// Upstream sync direction. Stored as `"push"` or `"pull"`. +#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] +pub enum SyncDirection { + Push, + Pull, +} + +impl std::fmt::Display for SyncDirection { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + SyncDirection::Push => write!(f, "push"), + SyncDirection::Pull => write!(f, "pull"), + } + } +} + +impl std::str::FromStr for SyncDirection { + type Err = &'static str; + fn from_str(s: &str) -> Result { + match s { + "push" => Ok(SyncDirection::Push), + "pull" => Ok(SyncDirection::Pull), + _ => Err("unknown sync direction"), + } + } +} + +/// Upstream sync status. 
+#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] +pub enum SyncStatus { + Idle, + Syncing, + Success, + Failed, +} + +impl std::fmt::Display for SyncStatus { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + SyncStatus::Idle => write!(f, "idle"), + SyncStatus::Syncing => write!(f, "syncing"), + SyncStatus::Success => write!(f, "success"), + SyncStatus::Failed => write!(f, "failed"), + } + } +} + +impl std::str::FromStr for SyncStatus { + type Err = &'static str; + fn from_str(s: &str) -> Result { + match s { + "idle" => Ok(SyncStatus::Idle), + "syncing" => Ok(SyncStatus::Syncing), + "success" => Ok(SyncStatus::Success), + "failed" => Ok(SyncStatus::Failed), + _ => Err("unknown sync status"), + } + } +} + +pub use repo::Entity as Repo; +pub use repo_branch::Entity as RepoBranch; +pub use repo_branch_protect::Entity as RepoBranchProtect; +pub use repo_collaborator::Entity as RepoCollaborator; +pub use repo_commit::Entity as RepoCommit; +pub use repo_fork::Entity as RepoFork; +pub use repo_history_name::Entity as RepoHistoryName; +pub use repo_hook::Entity as RepoHook; +pub use repo_lfs_lock::Entity as RepoLfsLock; +pub use repo_lfs_object::Entity as RepoLfsObject; +pub use repo_lock::Entity as RepoLock; +pub use repo_star::Entity as RepoStar; +pub use repo_tag::Entity as RepoTag; +pub use repo_upstream::Entity as RepoUpstream; +pub use repo_watch::Entity as RepoWatch; +pub use repo_webhook::Entity as RepoWebhook; + +pub mod repo; +pub mod repo_branch; +pub mod repo_branch_protect; +pub mod repo_collaborator; +pub mod repo_commit; +pub mod repo_fork; +pub mod repo_history_name; +pub mod repo_hook; +pub mod repo_lfs_lock; +pub mod repo_lfs_object; +pub mod repo_lock; +pub mod repo_star; +pub mod repo_tag; +pub mod repo_upstream; +pub mod repo_watch; +pub mod repo_webhook; diff --git a/libs/models/repos/repo.rs b/libs/models/repos/repo.rs new file mode 100644 index 0000000..96ebe3d --- /dev/null +++ 
b/libs/models/repos/repo.rs @@ -0,0 +1,26 @@ +use crate::{DateTimeUtc, ProjectId, RepoId, UserId}; +use sea_orm::entity::prelude::*; +use serde::{Deserialize, Serialize}; + +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)] +#[sea_orm(table_name = "repo")] +pub struct Model { + #[sea_orm(primary_key)] + pub id: RepoId, + pub repo_name: String, + pub project: ProjectId, + pub description: Option, + pub default_branch: String, + pub is_private: bool, + pub storage_path: String, + pub created_by: UserId, + pub created_at: DateTimeUtc, + pub updated_at: DateTimeUtc, + #[sea_orm(default)] + pub ai_code_review_enabled: bool, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation {} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/libs/models/repos/repo_branch.rs b/libs/models/repos/repo_branch.rs new file mode 100644 index 0000000..0fdd343 --- /dev/null +++ b/libs/models/repos/repo_branch.rs @@ -0,0 +1,22 @@ +use crate::{DateTimeUtc, RepoId}; +use sea_orm::entity::prelude::*; +use serde::{Deserialize, Serialize}; + +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)] +#[sea_orm(table_name = "repo_branch")] +pub struct Model { + #[sea_orm(primary_key)] + pub repo: RepoId, + #[sea_orm(primary_key)] + pub name: String, + pub oid: String, + pub upstream: Option, + pub head: bool, + pub created_at: DateTimeUtc, + pub updated_at: DateTimeUtc, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation {} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/libs/models/repos/repo_branch_protect.rs b/libs/models/repos/repo_branch_protect.rs new file mode 100644 index 0000000..695f627 --- /dev/null +++ b/libs/models/repos/repo_branch_protect.rs @@ -0,0 +1,28 @@ +use crate::RepoId; +use sea_orm::entity::prelude::*; +use serde::{Deserialize, Serialize}; + +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)] +#[sea_orm(table_name = 
"repo_branch_protect")] +pub struct Model { + #[sea_orm(primary_key)] + pub id: i64, + #[sea_orm(column_name = "repo_uuid")] + pub repo: RepoId, + pub branch: String, + pub forbid_push: bool, + pub forbid_pull: bool, + pub forbid_merge: bool, + pub forbid_deletion: bool, + pub forbid_force_push: bool, + pub forbid_tag_push: bool, + pub required_approvals: i32, + pub dismiss_stale_reviews: bool, + pub require_linear_history: bool, + pub allow_fork_syncing: bool, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation {} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/libs/models/repos/repo_collaborator.rs b/libs/models/repos/repo_collaborator.rs new file mode 100644 index 0000000..979b0f7 --- /dev/null +++ b/libs/models/repos/repo_collaborator.rs @@ -0,0 +1,27 @@ +use crate::{DateTimeUtc, RepoId, UserId}; +use sea_orm::entity::prelude::*; +use serde::{Deserialize, Serialize}; + +use super::CollabRole; + +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)] +#[sea_orm(table_name = "repo_collaborator")] +pub struct Model { + #[sea_orm(primary_key)] + pub repo: RepoId, + #[sea_orm(primary_key)] + pub user: UserId, + pub scope: String, + pub created_at: DateTimeUtc, +} + +impl Model { + pub fn scope_role(&self) -> Result { + self.scope.parse() + } +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation {} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/libs/models/repos/repo_commit.rs b/libs/models/repos/repo_commit.rs new file mode 100644 index 0000000..b1fb016 --- /dev/null +++ b/libs/models/repos/repo_commit.rs @@ -0,0 +1,27 @@ +use crate::{DateTimeUtc, RepoId, UserId}; +use sea_orm::JsonValue; +use sea_orm::entity::prelude::*; +use serde::{Deserialize, Serialize}; + +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)] +#[sea_orm(table_name = "repo_commit")] +pub struct Model { + #[sea_orm(primary_key)] + pub id: i64, + pub repo: RepoId, + pub 
oid: String, + pub author_name: String, + pub author_email: String, + pub author: Option, + pub commiter_name: String, + pub commiter_email: String, + pub commiter: Option, + pub message: String, + pub parent: JsonValue, + pub created_at: DateTimeUtc, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation {} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/libs/models/repos/repo_fork.rs b/libs/models/repos/repo_fork.rs new file mode 100644 index 0000000..82e5b46 --- /dev/null +++ b/libs/models/repos/repo_fork.rs @@ -0,0 +1,19 @@ +use crate::{DateTimeUtc, RepoId, UserId}; +use sea_orm::entity::prelude::*; +use serde::{Deserialize, Serialize}; + +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)] +#[sea_orm(table_name = "repo_fork")] +pub struct Model { + #[sea_orm(primary_key)] + pub id: i64, + pub parent_repo: RepoId, + pub forked_repo: RepoId, + pub forked_by: UserId, + pub forked_at: DateTimeUtc, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation {} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/libs/models/repos/repo_history_name.rs b/libs/models/repos/repo_history_name.rs new file mode 100644 index 0000000..11487ab --- /dev/null +++ b/libs/models/repos/repo_history_name.rs @@ -0,0 +1,21 @@ +use crate::{DateTimeUtc, ProjectId, RepoId}; +use sea_orm::entity::prelude::*; +use serde::{Deserialize, Serialize}; + +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)] +#[sea_orm(table_name = "repo_history_name")] +pub struct Model { + #[sea_orm(primary_key)] + pub id: i64, + #[sea_orm(column_name = "repo_uuid")] + pub repo: RepoId, + #[sea_orm(column_name = "project_uid")] + pub project: ProjectId, + pub name: String, + pub change_at: DateTimeUtc, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation {} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/libs/models/repos/repo_hook.rs 
b/libs/models/repos/repo_hook.rs new file mode 100644 index 0000000..25d6b40 --- /dev/null +++ b/libs/models/repos/repo_hook.rs @@ -0,0 +1,21 @@ +use crate::{DateTimeUtc, RepoId}; +use sea_orm::JsonValue; +use sea_orm::entity::prelude::*; +use serde::{Deserialize, Serialize}; + +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)] +#[sea_orm(table_name = "repo_hook")] +pub struct Model { + #[sea_orm(primary_key)] + pub id: i64, + #[sea_orm(column_name = "repo_uuid")] + pub repo: RepoId, + pub event: JsonValue, + pub script: String, + pub created_at: DateTimeUtc, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation {} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/libs/models/repos/repo_lfs_lock.rs b/libs/models/repos/repo_lfs_lock.rs new file mode 100644 index 0000000..0a32d62 --- /dev/null +++ b/libs/models/repos/repo_lfs_lock.rs @@ -0,0 +1,31 @@ +use crate::{DateTimeUtc, RepoId, UserId}; +use sea_orm::entity::prelude::*; +use serde::{Deserialize, Serialize}; + +use super::LockType; + +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)] +#[sea_orm(table_name = "repo_lfs_lock")] +pub struct Model { + #[sea_orm(primary_key)] + #[sea_orm(column_name = "repo_uuid")] + pub repo: RepoId, + #[sea_orm(primary_key)] + pub path: String, + pub lock_type: String, + #[sea_orm(column_name = "locked_by")] + pub locked_by: UserId, + pub locked_at: DateTimeUtc, + pub unlocked_at: Option, +} + +impl Model { + pub fn lock_type_enum(&self) -> Result { + self.lock_type.parse() + } +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation {} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/libs/models/repos/repo_lfs_object.rs b/libs/models/repos/repo_lfs_object.rs new file mode 100644 index 0000000..6382087 --- /dev/null +++ b/libs/models/repos/repo_lfs_object.rs @@ -0,0 +1,23 @@ +use crate::{DateTimeUtc, RepoId, UserId}; +use sea_orm::entity::prelude::*; +use 
serde::{Deserialize, Serialize}; + +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)] +#[sea_orm(table_name = "repo_lfs_object")] +pub struct Model { + #[sea_orm(primary_key)] + pub id: i64, + pub oid: String, + #[sea_orm(column_name = "repo_uuid")] + pub repo: RepoId, + pub size: i64, + pub storage_path: String, + #[sea_orm(column_name = "uploaded_by")] + pub uploaded_by: Option, + pub uploaded_at: DateTimeUtc, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation {} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/libs/models/repos/repo_lock.rs b/libs/models/repos/repo_lock.rs new file mode 100644 index 0000000..31382e2 --- /dev/null +++ b/libs/models/repos/repo_lock.rs @@ -0,0 +1,31 @@ +use crate::{DateTimeUtc, RepoId, UserId}; +use sea_orm::entity::prelude::*; +use serde::{Deserialize, Serialize}; + +use super::LockType; + +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)] +#[sea_orm(table_name = "repo_lock")] +pub struct Model { + #[sea_orm(primary_key)] + #[sea_orm(column_name = "repo_uuid")] + pub repo: RepoId, + #[sea_orm(primary_key)] + pub path: String, + pub lock_type: String, + #[sea_orm(column_name = "locked_by")] + pub locked_by: UserId, + pub acquired_at: DateTimeUtc, + pub released_at: Option, +} + +impl Model { + pub fn lock_type_enum(&self) -> Result { + self.lock_type.parse() + } +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation {} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/libs/models/repos/repo_star.rs b/libs/models/repos/repo_star.rs new file mode 100644 index 0000000..9df706d --- /dev/null +++ b/libs/models/repos/repo_star.rs @@ -0,0 +1,20 @@ +use crate::{DateTimeUtc, RepoId, UserId}; +use sea_orm::entity::prelude::*; +use serde::{Deserialize, Serialize}; + +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)] +#[sea_orm(table_name = "repo_star")] +pub struct Model { + 
#[sea_orm(primary_key)] + pub id: i64, + #[sea_orm(column_name = "repo_uuid")] + pub repo: RepoId, + #[sea_orm(column_name = "user_uuid")] + pub user: UserId, + pub created_at: DateTimeUtc, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation {} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/libs/models/repos/repo_tag.rs b/libs/models/repos/repo_tag.rs new file mode 100644 index 0000000..4f5d18b --- /dev/null +++ b/libs/models/repos/repo_tag.rs @@ -0,0 +1,26 @@ +use crate::{DateTimeUtc, RepoId, UserId}; +use sea_orm::entity::prelude::*; +use serde::{Deserialize, Serialize}; + +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)] +#[sea_orm(table_name = "repo_tag")] +pub struct Model { + #[sea_orm(primary_key)] + #[sea_orm(column_name = "repo_uuid")] + pub repo: RepoId, + #[sea_orm(primary_key)] + pub name: String, + pub oid: String, + pub color: Option, + pub description: Option, + pub created_at: DateTimeUtc, + pub tagger_name: String, + pub tagger_email: String, + #[sea_orm(column_name = "tagger_uuid")] + pub tagger: Option, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation {} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/libs/models/repos/repo_upstream.rs b/libs/models/repos/repo_upstream.rs new file mode 100644 index 0000000..828c4f5 --- /dev/null +++ b/libs/models/repos/repo_upstream.rs @@ -0,0 +1,34 @@ +use crate::{DateTimeUtc, RepoId}; +use sea_orm::entity::prelude::*; +use serde::{Deserialize, Serialize}; + +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)] +#[sea_orm(table_name = "repo_upstream")] +pub struct Model { + #[sea_orm(primary_key)] + pub id: i64, + #[sea_orm(column_name = "repo_uuid")] + pub repo: RepoId, + pub source_url: String, + pub direction: String, + pub schedule_cron: Option, + pub last_run_at: Option, + pub next_run_at: Option, + pub status: String, + pub created_at: DateTimeUtc, + pub updated_at: 
DateTimeUtc, +} + +impl Model { + pub fn direction_enum(&self) -> Result { + self.direction.parse() + } + pub fn status_enum(&self) -> Result { + self.status.parse() + } +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation {} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/libs/models/repos/repo_watch.rs b/libs/models/repos/repo_watch.rs new file mode 100644 index 0000000..048e3fa --- /dev/null +++ b/libs/models/repos/repo_watch.rs @@ -0,0 +1,23 @@ +use crate::{DateTimeUtc, RepoId, UserId}; +use sea_orm::entity::prelude::*; +use serde::{Deserialize, Serialize}; + +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)] +#[sea_orm(table_name = "repo_watch")] +pub struct Model { + #[sea_orm(primary_key)] + pub id: i64, + #[sea_orm(column_name = "user_uuid")] + pub user: UserId, + #[sea_orm(column_name = "repo_uuid")] + pub repo: RepoId, + pub show_dashboard: bool, + pub notify_email: bool, + pub created_at: DateTimeUtc, + pub updated_at: DateTimeUtc, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation {} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/libs/models/repos/repo_webhook.rs b/libs/models/repos/repo_webhook.rs new file mode 100644 index 0000000..2f457eb --- /dev/null +++ b/libs/models/repos/repo_webhook.rs @@ -0,0 +1,25 @@ +use crate::{DateTimeUtc, RepoId}; +use sea_orm::JsonValue; +use sea_orm::entity::prelude::*; +use serde::{Deserialize, Serialize}; + +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)] +#[sea_orm(table_name = "repo_webhook")] +pub struct Model { + #[sea_orm(primary_key)] + pub id: i64, + #[sea_orm(column_name = "repo_uuid")] + pub repo: RepoId, + pub event: JsonValue, + pub url: Option, + pub access_key: Option, + pub secret_key: Option, + pub created_at: DateTimeUtc, + pub last_delivered_at: Option, + pub touch_count: i64, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation {} + 
impl ActiveModelBehavior for ActiveModel {}

// ===== libs/models/rooms/mod.rs =====
// Shared enums for the chat-room entities, plus module re-exports.
use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};

/// Member role within a room, stored as a lowercase string column.
#[derive(Clone, Debug, PartialEq, Eq, EnumIter, DeriveActiveEnum, Serialize, Deserialize)]
#[sea_orm(rs_type = "String", db_type = "String(StringLen::None)")]
pub enum RoomMemberRole {
    #[sea_orm(string_value = "owner")]
    Owner,
    #[sea_orm(string_value = "admin")]
    Admin,
    #[sea_orm(string_value = "member")]
    Member,
    #[sea_orm(string_value = "guest")]
    Guest,
}

impl std::fmt::Display for RoomMemberRole {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            RoomMemberRole::Owner => write!(f, "owner"),
            RoomMemberRole::Admin => write!(f, "admin"),
            RoomMemberRole::Member => write!(f, "member"),
            RoomMemberRole::Guest => write!(f, "guest"),
        }
    }
}

/// Who (or what) sent a message.
#[derive(Clone, Debug, PartialEq, Eq, EnumIter, DeriveActiveEnum, Serialize, Deserialize)]
#[sea_orm(rs_type = "String", db_type = "String(StringLen::None)")]
pub enum MessageSenderType {
    #[sea_orm(string_value = "member")]
    Member,
    #[sea_orm(string_value = "admin")]
    Admin,
    #[sea_orm(string_value = "owner")]
    Owner,
    #[sea_orm(string_value = "ai")]
    Ai,
    #[sea_orm(string_value = "system")]
    System,
    #[sea_orm(string_value = "tool")]
    Tool,
    #[sea_orm(string_value = "guest")]
    Guest,
}

impl std::fmt::Display for MessageSenderType {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            MessageSenderType::Member => write!(f, "member"),
            MessageSenderType::Admin => write!(f, "admin"),
            MessageSenderType::Owner => write!(f, "owner"),
            MessageSenderType::Ai => write!(f, "ai"),
            MessageSenderType::System => write!(f, "system"),
            MessageSenderType::Tool => write!(f, "tool"),
            MessageSenderType::Guest => write!(f, "guest"),
        }
    }
}

/// Media type of a message body.
#[derive(Clone, Debug, PartialEq, Eq, EnumIter, DeriveActiveEnum, Serialize, Deserialize)]
#[sea_orm(rs_type = "String", db_type = "String(StringLen::None)")]
pub enum MessageContentType {
    #[sea_orm(string_value = "text")]
    Text,
    #[sea_orm(string_value = "image")]
    Image,
    #[sea_orm(string_value = "audio")]
    Audio,
    #[sea_orm(string_value = "video")]
    Video,
    #[sea_orm(string_value = "file")]
    File,
}

impl std::fmt::Display for MessageContentType {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            MessageContentType::Text => write!(f, "text"),
            MessageContentType::Image => write!(f, "image"),
            MessageContentType::Audio => write!(f, "audio"),
            MessageContentType::Video => write!(f, "video"),
            MessageContentType::File => write!(f, "file"),
        }
    }
}

/// Lifecycle state of an AI tool invocation.
#[derive(Clone, Debug, PartialEq, Eq, EnumIter, DeriveActiveEnum, Serialize, Deserialize)]
#[sea_orm(rs_type = "String", db_type = "String(StringLen::None)")]
pub enum ToolCallStatus {
    #[sea_orm(string_value = "pending")]
    Pending,
    #[sea_orm(string_value = "running")]
    Running,
    #[sea_orm(string_value = "success")]
    Success,
    #[sea_orm(string_value = "failed")]
    Failed,
    #[sea_orm(string_value = "retrying")]
    Retrying,
}

impl std::fmt::Display for ToolCallStatus {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            ToolCallStatus::Pending => write!(f, "pending"),
            ToolCallStatus::Running => write!(f, "running"),
            ToolCallStatus::Success => write!(f, "success"),
            ToolCallStatus::Failed => write!(f, "failed"),
            ToolCallStatus::Retrying => write!(f, "retrying"),
        }
    }
}

pub use room::Entity as Room;
pub use room_ai::Entity as RoomAi;
pub use room_category::Entity as RoomCategory;
pub use room_member::Entity as RoomMember;
pub use room_message::Entity as RoomMessage;
pub use room_message_edit_history::Entity as RoomMessageEditHistory;
pub use room_message_reaction::Entity as RoomMessageReaction;
pub use room_notifications::Entity as RoomNotification;
pub use room_notifications::NotificationType;
pub use room_pin::Entity as RoomPin;
pub use room_thread::Entity as RoomThread;
pub mod room;
pub mod room_ai;
pub mod room_category;
pub mod room_member;
pub mod room_message;
pub mod room_message_edit_history;
pub mod room_message_reaction;
pub mod room_notifications;
pub mod room_pin;
pub mod room_thread;

// ===== libs/models/rooms/room.rs =====
// A chat room within a project.
use crate::{DateTimeUtc, ProjectId, RoomCategoryId, RoomId, UserId};
use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};

// NOTE(review): stripped generic parameters reconstructed — confirm.
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)]
#[sea_orm(table_name = "room")]
pub struct Model {
    #[sea_orm(primary_key)]
    pub id: RoomId,
    pub project: ProjectId,
    pub room_name: String,
    #[sea_orm(column_name = "public")]
    pub public: bool,
    pub category: Option<RoomCategoryId>,
    pub created_by: UserId,
    pub created_at: DateTimeUtc,
    pub last_msg_at: DateTimeUtc,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
    #[sea_orm(
        belongs_to = "super::room_category::Entity",
        from = "Column::Category",
        to = "super::room_category::Column::Id"
    )]
    Category,
    #[sea_orm(has_many = "super::room_message::Entity")]
    Messages,
    #[sea_orm(has_many = "super::room_member::Entity")]
    Members,
    #[sea_orm(has_many = "super::room_thread::Entity")]
    Threads,
    #[sea_orm(has_many = "super::room_ai::Entity")]
    Ais,
    #[sea_orm(has_many = "super::room_pin::Entity")]
    Pins,
}

impl Related<super::room_category::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Category.def()
    }
}

impl Related<super::room_message::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Messages.def()
    }
}

impl Related<super::room_member::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Members.def()
    }
}

impl Related<super::room_thread::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Threads.def()
    }
}

impl Related<super::room_ai::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Ais.def()
    }
}

impl Related<super::room_pin::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Pins.def()
    }
}

impl ActiveModelBehavior for ActiveModel {}

// ===== libs/models/rooms/room_ai.rs =====
// Per-room AI model attachment and generation settings; PK (room, model).
use crate::{DateTimeUtc, ModelId, ModelVersionId, RoomId};
use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};

#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)]
#[sea_orm(table_name = "room_ai")]
pub struct Model {
    #[sea_orm(primary_key)]
    pub room: RoomId,
    #[sea_orm(primary_key)]
    pub model: ModelId,
    pub version: Option<ModelVersionId>,
    pub call_count: i64,
    pub last_call_at: Option<DateTimeUtc>,
    // NOTE(review): integer/float widths below are reconstructed guesses —
    // confirm against the migration schema.
    pub history_limit: Option<i32>,
    pub system_prompt: Option<String>,
    pub temperature: Option<f32>,
    pub max_tokens: Option<i32>,
    pub use_exact: bool,
    pub think: bool,
    pub stream: bool,
    pub min_score: Option<f32>,
    pub created_at: DateTimeUtc,
    pub updated_at: DateTimeUtc,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
    #[sea_orm(
        belongs_to = "super::room::Entity",
        from = "Column::Room",
        to = "super::room::Column::Id"
    )]
    Room,
    #[sea_orm(
        belongs_to = "super::super::agents::model::Entity",
        from = "Column::Model",
        to = "super::super::agents::model::Column::Id"
    )]
    AiModel,
}

impl Related<super::room::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Room.def()
    }
}

impl Related<super::super::agents::model::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::AiModel.def()
    }
}

impl ActiveModelBehavior for ActiveModel {}

// ===== libs/models/rooms/room_category.rs ===== (continues below)
use crate::{DateTimeUtc, ProjectId,
RoomCategoryId, UserId};
use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};

// Grouping of rooms within a project, ordered by `position`.
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)]
#[sea_orm(table_name = "room_category")]
pub struct Model {
    #[sea_orm(primary_key)]
    pub id: RoomCategoryId,
    #[sea_orm(column_name = "project_uuid")]
    pub project: ProjectId,
    pub name: String,
    pub position: i32,
    pub created_by: UserId,
    pub created_at: DateTimeUtc,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
    #[sea_orm(has_many = "super::room::Entity")]
    Rooms,
}

impl Related<super::room::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Rooms.def()
    }
}

impl ActiveModelBehavior for ActiveModel {}

// ===== libs/models/rooms/room_member.rs =====
// Membership of a user in a room; composite PK (room, user).
use crate::{DateTimeUtc, RoomId, Seq, UserId};
use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};

use super::RoomMemberRole;

// NOTE(review): stripped generic parameters reconstructed — confirm.
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)]
#[sea_orm(table_name = "room_member")]
pub struct Model {
    #[sea_orm(primary_key)]
    pub room: RoomId,
    #[sea_orm(primary_key)]
    pub user: UserId,
    pub role: RoomMemberRole,
    // NOTE(review): presumably the sequence number of the member's first
    // message in the room — confirm.
    pub first_msg_in: Option<Seq>,
    pub joined_at: Option<DateTimeUtc>,
    pub last_read_seq: Option<Seq>,
    /// Do Not Disturb: if true, suppress notifications for this room
    pub do_not_disturb: bool,
    /// DND start hour (0-23, local time). None means no scheduled DND.
    pub dnd_start_hour: Option<i16>,
    /// DND end hour (0-23, local time). None means no scheduled DND.
    pub dnd_end_hour: Option<i16>,
}

impl Model {
    // NOTE(review): error type lost in extraction; infallible in practice
    // (always returns Ok) — confirm the original signature.
    #[deprecated(since = "0.1.0", note = "role is now an enum")]
    pub fn role_enum(&self) -> Result<RoomMemberRole, String> {
        Ok(self.role.clone())
    }
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
    #[sea_orm(
        belongs_to = "super::room::Entity",
        from = "Column::Room",
        to = "super::room::Column::Id"
    )]
    Room,
}

impl Related<super::room::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Room.def()
    }
}

impl ActiveModelBehavior for ActiveModel {}

// ===== libs/models/rooms/room_message.rs =====
// A single message in a room; `seq` orders messages within the room.
use crate::{DateTimeUtc, MessageId, RoomId, RoomThreadId, Seq, UserId};
use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};

use super::{MessageContentType, MessageSenderType};

#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)]
#[sea_orm(table_name = "room_message")]
pub struct Model {
    #[sea_orm(primary_key)]
    pub id: MessageId,
    pub seq: Seq,
    pub room: RoomId,
    pub sender_type: MessageSenderType,
    pub sender_id: Option<UserId>,
    pub thread: Option<RoomThreadId>,
    pub in_reply_to: Option<MessageId>,
    pub content: String,
    pub content_type: MessageContentType,
    pub edited_at: Option<DateTimeUtc>,
    pub send_at: DateTimeUtc,
    // NOTE(review): inner types of `revoked`/`revoked_by` were lost in
    // extraction; reconstructed as revocation timestamp + revoking user —
    // confirm against the migration schema.
    pub revoked: Option<DateTimeUtc>,
    pub revoked_by: Option<UserId>,
    /// Full-text-search shadow column; maintained by the database, never
    /// read or written through the ORM.
    #[sea_orm(ignore)]
    pub content_tsv: Option<String>,
}

impl Model {
    // NOTE(review): error types lost in extraction; both accessors are
    // infallible in practice — confirm the original signatures.
    #[deprecated(since = "0.1.0", note = "sender_type is now an enum")]
    pub fn sender_type_enum(&self) -> Result<MessageSenderType, String> {
        Ok(self.sender_type.clone())
    }

    #[deprecated(since = "0.1.0", note = "content_type is now an enum")]
    pub fn content_type_enum(&self) -> Result<MessageContentType, String> {
        Ok(self.content_type.clone())
    }
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
    #[sea_orm(
        belongs_to = "super::room::Entity",
        from = "Column::Room",
        to = "super::room::Column::Id"
    )]
    Room,
    #[sea_orm(
        belongs_to = "super::room_thread::Entity",
        from = "Column::Thread",
        to = "super::room_thread::Column::Id"
    )]
    Thread,
}

impl Related<super::room::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Room.def()
    }
}

impl Related<super::room_thread::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Thread.def()
    }
}

impl ActiveModelBehavior for ActiveModel {}

// ===== libs/models/rooms/room_message_edit_history.rs =====
// Audit trail of message edits.
use crate::{DateTimeUtc, MessageId, UserId};
use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};

#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)]
#[sea_orm(table_name = "room_message_edit_history")]
pub struct Model {
    #[sea_orm(primary_key)]
    pub id: Uuid,
    pub message: MessageId,
    pub user: UserId,
    /// Previous content before edit
    pub old_content: String,
    /// New content after edit
    pub new_content: String,
    pub edited_at: DateTimeUtc,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
    #[sea_orm(
        belongs_to = "super::room_message::Entity",
        from = "Column::Message",
        to = "super::room_message::Column::Id"
    )]
    Message,
}

impl Related<super::room_message::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Message.def()
    }
}

impl ActiveModelBehavior for ActiveModel {}

// ===== libs/models/rooms/room_message_reaction.rs ===== (continues below)
// Emoji reaction left by a user on a message.
use crate::{DateTimeUtc, MessageId, RoomId, UserId};
use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};

#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)]
#[sea_orm(table_name = "room_message_reaction")]
pub struct Model {
#[sea_orm(primary_key)] + pub id: Uuid, + pub room: RoomId, + pub message: MessageId, + pub user: UserId, + pub emoji: String, + pub created_at: DateTimeUtc, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation { + #[sea_orm( + belongs_to = "super::room::Entity", + from = "Column::Room", + to = "super::room::Column::Id" + )] + Room, + #[sea_orm( + belongs_to = "super::room_message::Entity", + from = "Column::Message", + to = "super::room_message::Column::Id" + )] + Message, + #[sea_orm( + belongs_to = "super::room_member::Entity", + from = "(Column::Room, Column::User)", + to = "(super::room_member::Column::Room, super::room_member::Column::User)" + )] + Member, +} + +impl Related for Entity { + fn to() -> RelationDef { + Relation::Room.def() + } +} + +impl Related for Entity { + fn to() -> RelationDef { + Relation::Message.def() + } +} + +impl Related for Entity { + fn to() -> RelationDef { + Relation::Member.def() + } +} + +impl ActiveModelBehavior for ActiveModel {} + +/// Aggregated reaction counts per message for API responses +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct MessageReactionSummary { + pub message_id: UserId, + /// emoji -> { count, reacted_by_current_user } + pub reactions: Vec, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ReactionGroup { + pub emoji: String, + pub count: i64, + pub reacted_by_me: bool, + /// Sample of users who reacted (first 3) + pub users: Vec, +} diff --git a/libs/models/rooms/room_notifications.rs b/libs/models/rooms/room_notifications.rs new file mode 100644 index 0000000..d37adf6 --- /dev/null +++ b/libs/models/rooms/room_notifications.rs @@ -0,0 +1,85 @@ +use chrono::{DateTime, Utc}; +use sea_orm::entity::prelude::*; +use serde::{Deserialize, Serialize}; + +#[derive(Clone, Debug, PartialEq, Eq, EnumIter, DeriveActiveEnum, Serialize, Deserialize)] +#[sea_orm(rs_type = "String", db_type = "String(StringLen::None)")] +#[serde(rename_all = "snake_case")] +pub 
enum NotificationType { + #[sea_orm(string_value = "mention")] + Mention, + #[sea_orm(string_value = "invitation")] + Invitation, + #[sea_orm(string_value = "role_change")] + RoleChange, + #[sea_orm(string_value = "room_created")] + RoomCreated, + #[sea_orm(string_value = "room_deleted")] + RoomDeleted, + #[sea_orm(string_value = "system_announcement")] + SystemAnnouncement, +} + +impl std::fmt::Display for NotificationType { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + let s = match self { + NotificationType::Mention => "mention", + NotificationType::Invitation => "invitation", + NotificationType::RoleChange => "role_change", + NotificationType::RoomCreated => "room_created", + NotificationType::RoomDeleted => "room_deleted", + NotificationType::SystemAnnouncement => "system_announcement", + }; + write!(f, "{}", s) + } +} + +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)] +#[sea_orm(table_name = "room_notifications")] +#[serde(rename_all = "camelCase")] +pub struct Model { + #[sea_orm(primary_key)] + pub id: Uuid, + #[sea_orm(nullable)] + pub room: Option, + #[sea_orm(nullable)] + pub project: Option, + #[sea_orm(nullable)] + pub user_id: Option, + #[sea_orm(column_name = "notification_type")] + pub notification_type: NotificationType, + #[sea_orm(nullable)] + pub related_message_id: Option, + #[sea_orm(nullable)] + pub related_user_id: Option, + #[sea_orm(nullable)] + pub related_room_id: Option, + #[sea_orm(column_name = "title")] + pub title: String, + #[sea_orm(column_name = "content", nullable)] + pub content: Option, + #[sea_orm(column_name = "metadata", nullable)] + pub metadata: Option, + #[sea_orm(column_name = "is_read")] + pub is_read: bool, + #[sea_orm(column_name = "is_archived")] + pub is_archived: bool, + #[sea_orm(column_name = "created_at")] + pub created_at: DateTime, + #[sea_orm(column_name = "read_at", nullable)] + pub read_at: Option>, + #[sea_orm(column_name = "expires_at", nullable)] 
+ pub expires_at: Option>, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation { + #[sea_orm( + belongs_to = "super::room::Entity", + from = "Column::Room", + to = "super::room::Column::Id" + )] + Room, +} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/libs/models/rooms/room_pin.rs b/libs/models/rooms/room_pin.rs new file mode 100644 index 0000000..842d753 --- /dev/null +++ b/libs/models/rooms/room_pin.rs @@ -0,0 +1,44 @@ +use crate::{DateTimeUtc, MessageId, RoomId, UserId}; +use sea_orm::entity::prelude::*; +use serde::{Deserialize, Serialize}; + +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)] +#[sea_orm(table_name = "room_pin")] +pub struct Model { + #[sea_orm(primary_key)] + pub room: RoomId, + #[sea_orm(primary_key)] + pub message: MessageId, + pub pinned_by: UserId, + pub pinned_at: DateTimeUtc, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation { + #[sea_orm( + belongs_to = "super::room::Entity", + from = "Column::Room", + to = "super::room::Column::Id" + )] + Room, + #[sea_orm( + belongs_to = "super::room_message::Entity", + from = "Column::Message", + to = "super::room_message::Column::Id" + )] + Message, +} + +impl Related for Entity { + fn to() -> RelationDef { + Relation::Room.def() + } +} + +impl Related for Entity { + fn to() -> RelationDef { + Relation::Message.def() + } +} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/libs/models/rooms/room_thread.rs b/libs/models/rooms/room_thread.rs new file mode 100644 index 0000000..cc5820f --- /dev/null +++ b/libs/models/rooms/room_thread.rs @@ -0,0 +1,44 @@ +use crate::{DateTimeUtc, RoomId, RoomThreadId, Seq, UserId}; +use sea_orm::entity::prelude::*; +use serde::{Deserialize, Serialize}; + +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)] +#[sea_orm(table_name = "room_thread")] +pub struct Model { + #[sea_orm(primary_key)] + pub id: RoomThreadId, + pub room: RoomId, 
+ pub parent: Seq, + pub created_by: UserId, + pub participants: sea_orm::JsonValue, + pub last_message_at: DateTimeUtc, + pub last_message_preview: Option, + pub created_at: DateTimeUtc, + pub updated_at: DateTimeUtc, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation { + #[sea_orm( + belongs_to = "super::room::Entity", + from = "Column::Room", + to = "super::room::Column::Id" + )] + Room, + #[sea_orm(has_many = "super::room_message::Entity")] + Messages, +} + +impl Related for Entity { + fn to() -> RelationDef { + Relation::Room.def() + } +} + +impl Related for Entity { + fn to() -> RelationDef { + Relation::Messages.def() + } +} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/libs/models/system/label.rs b/libs/models/system/label.rs new file mode 100644 index 0000000..250e5d6 --- /dev/null +++ b/libs/models/system/label.rs @@ -0,0 +1,19 @@ +use crate::ProjectId; +use sea_orm::entity::prelude::*; +use serde::{Deserialize, Serialize}; + +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)] +#[sea_orm(table_name = "label")] +pub struct Model { + #[sea_orm(primary_key)] + pub id: i64, + #[sea_orm(column_name = "project_uuid")] + pub project: ProjectId, + pub name: String, + pub color: String, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation {} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/libs/models/system/mod.rs b/libs/models/system/mod.rs new file mode 100644 index 0000000..5c0951b --- /dev/null +++ b/libs/models/system/mod.rs @@ -0,0 +1,5 @@ +pub use label::Entity as Label; +pub use notify::Entity as Notify; + +pub mod label; +pub mod notify; diff --git a/libs/models/system/notify.rs b/libs/models/system/notify.rs new file mode 100644 index 0000000..9e917b8 --- /dev/null +++ b/libs/models/system/notify.rs @@ -0,0 +1,25 @@ +use crate::{DateTimeUtc, UserId}; +use sea_orm::entity::prelude::*; +use serde::{Deserialize, Serialize}; + +#[derive(Clone, 
Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)] +#[sea_orm(table_name = "notify")] +pub struct Model { + #[sea_orm(primary_key)] + pub id: i64, + #[sea_orm(column_name = "user_uuid")] + pub user: UserId, + pub title: String, + pub description: Option, + pub content: String, + pub url: Option, + pub kind: i32, + pub read_at: Option, + pub deleted_at: Option, + pub created_at: DateTimeUtc, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation {} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/libs/models/users/mod.rs b/libs/models/users/mod.rs new file mode 100644 index 0000000..702594a --- /dev/null +++ b/libs/models/users/mod.rs @@ -0,0 +1,25 @@ +pub use user::Entity as User; +pub use user_2fa::Entity as User2fa; +pub use user_activity_log::Entity as UserActivityLog; +pub use user_email::Entity as UserEmail; +pub use user_email_change::Entity as UserEmailChange; +pub use user_notification::Entity as UserNotification; +pub use user_password::Entity as UserPassword; +pub use user_password_reset::Entity as UserPasswordReset; +pub use user_preferences::Entity as UserPreferences; +pub use user_relation::Entity as UserRelation; +pub use user_ssh_key::Entity as UserSshKey; +pub use user_token::Entity as UserToken; + +pub mod user; +pub mod user_2fa; +pub mod user_activity_log; +pub mod user_email; +pub mod user_email_change; +pub mod user_notification; +pub mod user_password; +pub mod user_password_reset; +pub mod user_preferences; +pub mod user_relation; +pub mod user_ssh_key; +pub mod user_token; diff --git a/libs/models/users/user.rs b/libs/models/users/user.rs new file mode 100644 index 0000000..aa412f3 --- /dev/null +++ b/libs/models/users/user.rs @@ -0,0 +1,23 @@ +use crate::{DateTimeUtc, UserId}; +use sea_orm::entity::prelude::*; +use serde::{Deserialize, Serialize}; + +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)] +#[sea_orm(table_name = "user")] +pub struct Model { + 
#[sea_orm(primary_key)] + pub uid: UserId, + pub username: String, + pub display_name: Option, + pub avatar_url: Option, + pub website_url: Option, + pub organization: Option, + pub last_sign_in_at: Option, + pub created_at: DateTimeUtc, + pub updated_at: DateTimeUtc, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation {} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/libs/models/users/user_2fa.rs b/libs/models/users/user_2fa.rs new file mode 100644 index 0000000..d5ec61c --- /dev/null +++ b/libs/models/users/user_2fa.rs @@ -0,0 +1,49 @@ +use crate::{DateTimeUtc, UserId}; +use sea_orm::entity::prelude::*; +use serde::{Deserialize, Serialize}; + +/// Stored as `"totp"` or `"webauthn"` in the database. +#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] +pub enum TwoFactorMethod { + Totp, + WebAuthn, +} + +impl std::fmt::Display for TwoFactorMethod { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + TwoFactorMethod::Totp => write!(f, "totp"), + TwoFactorMethod::WebAuthn => write!(f, "webauthn"), + } + } +} + +impl std::str::FromStr for TwoFactorMethod { + type Err = &'static str; + fn from_str(s: &str) -> Result { + match s { + "totp" => Ok(TwoFactorMethod::Totp), + "webauthn" => Ok(TwoFactorMethod::WebAuthn), + _ => Err("unknown two-factor method"), + } + } +} + +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)] +#[sea_orm(table_name = "user_2fa")] +pub struct Model { + #[sea_orm(primary_key)] + pub user: UserId, + pub method: String, + pub secret: Option, + #[sea_orm(column_type = "Json")] + pub backup_codes: serde_json::Value, + pub is_enabled: bool, + pub created_at: DateTimeUtc, + pub updated_at: DateTimeUtc, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation {} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/libs/models/users/user_activity_log.rs b/libs/models/users/user_activity_log.rs new file mode 
100644 index 0000000..58e796f --- /dev/null +++ b/libs/models/users/user_activity_log.rs @@ -0,0 +1,69 @@ +use crate::{DateTimeUtc, UserId}; +use sea_orm::JsonValue; +use sea_orm::entity::prelude::*; +use serde::{Deserialize, Serialize}; + +/// Auth action types: login, logout, register, password_change, password_reset, 2fa_enabled, etc. +#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] +pub enum AuthAction { + Login, + Logout, + Register, + PasswordChange, + PasswordReset, + TwoFactorEnabled, + TwoFactorDisabled, + TwoFactorBackupCodesRegenerated, +} + +impl std::fmt::Display for AuthAction { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + AuthAction::Login => write!(f, "login"), + AuthAction::Logout => write!(f, "logout"), + AuthAction::Register => write!(f, "register"), + AuthAction::PasswordChange => write!(f, "password_change"), + AuthAction::PasswordReset => write!(f, "password_reset"), + AuthAction::TwoFactorEnabled => write!(f, "2fa_enabled"), + AuthAction::TwoFactorDisabled => write!(f, "2fa_disabled"), + AuthAction::TwoFactorBackupCodesRegenerated => { + write!(f, "2fa_backup_codes_regenerated") + } + } + } +} + +impl std::str::FromStr for AuthAction { + type Err = &'static str; + fn from_str(s: &str) -> Result { + match s { + "login" => Ok(AuthAction::Login), + "logout" => Ok(AuthAction::Logout), + "register" => Ok(AuthAction::Register), + "password_change" => Ok(AuthAction::PasswordChange), + "password_reset" => Ok(AuthAction::PasswordReset), + "2fa_enabled" => Ok(AuthAction::TwoFactorEnabled), + "2fa_disabled" => Ok(AuthAction::TwoFactorDisabled), + "2fa_backup_codes_regenerated" => Ok(AuthAction::TwoFactorBackupCodesRegenerated), + _ => Err("unknown auth action"), + } + } +} + +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)] +#[sea_orm(table_name = "user_activity_log")] +pub struct Model { + #[sea_orm(primary_key)] + pub id: i64, + pub user_uid: Option, + pub action: 
String, + pub ip_address: Option, + pub user_agent: Option, + pub details: JsonValue, + pub created_at: DateTimeUtc, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation {} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/libs/models/users/user_email.rs b/libs/models/users/user_email.rs new file mode 100644 index 0000000..af63e4f --- /dev/null +++ b/libs/models/users/user_email.rs @@ -0,0 +1,17 @@ +use crate::{DateTimeUtc, UserId}; +use sea_orm::entity::prelude::*; +use serde::{Deserialize, Serialize}; + +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)] +#[sea_orm(table_name = "user_email")] +pub struct Model { + #[sea_orm(primary_key)] + pub user: UserId, + pub email: String, + pub created_at: DateTimeUtc, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation {} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/libs/models/users/user_email_change.rs b/libs/models/users/user_email_change.rs new file mode 100644 index 0000000..9e2934b --- /dev/null +++ b/libs/models/users/user_email_change.rs @@ -0,0 +1,20 @@ +use crate::{DateTimeUtc, UserId}; +use sea_orm::entity::prelude::*; +use serde::{Deserialize, Serialize}; + +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)] +#[sea_orm(table_name = "user_email_change")] +pub struct Model { + #[sea_orm(primary_key)] + pub token: String, + pub user_uid: UserId, + pub new_email: String, + pub expires_at: DateTimeUtc, + pub used: bool, + pub created_at: DateTimeUtc, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation {} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/libs/models/users/user_notification.rs b/libs/models/users/user_notification.rs new file mode 100644 index 0000000..8d79bea --- /dev/null +++ b/libs/models/users/user_notification.rs @@ -0,0 +1,57 @@ +use crate::{DateTimeUtc, UserId}; +use sea_orm::entity::prelude::*; +use serde::{Deserialize, 
Serialize}; + +/// Stored as `"daily"`, `"weekly"`, or `"never"` in the database. +#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] +pub enum DigestMode { + Daily, + Weekly, + Never, +} + +impl std::fmt::Display for DigestMode { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + DigestMode::Daily => write!(f, "daily"), + DigestMode::Weekly => write!(f, "weekly"), + DigestMode::Never => write!(f, "never"), + } + } +} + +impl std::str::FromStr for DigestMode { + type Err = &'static str; + fn from_str(s: &str) -> Result { + match s { + "daily" => Ok(DigestMode::Daily), + "weekly" => Ok(DigestMode::Weekly), + "never" => Ok(DigestMode::Never), + _ => Err("unknown digest mode"), + } + } +} + +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)] +#[sea_orm(table_name = "user_notification")] +pub struct Model { + #[sea_orm(primary_key)] + pub user: UserId, + pub email_enabled: bool, + pub in_app_enabled: bool, + pub push_enabled: bool, + pub digest_mode: String, + pub dnd_enabled: bool, + pub dnd_start_minute: Option, + pub dnd_end_minute: Option, + pub marketing_enabled: bool, + pub security_enabled: bool, + pub product_enabled: bool, + pub created_at: DateTimeUtc, + pub updated_at: DateTimeUtc, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation {} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/libs/models/users/user_password.rs b/libs/models/users/user_password.rs new file mode 100644 index 0000000..76d6c81 --- /dev/null +++ b/libs/models/users/user_password.rs @@ -0,0 +1,20 @@ +use crate::{DateTimeUtc, UserId}; +use sea_orm::entity::prelude::*; +use serde::{Deserialize, Serialize}; + +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)] +#[sea_orm(table_name = "user_password")] +pub struct Model { + #[sea_orm(primary_key)] + pub user: UserId, + pub password_hash: String, + pub password_salt: Option, + pub is_active: bool, + pub 
created_at: DateTimeUtc, + pub updated_at: DateTimeUtc, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation {} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/libs/models/users/user_password_reset.rs b/libs/models/users/user_password_reset.rs new file mode 100644 index 0000000..55fbf85 --- /dev/null +++ b/libs/models/users/user_password_reset.rs @@ -0,0 +1,19 @@ +use crate::{DateTimeUtc, UserId}; +use sea_orm::entity::prelude::*; +use serde::{Deserialize, Serialize}; + +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)] +#[sea_orm(table_name = "user_password_reset")] +pub struct Model { + #[sea_orm(primary_key)] + pub token: String, + pub user_uid: UserId, + pub expires_at: DateTimeUtc, + pub used: bool, + pub created_at: DateTimeUtc, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation {} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/libs/models/users/user_preferences.rs b/libs/models/users/user_preferences.rs new file mode 100644 index 0000000..ac133fd --- /dev/null +++ b/libs/models/users/user_preferences.rs @@ -0,0 +1,22 @@ +use crate::{DateTimeUtc, UserId}; +use sea_orm::entity::prelude::*; +use serde::{Deserialize, Serialize}; + +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)] +#[sea_orm(table_name = "user_preferences")] +pub struct Model { + #[sea_orm(primary_key)] + pub user: UserId, + pub language: String, + pub theme: String, + pub timezone: String, + pub email_notifications: bool, + pub in_app_notifications: bool, + pub created_at: DateTimeUtc, + pub updated_at: DateTimeUtc, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation {} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/libs/models/users/user_relation.rs b/libs/models/users/user_relation.rs new file mode 100644 index 0000000..95373b4 --- /dev/null +++ b/libs/models/users/user_relation.rs @@ -0,0 +1,47 @@ +use 
crate::{DateTimeUtc, UserId}; +use sea_orm::entity::prelude::*; +use serde::{Deserialize, Serialize}; + +/// Stored as `"follow"` or `"block"` in the database. +/// Use `FromStr` / `ToString` for type-safe access. +#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] +pub enum RelationType { + Follow, + Block, +} + +impl std::fmt::Display for RelationType { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + RelationType::Follow => write!(f, "follow"), + RelationType::Block => write!(f, "block"), + } + } +} + +impl std::str::FromStr for RelationType { + type Err = &'static str; + fn from_str(s: &str) -> Result { + match s { + "follow" => Ok(RelationType::Follow), + "block" => Ok(RelationType::Block), + _ => Err("unknown relation type"), + } + } +} + +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)] +#[sea_orm(table_name = "user_relation")] +pub struct Model { + #[sea_orm(primary_key)] + pub id: i64, + pub user: UserId, + pub target: UserId, + pub relation_type: String, + pub created_at: DateTimeUtc, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation {} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/libs/models/users/user_ssh_key.rs b/libs/models/users/user_ssh_key.rs new file mode 100644 index 0000000..1c93426 --- /dev/null +++ b/libs/models/users/user_ssh_key.rs @@ -0,0 +1,57 @@ +use crate::{DateTimeUtc, UserId}; +use sea_orm::entity::prelude::*; +use serde::{Deserialize, Serialize}; + +/// Stored as `"ed25519"`, `"rsa"`, or `"ecdsa"` in the database. 
+#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] +pub enum KeyType { + Ed25519, + Rsa, + Ecdsa, +} + +impl std::fmt::Display for KeyType { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + KeyType::Ed25519 => write!(f, "ed25519"), + KeyType::Rsa => write!(f, "rsa"), + KeyType::Ecdsa => write!(f, "ecdsa"), + } + } +} + +impl std::str::FromStr for KeyType { + type Err = &'static str; + fn from_str(s: &str) -> Result { + match s { + "ed25519" => Ok(KeyType::Ed25519), + "rsa" => Ok(KeyType::Rsa), + "ecdsa" => Ok(KeyType::Ecdsa), + _ => Err("unknown key type"), + } + } +} + +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)] +#[sea_orm(table_name = "user_ssh_key")] +pub struct Model { + #[sea_orm(primary_key)] + pub id: i64, + pub user: UserId, + pub title: String, + pub public_key: String, + pub fingerprint: String, + pub key_type: String, + pub key_bits: Option, + pub is_verified: bool, + pub last_used_at: Option, + pub expires_at: Option, + pub is_revoked: bool, + pub created_at: DateTimeUtc, + pub updated_at: DateTimeUtc, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation {} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/libs/models/users/user_token.rs b/libs/models/users/user_token.rs new file mode 100644 index 0000000..6069ef5 --- /dev/null +++ b/libs/models/users/user_token.rs @@ -0,0 +1,24 @@ +use crate::{DateTimeUtc, UserId}; +use sea_orm::JsonValue; +use sea_orm::entity::prelude::*; +use serde::{Deserialize, Serialize}; + +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)] +#[sea_orm(table_name = "user_token")] +pub struct Model { + #[sea_orm(primary_key)] + pub id: i64, + pub user: UserId, + pub name: String, + pub token_hash: String, + pub scopes: JsonValue, + pub expires_at: Option, + pub is_revoked: bool, + pub created_at: DateTimeUtc, + pub updated_at: DateTimeUtc, +} + +#[derive(Copy, Clone, Debug, 
EnumIter, DeriveRelation)] +pub enum Relation {} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/libs/models/workspaces/mod.rs b/libs/models/workspaces/mod.rs new file mode 100644 index 0000000..e5601f6 --- /dev/null +++ b/libs/models/workspaces/mod.rs @@ -0,0 +1,9 @@ +pub mod workspace; +pub mod workspace_billing; +pub mod workspace_billing_history; +pub mod workspace_membership; + +pub use workspace::Entity as Workspace; +pub use workspace_billing::Entity as WorkspaceBilling; +pub use workspace_billing_history::Entity as WorkspaceBillingHistory; +pub use workspace_membership::WorkspaceRole; diff --git a/libs/models/workspaces/workspace.rs b/libs/models/workspaces/workspace.rs new file mode 100644 index 0000000..8ab9f2c --- /dev/null +++ b/libs/models/workspaces/workspace.rs @@ -0,0 +1,27 @@ +use crate::DateTimeUtc; +use sea_orm::entity::prelude::*; +use serde::{Deserialize, Serialize}; + +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)] +#[sea_orm(table_name = "workspace")] +pub struct Model { + #[sea_orm(primary_key)] + pub id: Uuid, + pub slug: String, + pub name: String, + pub description: Option, + pub avatar_url: Option, + pub plan: String, + pub billing_email: Option, + pub stripe_customer_id: Option, + pub stripe_subscription_id: Option, + pub plan_expires_at: Option, + pub deleted_at: Option, + pub created_at: DateTimeUtc, + pub updated_at: DateTimeUtc, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation {} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/libs/models/workspaces/workspace_billing.rs b/libs/models/workspaces/workspace_billing.rs new file mode 100644 index 0000000..83dc76d --- /dev/null +++ b/libs/models/workspaces/workspace_billing.rs @@ -0,0 +1,24 @@ +use crate::{DateTimeUtc, Decimal, WorkspaceId}; +use sea_orm::entity::prelude::*; + +#[derive(Clone, Debug, PartialEq, DeriveEntityModel)] +#[sea_orm(table_name = "workspace_billing")] +pub struct Model { + 
#[sea_orm(primary_key, column_name = "workspace_id")]
    pub workspace_id: WorkspaceId,
    #[sea_orm(column_type = "Decimal(Some((20, 4)))")]
    pub balance: Decimal,
    #[sea_orm(column_type = "Text")]
    pub currency: String,
    #[sea_orm(column_type = "Decimal(Some((20, 4)))")]
    pub monthly_quota: Decimal,
    #[sea_orm(column_type = "Decimal(Some((20, 4)))")]
    pub total_spent: Decimal,
    pub updated_at: DateTimeUtc,
    pub created_at: DateTimeUtc,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {}

impl ActiveModelBehavior for ActiveModel {}

// --- libs/models/workspaces/workspace_billing_history.rs ---
use crate::{DateTimeUtc, Decimal, UserId, WorkspaceId};
use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};

/// Append-only ledger of balance changes for a workspace.
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)]
#[sea_orm(table_name = "workspace_billing_history")]
pub struct Model {
    #[sea_orm(primary_key, column_name = "uid")]
    pub uid: Uuid,
    #[sea_orm(column_name = "workspace_id")]
    pub workspace_id: WorkspaceId,
    #[sea_orm(column_name = "user_id")]
    pub user_id: Option<UserId>,
    #[sea_orm(column_type = "Decimal(Some((20, 4)))")]
    pub amount: Decimal,
    #[sea_orm(column_type = "Text")]
    pub currency: String,
    #[sea_orm(column_type = "Text")]
    pub reason: String,
    // NOTE(review): generic was lost in the paste; `Json` is a reconstruction
    // of the payload type — TODO confirm against schema.
    pub extra: Option<Json>,
    pub created_at: DateTimeUtc,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {}

impl ActiveModelBehavior for ActiveModel {}

// --- libs/models/workspaces/workspace_membership.rs ---
use crate::{DateTimeUtc, UserId, WorkspaceId};
use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};

/// Workspace membership role. Values: "owner", "admin", "member"
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub enum WorkspaceRole {
    Owner,
    Admin,
    Member,
}

impl std::fmt::Display for WorkspaceRole {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let text = match self {
            WorkspaceRole::Owner => "owner",
            WorkspaceRole::Admin => "admin",
            WorkspaceRole::Member => "member",
        };
        f.write_str(text)
    }
}

impl std::str::FromStr for WorkspaceRole {
    type Err = &'static str;

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        match s {
            "owner" => Ok(Self::Owner),
            "admin" => Ok(Self::Admin),
            "member" => Ok(Self::Member),
            _ => Err("unknown workspace role"),
        }
    }
}

/// Link table: a user's membership (and pending invite state) in a workspace.
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)]
#[sea_orm(table_name = "workspace_membership")]
pub struct Model {
    #[sea_orm(primary_key)]
    pub id: i64,
    #[sea_orm(column_name = "workspace_id")]
    pub workspace_id: WorkspaceId,
    #[sea_orm(column_name = "user_id")]
    pub user_id: UserId,
    // One of the `WorkspaceRole` string values.
    pub role: String,
    pub status: String,
    pub invited_by: Option<UserId>,
    pub joined_at: DateTimeUtc,
    pub invite_token: Option<String>,
    pub invite_expires_at: Option<DateTimeUtc>,
}

impl Model {
    /// Parse the raw `role` column into a typed [`WorkspaceRole`].
    pub fn role_enum(&self) -> Result<WorkspaceRole, &'static str> {
        self.role.parse()
    }
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {}

impl ActiveModelBehavior for ActiveModel {}

# --- libs/queue/Cargo.toml (head) ---
[package]
name = "queue"
version.workspace = true
edition.workspace = true
authors.workspace = true
description.workspace = true
repository.workspace = true
readme.workspace = true
homepage.workspace = true
license.workspace = true
keywords.workspace = true
categories.workspace = true
+documentation.workspace = true + +[lib] +path = "lib.rs" +name = "queue" + +[dependencies] +redis = { workspace = true } +deadpool-redis = { workspace = true, features = ["rt_tokio_1", "cluster-async", "cluster"] } +serde = { workspace = true, features = ["derive"] } +serde_json = { workspace = true } +tokio = { workspace = true, features = ["rt", "rt-multi-thread", "sync", "time"] } +tokio-stream = { workspace = true } +futures = { workspace = true } +anyhow = { workspace = true } +thiserror = { workspace = true } +uuid = { workspace = true, features = ["v7", "v4", "serde"] } +chrono = { workspace = true, features = ["serde"] } +slog = { workspace = true } + +[lints] +workspace = true diff --git a/libs/queue/lib.rs b/libs/queue/lib.rs new file mode 100644 index 0000000..d0084ce --- /dev/null +++ b/libs/queue/lib.rs @@ -0,0 +1,15 @@ +//! Room message queue: Redis Streams + NATS. + +pub mod producer; +pub mod types; +pub mod worker; + +pub use producer::{MessageProducer, RedisPubSub}; +pub use types::{ + AgentTaskEvent, EmailEnvelope, ProjectRoomEvent, ReactionGroup, RoomMessageEnvelope, + RoomMessageEvent, RoomMessageStreamChunkEvent, +}; +pub use worker::{ + EmailSendFn, EmailSendFut, GetRedis, PersistFn, RedisFuture, room_worker_task, + start as start_worker, start_email_worker, +}; diff --git a/libs/queue/producer.rs b/libs/queue/producer.rs new file mode 100644 index 0000000..0bbcbb7 --- /dev/null +++ b/libs/queue/producer.rs @@ -0,0 +1,228 @@ +//! Publishes room messages into Redis Streams + Redis Pub/Sub (replaces NATS). + +use crate::types::{ + AgentTaskEvent, EmailEnvelope, ProjectRoomEvent, ReactionGroup, RoomMessageEnvelope, + RoomMessageEvent, +}; +use anyhow::Context; +use deadpool_redis::cluster::Connection as RedisConn; +use std::sync::Arc; + +/// Redis Pub/Sub client for broadcasting room events to all server instances. +#[derive(Clone)] +pub struct RedisPubSub { + /// Shared connection pool; cloned handles share the same pool. 
+ pub get_redis: + Arc tokio::task::JoinHandle> + Send + Sync>, + pub log: slog::Logger, +} + +impl RedisPubSub { + /// Publish a serialised event to a Redis channel. + async fn publish_channel(&self, channel: &str, payload: &[u8]) { + let redis = match (self.get_redis)().await { + Ok(Ok(c)) => c, + Ok(Err(e)) => { + slog::error!(self.log, "redis pool get failed"; "error" => %e); + return; + } + Err(_) => { + slog::error!(self.log, "redis pool task panicked"); + return; + } + }; + let mut conn: RedisConn = redis; + if let Err(e) = redis::cmd("PUBLISH") + .arg(channel) + .arg(payload) + .query_async::<()>(&mut conn) + .await + { + slog::error!(self.log, "Redis PUBLISH failed"; "channel" => %channel, "error" => %e); + } + } + + /// Broadcast a RoomMessageEvent to all servers subscribed to this room. + pub async fn publish_room_message(&self, room_id: uuid::Uuid, event: &RoomMessageEvent) { + let channel = format!("room:pub:{}", room_id); + let payload = match serde_json::to_vec(event) { + Ok(p) => p, + Err(e) => { + slog::error!(self.log, "serialise RoomMessageEvent failed"; "error" => %e); + return; + } + }; + self.publish_channel(&channel, &payload).await; + } + + /// Broadcast a project-level event to all servers subscribed to this project. + pub async fn publish_project_room_event( + &self, + project_id: uuid::Uuid, + event: &ProjectRoomEvent, + ) { + let channel = format!("project:pub:{}", project_id); + let payload = match serde_json::to_vec(event) { + Ok(p) => p, + Err(e) => { + slog::error!(self.log, "serialise ProjectRoomEvent failed"; "error" => %e); + return; + } + }; + self.publish_channel(&channel, &payload).await; + } + + /// Broadcast an agent task event to all servers subscribed to this project. 
+ pub async fn publish_agent_task_event(&self, project_id: uuid::Uuid, event: &AgentTaskEvent) { + let channel = format!("task:pub:{}", project_id); + let payload = match serde_json::to_vec(event) { + Ok(p) => p, + Err(e) => { + slog::error!(self.log, "serialise AgentTaskEvent failed"; "error" => %e); + return; + } + }; + self.publish_channel(&channel, &payload).await; + } +} + +/// Produces room messages into Redis Streams + Redis Pub/Sub. +#[derive(Clone)] +pub struct MessageProducer { + pub get_redis: + Arc tokio::task::JoinHandle> + Send + Sync>, + maxlen: i64, + /// Redis Pub/Sub client used to fan-out events to all server instances. + pub pubsub: Option, + log: slog::Logger, +} + +impl MessageProducer { + pub fn new( + get_redis: Arc< + dyn Fn() -> tokio::task::JoinHandle> + Send + Sync, + >, + pubsub: Option, + maxlen: i64, + log: slog::Logger, + ) -> Self { + Self { + get_redis, + maxlen, + pubsub, + log, + } + } + + pub async fn publish( + &self, + room_id: uuid::Uuid, + envelope: RoomMessageEnvelope, + ) -> anyhow::Result { + let redis_key = format!("room:stream:{room_id}"); + let payload = serde_json::to_string(&envelope)?; + + let redis = (self.get_redis)().await??; + let mut conn: RedisConn = redis; + + let entry_id: String = redis::cmd("XADD") + .arg(&redis_key) + .arg("MAXLEN") + .arg("~") + .arg(self.maxlen) + .arg("*") + .arg("data") + .arg(&payload) + .query_async(&mut conn) + .await + .context("XADD to Redis Stream")?; + + slog::info!(self.log, "message queued to stream"; + "room_id" => %room_id, "entry_id" => %entry_id); + + // Fan-out via Redis Pub/Sub so all server instances can push to their WS clients. + if let Some(pubsub) = &self.pubsub { + let event = RoomMessageEvent::from(envelope); + pubsub.publish_room_message(room_id, &event).await; + } + + Ok(entry_id) + } + + /// Publish a project-level room event via Pub/Sub (no Redis Stream write). 
+ pub async fn publish_project_room_event( + &self, + project_id: uuid::Uuid, + event: ProjectRoomEvent, + ) { + let Some(pubsub) = &self.pubsub else { + slog::warn!(self.log, "pubsub not configured, skipping project event"); + return; + }; + pubsub.publish_project_room_event(project_id, &event).await; + } + + /// Publish an agent task event via Pub/Sub (no Redis Stream write). + pub async fn publish_agent_task_event(&self, project_id: uuid::Uuid, event: AgentTaskEvent) { + let Some(pubsub) = &self.pubsub else { + slog::warn!(self.log, "pubsub not configured, skipping task event"); + return; + }; + pubsub.publish_agent_task_event(project_id, &event).await; + } + + /// Broadcast a reaction-update event to all WS clients subscribed to this room. + /// Unlike `publish()`, this does NOT write to the Redis Stream. + pub async fn publish_reaction_event( + &self, + room_id: uuid::Uuid, + _message_id: uuid::Uuid, + reactions: Vec, + ) { + let Some(pubsub) = &self.pubsub else { + slog::warn!(self.log, "pubsub not configured, skipping reaction event"); + return; + }; + let event = RoomMessageEvent { + id: uuid::Uuid::now_v7(), + room_id, + sender_type: String::new(), + sender_id: None, + thread_id: None, + in_reply_to: None, + content: String::new(), + content_type: String::new(), + send_at: chrono::Utc::now(), + seq: 0, + display_name: None, + reactions: Some(reactions), + }; + pubsub.publish_room_message(room_id, &event).await; + } + + /// Publish an email message to the Redis Stream for async processing. 
pub async fn publish_email(&self, envelope: EmailEnvelope) -> anyhow::Result<String> {
        let redis_key = "email:stream";
        let payload = serde_json::to_string(&envelope)?;

        let mut conn: RedisConn = (self.get_redis)().await??;

        let entry_id: String = redis::cmd("XADD")
            .arg(redis_key)
            .arg("MAXLEN")
            .arg("~")
            .arg(self.maxlen)
            .arg("*")
            .arg("data")
            .arg(&payload)
            .query_async(&mut conn)
            .await
            .context("XADD email to Redis Stream")?;

        slog::info!(self.log, "email queued to stream";
            "to" => %envelope.to, "entry_id" => %entry_id);

        Ok(entry_id)
    }
}

// --- libs/queue/types.rs ---
//! Message types shared between producer and worker.
// NOTE(review): `Option`/`Vec` generic parameters below were stripped by the
// mangled paste and re-inferred (ids as `Uuid`, text as `String`) — TODO
// confirm against the original source.

use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use uuid::Uuid;

/// Raw message as written to a room's Redis Stream.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct RoomMessageEnvelope {
    pub id: Uuid,
    pub dedup_key: Option<String>,
    pub room_id: Uuid,
    pub sender_type: String,
    pub sender_id: Option<Uuid>,
    pub thread_id: Option<Uuid>,
    pub in_reply_to: Option<Uuid>,
    pub content: String,
    pub content_type: String,
    pub send_at: DateTime<Utc>,
    pub seq: i64,
}

/// Event fanned out over Pub/Sub to WebSocket subscribers.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct RoomMessageEvent {
    pub id: Uuid,
    pub room_id: Uuid,
    pub sender_type: String,
    pub sender_id: Option<Uuid>,
    pub thread_id: Option<Uuid>,
    pub in_reply_to: Option<Uuid>,
    pub content: String,
    pub content_type: String,
    pub send_at: DateTime<Utc>,
    pub seq: i64,
    pub display_name: Option<String>,
    /// Present when this event carries reaction updates for the message.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub reactions: Option<Vec<ReactionGroup>>,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ReactionGroup {
    pub emoji: String,
    pub count: i64,
    pub reacted_by_me: bool,
    pub users: Vec<String>,
}

impl From<RoomMessageEnvelope> for RoomMessageEvent {
    fn from(e: RoomMessageEnvelope) -> Self {
        Self {
            id: e.id,
            room_id: e.room_id,
            sender_type: e.sender_type,
            sender_id: e.sender_id,
            thread_id: e.thread_id,
            in_reply_to: e.in_reply_to,
            content: e.content,
            content_type: e.content_type,
            send_at: e.send_at,
            seq: e.seq,
            display_name: None,
            reactions: None,
        }
    }
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ProjectRoomEvent {
    pub event_type: String,
    pub project_id: Uuid,
    pub room_id: Option<Uuid>,
    pub category_id: Option<Uuid>,
    pub message_id: Option<Uuid>,
    pub seq: Option<i64>,
    pub timestamp: DateTime<Utc>,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct RoomMessageStreamChunkEvent {
    pub message_id: Uuid,
    pub room_id: Uuid,
    pub content: String,
    pub done: bool,
    pub error: Option<String>,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct EmailEnvelope {
    pub id: Uuid,
    pub to: String,
    pub subject: String,
    pub body: String,
    pub created_at: DateTime<Utc>,
}

/// Agent task event pushed via Redis Pub/Sub to notify WebSocket clients.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AgentTaskEvent {
    /// Task ID
    pub task_id: i64,
    /// Project this task belongs to.
    pub project_id: Uuid,
    /// Parent task ID (null for root tasks).
    pub parent_id: Option<i64>,
    /// Event type: started | progress | done | failed | child_done
    pub event: String,
    /// Human-readable progress/status text.
    pub message: Option<String>,
    /// Task output (only on done event).
    pub output: Option<String>,
    /// Error message (only on failed event).
    pub error: Option<String>,
    /// Current status.
    pub status: String,
    /// Timestamp.
    pub timestamp: DateTime<Utc>,
}

// --- libs/queue/worker.rs ---
//! Redis Streams consumer — delegates persistence to the caller.

use crate::types::{EmailEnvelope, RoomMessageEnvelope};
use std::future::Future;
use std::pin::Pin;
use std::sync::Arc;

// Max entries pulled per XREADGROUP call.
const BATCH_SIZE: usize = 50;
// Poll cadence between consume attempts.
const FLUSH_INTERVAL_SECS: u64 = 1;
const STREAM_PREFIX: &str = "room:stream";
const EMAIL_STREAM_KEY: &str = "email:stream";
const GROUP: &str = "room-worker";
const EMAIL_GROUP: &str = "email-worker";

/// Redis Stream key for one room.
fn stream_key(room_id: uuid::Uuid) -> String {
    format!("{STREAM_PREFIX}:{room_id}")
}

/// Unique consumer name so parallel workers never collide.
fn consumer_id() -> String {
    format!("worker-{}", uuid::Uuid::new_v4())
}

// NOTE(review): generics below were stripped by the paste and re-inferred
// from the call sites (single-`?` awaits imply a plain boxed future here,
// not a JoinHandle).
pub type RedisFuture =
    Pin<Box<dyn Future<Output = anyhow::Result<deadpool_redis::cluster::Connection>> + Send>>;
pub type PersistFn = Arc<dyn Fn(Vec<RoomMessageEnvelope>) -> PersistFut + Send + Sync>;
pub type PersistFut = Pin<Box<dyn Future<Output = anyhow::Result<()>> + Send>>;
pub type GetRedis = Arc<dyn Fn() -> RedisFuture + Send + Sync>;
/// Decoded XREADGROUP reply: (stream, [(entry_id, [field, value, …])]).
pub type StreamEntries = Vec<(String, Vec<(String, Vec<String>)>)>;

/// Spawn one consumer task per room, then wait for shutdown and join them.
pub async fn start(
    room_ids: Vec<uuid::Uuid>,
    get_redis: GetRedis,
    persist_fn: PersistFn,
    mut shutdown_rx: tokio::sync::broadcast::Receiver<()>,
    log: slog::Logger,
) {
    let consumer = consumer_id();
    slog::info!(log, "room-message worker starting"; "consumer" => %consumer, "rooms" => ?room_ids);

    let handles: Vec<_> = room_ids
        .into_iter()
        .map(|room_id| {
            let get_redis = get_redis.clone();
            let persist_fn = persist_fn.clone();
            let shutdown = shutdown_rx.resubscribe();
            let consumer = consumer.clone();
            let log = log.clone();

            tokio::spawn(room_worker_task(
                room_id, consumer, get_redis, persist_fn, shutdown, log,
            ))
        })
        .collect();

    let _ = shutdown_rx.recv().await;

    for h in handles {
        let _ = h.await;
    }
}

/// Per-room consume loop: poll, batch-decode, persist, ack.
pub async fn room_worker_task(
    room_id: uuid::Uuid,
    consumer: String,
    get_redis: GetRedis,
    persist_fn: PersistFn,
    mut shutdown_rx: tokio::sync::broadcast::Receiver<()>,
    log:
slog::Logger,
) {
    let key = stream_key(room_id);
    slog::info!(log, "room worker task started"; "room_id" => %room_id);

    loop {
        tokio::select! {
            _ = shutdown_rx.recv() => {
                slog::info!(log, "room worker task shutting down"; "room_id" => %room_id);
                break;
            }
            _ = tokio::time::sleep(std::time::Duration::from_secs(FLUSH_INTERVAL_SECS)) => {}
        }

        match run_once(&key, &consumer, &get_redis, &persist_fn, &log).await {
            Ok(0) => {}
            Ok(n) => slog::debug!(log, "batch flushed"; "room_id" => %room_id, "n" => n),
            Err(e) => {
                slog::error!(log, "stream consume error"; "room_id" => %room_id, "error" => %e);
                // Back off briefly so a persistent failure doesn't spin.
                tokio::time::sleep(std::time::Duration::from_secs(2)).await;
            }
        }
    }
}

/// One consume/persist/ack round. Returns the number of entries handled.
async fn run_once(
    stream_key: &str,
    consumer: &str,
    get_redis: &GetRedis,
    persist_fn: &PersistFn,
    log: &slog::Logger,
) -> anyhow::Result<usize> {
    let mut conn: deadpool_redis::cluster::Connection = (get_redis)().await?;

    // Idempotent: BUSYGROUP (group already exists) errors are ignored.
    let _: Result<(), _> = redis::cmd("XGROUP")
        .arg("CREATE")
        .arg(stream_key)
        .arg(GROUP)
        .arg("0")
        .arg("MKSTREAM")
        .query_async(&mut conn)
        .await;

    let results: StreamEntries = redis::cmd("XREADGROUP")
        .arg("GROUP")
        .arg(GROUP)
        .arg(consumer)
        .arg("COUNT")
        .arg(BATCH_SIZE)
        .arg("BLOCK")
        .arg(1000)
        .arg("STREAMS")
        .arg(stream_key)
        .arg(">")
        .query_async(&mut conn)
        .await
        .unwrap_or_default();

    if results.is_empty() {
        return Ok(0);
    }

    let mut batch: Vec<(String, RoomMessageEnvelope)> = Vec::with_capacity(BATCH_SIZE);
    for (_stream_name, entries) in results {
        for (entry_id, field_values) in entries {
            if let Some(data) = extract_field(&field_values, "data") {
                match serde_json::from_str::<RoomMessageEnvelope>(&data) {
                    Ok(env) => batch.push((entry_id, env)),
                    Err(e) => slog::warn!(log, "malformed envelope";
                        "entry_id" => %entry_id, "error" => %e),
                }
            }
        }
    }

    if batch.is_empty() {
        return Ok(0);
    }

    let entry_ids: Vec<String> = batch.iter().map(|(id, _)| id.clone()).collect();
    let envelopes: Vec<RoomMessageEnvelope> = batch.into_iter().map(|(_, e)| e).collect();

    if let Err(e) = persist_fn(envelopes).await {
        slog::error!(log, "persist_fn failed — entries NOT acked (will retry)"; "error" => %e);
        return Err(e);
    }

    let mut conn: deadpool_redis::cluster::Connection = (get_redis)().await?;
    // IMPROVED: XACK is variadic, so acknowledge the whole batch in one
    // round trip instead of one command per entry. Ack failure stays
    // best-effort (entries would simply be redelivered), as before.
    let mut ack = redis::cmd("XACK");
    ack.arg(stream_key).arg(GROUP);
    for entry_id in &entry_ids {
        ack.arg(entry_id);
    }
    let _: Result<i64, _> = ack.query_async(&mut conn).await;

    slog::info!(log, "batch persisted and acked"; "n" => entry_ids.len());
    Ok(entry_ids.len())
}

/// Find `key` in a flat `[field, value, field, value, …]` list.
/// A trailing field with no value can never match (same as the original
/// pairwise-iterator version).
fn extract_field(values: &[String], key: &str) -> Option<String> {
    values
        .chunks_exact(2)
        .find(|pair| pair[0] == key)
        .map(|pair| pair[1].clone())
}

/// Email send function type.
pub type EmailSendFn = Arc<dyn Fn(Vec<EmailEnvelope>) -> EmailSendFut + Send + Sync>;
pub type EmailSendFut = Pin<Box<dyn Future<Output = anyhow::Result<()>> + Send>>;

/// Start the email worker that consumes from the email stream.
pub async fn start_email_worker(
    get_redis: GetRedis,
    send_fn: EmailSendFn,
    mut shutdown_rx: tokio::sync::broadcast::Receiver<()>,
    log: slog::Logger,
) {
    let consumer = format!("email-worker-{}", uuid::Uuid::new_v4());
    slog::info!(log, "email worker starting"; "consumer" => %consumer);

    loop {
        tokio::select! {
            _ = shutdown_rx.recv() => {
                slog::info!(log, "email worker shutting down");
                break;
            }
            _ = tokio::time::sleep(std::time::Duration::from_secs(FLUSH_INTERVAL_SECS)) => {}
        }

        match email_run_once(&consumer, &get_redis, &send_fn, &log).await {
            Ok(0) => {}
            Ok(n) => slog::debug!(log, "email batch processed"; "n" => n),
            Err(e) => {
                slog::error!(log, "email stream consume error"; "error" => %e);
                tokio::time::sleep(std::time::Duration::from_secs(2)).await;
            }
        }
    }
}

/// One email consume/send/ack round. Mirrors `run_once` for the email stream.
async fn email_run_once(
    consumer: &str,
    get_redis: &GetRedis,
    send_fn: &EmailSendFn,
    log: &slog::Logger,
) -> anyhow::Result<usize> {
    let mut conn: deadpool_redis::cluster::Connection = (get_redis)().await?;

    let _: Result<(), _> = redis::cmd("XGROUP")
        .arg("CREATE")
        .arg(EMAIL_STREAM_KEY)
        .arg(EMAIL_GROUP)
        .arg("0")
        .arg("MKSTREAM")
        .query_async(&mut conn)
        .await;

    let results: StreamEntries = redis::cmd("XREADGROUP")
        .arg("GROUP")
        .arg(EMAIL_GROUP)
        .arg(consumer)
        .arg("COUNT")
        .arg(BATCH_SIZE)
        .arg("BLOCK")
        .arg(1000)
        .arg("STREAMS")
        .arg(EMAIL_STREAM_KEY)
        .arg(">")
        .query_async(&mut conn)
        .await
        .unwrap_or_default();

    if results.is_empty() {
        return Ok(0);
    }

    let mut batch: Vec<(String, EmailEnvelope)> = Vec::with_capacity(BATCH_SIZE);
    for (_stream_name, entries) in results {
        for (entry_id, field_values) in entries {
            if let Some(data) = extract_field(&field_values, "data") {
                match serde_json::from_str::<EmailEnvelope>(&data) {
                    Ok(env) => batch.push((entry_id, env)),
                    Err(e) => slog::warn!(log, "malformed email envelope";
                        "entry_id" => %entry_id, "error" => %e),
                }
            }
        }
    }

    if batch.is_empty() {
        return Ok(0);
    }

    let entry_ids: Vec<String> = batch.iter().map(|(id, _)| id.clone()).collect();
    let envelopes: Vec<EmailEnvelope> = batch.into_iter().map(|(_, e)| e).collect();

    if let Err(e) = send_fn(envelopes).await {
        slog::error!(log, "email send_fn failed — entries NOT acked (will retry)"; "error" => %e);
        return Err(e);
    }

    let mut conn: deadpool_redis::cluster::Connection = (get_redis)().await?;
    // IMPROVED: single variadic XACK for the whole batch (see run_once).
    let mut ack = redis::cmd("XACK");
    ack.arg(EMAIL_STREAM_KEY).arg(EMAIL_GROUP);
    for entry_id in &entry_ids {
        ack.arg(entry_id);
    }
    let _: Result<i64, _> = ack.query_async(&mut conn).await;

    slog::info!(log, "email batch sent and acked"; "n" => entry_ids.len());
    Ok(entry_ids.len())
}

# --- libs/room/Cargo.toml ---
[package]
name = "room"
version.workspace = true
edition.workspace = true
authors.workspace = true
description.workspace = true
repository.workspace = true
readme.workspace = true
homepage.workspace = true
license.workspace = true
keywords.workspace = true
categories.workspace = true
documentation.workspace = true

[lib]
path = "src/lib.rs"
name = "room"

[dependencies]
models = { workspace = true }
db = { workspace = true }
session = { workspace = true }
queue = { workspace = true }
agent = { path = "../agent" }

serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true }
slog = { workspace = true }
chrono = { workspace = true, features = ["serde"] }
uuid = { workspace = true, features = ["serde", "v7", "v4"] }
sea-orm = { workspace = true }
anyhow = { workspace = true }
thiserror = { workspace = true }
tokio = { workspace = true, features = ["rt", "rt-multi-thread"] }
tokio-stream = { workspace = true }
futures = { workspace = true }
deadpool-redis = { workspace = true, features = ["rt_tokio_1", "cluster-async", "cluster"] }
utoipa = { workspace = true, features = ["uuid", "chrono"] }
metrics = "0.21"
regex-lite = "0.1.6"
redis = { workspace = true, features = ["tokio-comp", "connection-manager"] }
async-openai = { workspace = true }
hostname = "0.4"
dashmap = "7.0.0-rc2"
lru = "0.12.0"

[lints]
workspace = true

# --- libs/room/src/ai.rs follows ---
use crate::error::RoomError;
use crate::service::RoomService;
use crate::ws_context::WsUserContext;
use chrono::Utc;
use models::rooms::room_ai;
use sea_orm::*;
use uuid::Uuid;

impl RoomService {
    /// List the AI model configurations attached to a room (members only).
    pub async fn room_ai_list(
        &self,
        room_id: Uuid,
        ctx: &WsUserContext,
    ) -> Result<Vec<super::RoomAiResponse>, RoomError> {
        let user_id = ctx.user_id;
        self.require_room_member(room_id, user_id).await?;

        let rows = room_ai::Entity::find()
            .filter(room_ai::Column::Room.eq(room_id))
            .all(&self.db)
            .await?;

        Ok(rows.into_iter().map(super::RoomAiResponse::from).collect())
    }

    /// Create or update a room's AI model configuration (admins only).
    /// On update, only fields present in the request are touched.
    pub async fn room_ai_upsert(
        &self,
        room_id: Uuid,
        request: super::RoomAiUpsertRequest,
        ctx: &WsUserContext,
    ) -> Result<super::RoomAiResponse, RoomError> {
        let user_id = ctx.user_id;
        self.require_room_admin(room_id, user_id).await?;

        let now = Utc::now();
        let existing = room_ai::Entity::find_by_id((room_id, request.model))
            .one(&self.db)
            .await?;

        let model = match existing {
            Some(existing) => {
                let mut active: room_ai::ActiveModel = existing.into();
                if request.version.is_some() {
                    active.version = Set(request.version);
                }
                if request.history_limit.is_some() {
                    active.history_limit = Set(request.history_limit);
                }
                if request.system_prompt.is_some() {
                    active.system_prompt = Set(request.system_prompt);
                }
                if request.temperature.is_some() {
                    active.temperature = Set(request.temperature);
                }
                if request.max_tokens.is_some() {
                    active.max_tokens = Set(request.max_tokens);
                }
                if let Some(use_exact) = request.use_exact {
                    active.use_exact = Set(use_exact);
                }
                if let Some(think) = request.think {
                    active.think = Set(think);
                }
                if let Some(stream) = request.stream {
                    active.stream = Set(stream);
                }
                if request.min_score.is_some() {
                    active.min_score = Set(request.min_score);
                }
                active.updated_at = Set(now);
                active.update(&self.db).await?
            }
            None => {
                room_ai::ActiveModel {
                    room: Set(room_id),
                    model: Set(request.model),
                    version: Set(request.version),
                    call_count: Set(0),
                    last_call_at: Set(None),
                    history_limit: Set(request.history_limit),
                    system_prompt: Set(request.system_prompt),
                    temperature: Set(request.temperature),
                    max_tokens: Set(request.max_tokens),
                    use_exact: Set(request.use_exact.unwrap_or(false)),
                    think: Set(request.think.unwrap_or(false)),
                    stream: Set(request.stream.unwrap_or(false)),
                    min_score: Set(request.min_score),
                    created_at: Set(now),
                    updated_at: Set(now),
                }
                .insert(&self.db)
                .await?
            }
        };

        Ok(super::RoomAiResponse::from(model))
    }

    /// Remove a model configuration from a room (admins only).
    pub async fn room_ai_delete(
        &self,
        room_id: Uuid,
        model_id: Uuid,
        ctx: &WsUserContext,
    ) -> Result<(), RoomError> {
        let user_id = ctx.user_id;
        self.require_room_admin(room_id, user_id).await?;

        room_ai::Entity::delete_by_id((room_id, model_id))
            .exec(&self.db)
            .await?;
        Ok(())
    }
}

// --- libs/room/src/category.rs ---
use crate::error::RoomError;
use crate::service::RoomService;
use crate::ws_context::WsUserContext;
use chrono::Utc;
use models::rooms::{room, room_category};
use queue::ProjectRoomEvent;
use sea_orm::prelude::Expr;
use sea_orm::*;
use uuid::Uuid;

impl RoomService {
    /// List a project's room categories ordered by position.
    pub async fn room_category_list(
        &self,
        project_name: String,
        ctx: &WsUserContext,
    ) -> Result<Vec<super::RoomCategoryResponse>, RoomError> {
        let user_id = ctx.user_id;
        let project = self.utils_find_project_by_name(project_name).await?;
        self.check_project_access(project.id, user_id).await?;

        let rows = room_category::Entity::find()
            .filter(room_category::Column::Project.eq(project.id))
            .order_by_asc(room_category::Column::Position)
            .all(&self.db)
            .await?;

        Ok(rows
            .into_iter()
            .map(super::RoomCategoryResponse::from)
            .collect())
    }

    /// Create a category; `position` defaults to max + 1 within the project.
    pub async fn room_category_create(
        &self,
        project_name: String,
        request: super::RoomCategoryCreateRequest,
        ctx: &WsUserContext,
    ) -> Result<super::RoomCategoryResponse, RoomError> {
        let user_id = ctx.user_id;
        let project = self.utils_find_project_by_name(project_name).await?;
        self.require_project_admin(project.id, user_id).await?;

        Self::validate_name(&request.name, super::MAX_CATEGORY_NAME_LEN)?;

        let position = match request.position {
            Some(position) => position,
            None => {
                // NOTE(review): tuple generic was lost in the paste; `i32` is
                // a reconstruction of the position type — confirm vs schema.
                let max_position: Option<Option<i32>> = room_category::Entity::find()
                    .filter(room_category::Column::Project.eq(project.id))
                    .select_only()
                    .column_as(room_category::Column::Position.max(), "max_position")
                    .into_tuple::<Option<i32>>()
                    .one(&self.db)
                    .await?;
                max_position.flatten().unwrap_or(0) + 1
            }
        };

        let model = room_category::ActiveModel {
            id: Set(Uuid::now_v7()),
            project: Set(project.id),
            name: Set(request.name),
            position: Set(position),
            created_by: Set(user_id),
            created_at: Set(Utc::now()),
        }
        .insert(&self.db)
        .await?;

        let event = ProjectRoomEvent {
            event_type: super::RoomEventType::CategoryCreated.as_str().into(),
            project_id: project.id,
            room_id: None,
            category_id: Some(model.id),
            message_id: None,
            seq: None,
            timestamp: Utc::now(),
        };
        let _ = self
            .queue
            .publish_project_room_event(project.id, event)
            .await;

        Ok(super::RoomCategoryResponse::from(model))
    }

    /// Rename / reposition a category (project admins only).
    pub async fn room_category_update(
        &self,
        category_id: Uuid,
        request: super::RoomCategoryUpdateRequest,
        ctx: &WsUserContext,
    ) -> Result<super::RoomCategoryResponse, RoomError> {
        let user_id = ctx.user_id;
        let model = room_category::Entity::find_by_id(category_id)
            .one(&self.db)
            .await?
            .ok_or_else(|| RoomError::NotFound("Room category not found".to_string()))?;
        self.require_project_admin(model.project, user_id).await?;

        let mut active: room_category::ActiveModel = model.into();
        if let Some(name) = request.name {
            active.name = Set(name);
        }
        if let Some(position) = request.position {
            active.position = Set(position);
        }
        let updated = active.update(&self.db).await?;

        let event = ProjectRoomEvent {
            event_type: super::RoomEventType::CategoryUpdated.as_str().into(),
            project_id: updated.project,
            room_id: None,
            category_id: Some(updated.id),
            message_id: None,
            seq: None,
            timestamp: Utc::now(),
        };
        let _ = self
            .queue
            .publish_project_room_event(updated.project, event)
            .await;

        Ok(super::RoomCategoryResponse::from(updated))
    }

    /// Delete a category (project admins only). [body continues past this chunk]
    pub async fn room_category_delete(
        &self,
        category_id: Uuid,
        ctx: &WsUserContext,
    ) -> Result<(), RoomError> {
        let user_id = ctx.user_id;
        let model = room_category::Entity::find_by_id(category_id)
            .one(&self.db)
            .await?
+ .ok_or_else(|| RoomError::NotFound("Room category not found".to_string()))?; + self.require_project_admin(model.project, user_id).await?; + let project_id = model.project; + + let txn = self.db.begin().await?; + + room::Entity::update_many() + .col_expr(room::Column::Category, Expr::value(None::)) + .filter(room::Column::Category.eq(category_id)) + .exec(&txn) + .await?; + + room_category::Entity::delete_by_id(category_id) + .exec(&txn) + .await?; + + txn.commit().await?; + + let event = ProjectRoomEvent { + event_type: super::RoomEventType::CategoryDeleted.as_str().into(), + project_id, + room_id: None, + category_id: Some(category_id), + message_id: None, + seq: None, + timestamp: Utc::now(), + }; + let _ = self + .queue + .publish_project_room_event(project_id, event) + .await; + + Ok(()) + } +} diff --git a/libs/room/src/connection.rs b/libs/room/src/connection.rs new file mode 100644 index 0000000..4edbdf2 --- /dev/null +++ b/libs/room/src/connection.rs @@ -0,0 +1,998 @@ +use std::collections::HashMap; +use std::future::Future; +use std::pin::Pin; +use std::sync::Arc; +use std::thread; +use std::time::{Duration, Instant}; +use tokio::sync::{RwLock, broadcast}; +use uuid::Uuid; + +use db::database::AppDatabase; +use models::rooms::{MessageContentType, MessageSenderType, room_message}; +use queue::{AgentTaskEvent, ProjectRoomEvent, RoomMessageEnvelope, RoomMessageEvent, RoomMessageStreamChunkEvent}; +use sea_orm::{ColumnTrait, EntityTrait, QueryFilter, Set}; + +use crate::error::RoomError; +use crate::metrics::RoomMetrics; +use crate::types::NotificationEvent; + +const BROADCAST_CAPACITY: usize = 10000; +const SHUTDOWN_CHANNEL_CAPACITY: usize = 16; +const CONNECTION_COOLDOWN: Duration = Duration::from_secs(30); +const MAX_CONNECTIONS_PER_ROOM: usize = 50000; +const MAX_CONNECTIONS_PER_PROJECT: usize = 50000; +const MAX_CONNECTIONS_PER_USER: usize = 50000; +const BATCH_SIZE: usize = 100; +const ROOM_IDLE_TIMEOUT: Duration = Duration::from_secs(30 * 60); + +pub 
struct RoomConnectionManager { + room_inner: RwLock>>>, + project_inner: RwLock>>>, + user_inner: RwLock>>>, + user_notification_inner: RwLock>>>, + /// Broadcast channel for agent task events per project. + task_inner: RwLock>>>, + pub metrics: Arc, + connection_rate: RwLock>, + shutdown_tx: broadcast::Sender<()>, + room_shutdown_txs: RwLock>>, + project_shutdown_txs: RwLock>>, + user_shutdown_txs: RwLock>>, + stream_inner: RwLock>>>, + room_stream_inner: RwLock>>>, + room_last_activity: RwLock>, + room_subscriber_count: RwLock>, + project_subscriber_count: RwLock>, + user_subscriber_count: RwLock>, +} + +impl RoomConnectionManager { + pub fn new(metrics: Arc) -> Self { + let (shutdown_tx, _) = broadcast::channel(SHUTDOWN_CHANNEL_CAPACITY); + Self { + #[allow(clippy::default_constructed_unit_structs)] + room_inner: RwLock::new(HashMap::new()), + #[allow(clippy::default_constructed_unit_structs)] + project_inner: RwLock::new(HashMap::new()), + #[allow(clippy::default_constructed_unit_structs)] + user_inner: RwLock::new(HashMap::new()), + #[allow(clippy::default_constructed_unit_structs)] + user_notification_inner: RwLock::new(HashMap::new()), + #[allow(clippy::default_constructed_unit_structs)] + task_inner: RwLock::new(HashMap::new()), + metrics, + #[allow(clippy::default_constructed_unit_structs)] + connection_rate: RwLock::new(HashMap::new()), + shutdown_tx, + #[allow(clippy::default_constructed_unit_structs)] + room_shutdown_txs: RwLock::new(HashMap::new()), + #[allow(clippy::default_constructed_unit_structs)] + project_shutdown_txs: RwLock::new(HashMap::new()), + #[allow(clippy::default_constructed_unit_structs)] + user_shutdown_txs: RwLock::new(HashMap::new()), + #[allow(clippy::default_constructed_unit_structs)] + stream_inner: RwLock::new(HashMap::new()), + #[allow(clippy::default_constructed_unit_structs)] + room_stream_inner: RwLock::new(HashMap::new()), + #[allow(clippy::default_constructed_unit_structs)] + room_last_activity: 
RwLock::new(HashMap::new()), + #[allow(clippy::default_constructed_unit_structs)] + room_subscriber_count: RwLock::new(HashMap::new()), + #[allow(clippy::default_constructed_unit_structs)] + project_subscriber_count: RwLock::new(HashMap::new()), + #[allow(clippy::default_constructed_unit_structs)] + user_subscriber_count: RwLock::new(HashMap::new()), + } + } + + pub async fn check_room_connection_rate( + &self, + room_id: Uuid, + user_id: Uuid, + ) -> Result<(), RoomError> { + let mut map = self.connection_rate.write().await; + let key = (room_id, user_id); + if let Some(last) = map.get(&key) { + if last.elapsed() < CONNECTION_COOLDOWN { + return Err(RoomError::RateLimited(format!( + "Connection cooldown active, retry in {}s", + CONNECTION_COOLDOWN.saturating_sub(last.elapsed()).as_secs() + ))); + } + } + map.insert(key, Instant::now()); + Ok(()) + } + + pub async fn check_project_connection_rate( + &self, + project_id: Uuid, + user_id: Uuid, + ) -> Result<(), RoomError> { + let mut map = self.connection_rate.write().await; + let key = (project_id, user_id); + if let Some(last) = map.get(&key) { + if last.elapsed() < CONNECTION_COOLDOWN { + return Err(RoomError::RateLimited(format!( + "Connection cooldown active, retry in {}s", + CONNECTION_COOLDOWN.saturating_sub(last.elapsed()).as_secs() + ))); + } + } + map.insert(key, Instant::now()); + Ok(()) + } + + pub async fn check_user_connection_rate(&self, user_id: Uuid) -> Result<(), RoomError> { + let mut map = self.connection_rate.write().await; + let key = (Uuid::nil(), user_id); + if let Some(last) = map.get(&key) { + if last.elapsed() < CONNECTION_COOLDOWN { + return Err(RoomError::RateLimited(format!( + "Connection cooldown active, retry in {}s", + CONNECTION_COOLDOWN.saturating_sub(last.elapsed()).as_secs() + ))); + } + } + map.insert(key, Instant::now()); + Ok(()) + } + + pub async fn cleanup_rate_limit(&self) { + let mut map = self.connection_rate.write().await; + map.retain(|_, instant| instant.elapsed() < 
CONNECTION_COOLDOWN * 2); + const MAX_RATE_ENTRIES: usize = MAX_CONNECTIONS_PER_ROOM * 10; + if map.len() > MAX_RATE_ENTRIES { + let mut entries: Vec<_> = map.iter().collect(); + entries.sort_by(|a, b| a.1.cmp(b.1)); + let keep_count = entries.len() / 2; + let to_remove: Vec<_> = entries + .into_iter() + .take(keep_count) + .map(|(k, _)| *k) + .collect(); + for key in to_remove { + map.remove(&key); + } + } + drop(map); + self.cleanup_idle_rooms().await; + } + + pub async fn cleanup_idle_rooms(&self) { + let now = Instant::now(); + let activity = self.room_last_activity.read().await; + let idle_room_ids: Vec = activity + .iter() + .filter(|(_, last_time)| now.duration_since(**last_time) > ROOM_IDLE_TIMEOUT) + .map(|(room_id, _)| *room_id) + .collect(); + drop(activity); + + if idle_room_ids.is_empty() { + return; + } + + { + let mut counts = self.room_subscriber_count.write().await; + let mut rooms = self.room_inner.write().await; + for room_id in &idle_room_ids { + if let Some(sender) = rooms.remove(room_id) { + let count = counts.remove(&room_id).unwrap_or(1); + self.metrics.users_online.decrement(count as f64); + drop(sender); + } + } + } + + { + let mut txs = self.room_shutdown_txs.write().await; + for room_id in &idle_room_ids { + txs.remove(room_id); + } + } + + { + let mut activity = self.room_last_activity.write().await; + for room_id in &idle_room_ids { + activity.remove(room_id); + } + } + } + + pub fn subscribe_shutdown(&self) -> broadcast::Receiver<()> { + self.shutdown_tx.subscribe() + } + + pub fn trigger_shutdown(&self) { + let _ = self.shutdown_tx.send(()); + } + + pub async fn register_room(&self, room_id: Uuid) -> broadcast::Receiver<()> { + let mut txs = self.room_shutdown_txs.write().await; + if let Some(tx) = txs.get(&room_id) { + return tx.subscribe(); + } + let (tx, rx) = broadcast::channel(SHUTDOWN_CHANNEL_CAPACITY); + txs.insert(room_id, tx); + rx + } + + pub async fn shutdown_room(&self, room_id: Uuid) { + { + let txs = 
self.room_shutdown_txs.read().await; + if let Some(tx) = txs.get(&room_id) { + let _ = tx.send(()); + } + } + { + let mut counts = self.room_subscriber_count.write().await; + let count = counts.remove(&room_id).unwrap_or(0) as f64; + if count > 0.0 { + self.metrics.users_online.decrement(count); + } + } + { + let mut map = self.room_inner.write().await; + map.remove(&room_id); + } + { + let mut txs = self.room_shutdown_txs.write().await; + txs.remove(&room_id); + } + } + + pub async fn prune_stale_rooms(&self, active_room_ids: &[Uuid]) { + let mut txs = self.room_shutdown_txs.write().await; + txs.retain(|room_id, _| active_room_ids.contains(room_id)); + drop(txs); + let mut counts = self.room_subscriber_count.write().await; + counts.retain(|room_id, _| active_room_ids.contains(room_id)); + } + + pub async fn register_project(&self, project_id: Uuid) -> broadcast::Receiver<()> { + let mut txs = self.project_shutdown_txs.write().await; + if let Some(tx) = txs.get(&project_id) { + return tx.subscribe(); + } + let (tx, rx) = broadcast::channel(SHUTDOWN_CHANNEL_CAPACITY); + txs.insert(project_id, tx); + rx + } + + pub async fn shutdown_project(&self, project_id: Uuid) { + { + let txs = self.project_shutdown_txs.read().await; + if let Some(tx) = txs.get(&project_id) { + let _ = tx.send(()); + } + } + { + let mut map = self.project_inner.write().await; + map.remove(&project_id); + } + { + let mut txs = self.project_shutdown_txs.write().await; + txs.remove(&project_id); + } + } + + pub async fn prune_stale_projects(&self, active_project_ids: &[Uuid]) { + let mut txs = self.project_shutdown_txs.write().await; + txs.retain(|project_id, _| active_project_ids.contains(project_id)); + } + + pub async fn register_user(&self, user_id: Uuid) -> broadcast::Receiver<()> { + let mut txs = self.user_shutdown_txs.write().await; + if let Some(tx) = txs.get(&user_id) { + return tx.subscribe(); + } + let (tx, rx) = broadcast::channel(SHUTDOWN_CHANNEL_CAPACITY); + txs.insert(user_id, tx); 
+ rx + } + + pub async fn shutdown_user(&self, user_id: Uuid) { + { + let txs = self.user_shutdown_txs.read().await; + if let Some(tx) = txs.get(&user_id) { + let _ = tx.send(()); + } + } + { + let mut map = self.user_inner.write().await; + map.remove(&user_id); + } + { + let mut txs = self.user_shutdown_txs.write().await; + txs.remove(&user_id); + } + } + + pub async fn subscribe_user_notification( + &self, + user_id: Uuid, + ) -> broadcast::Receiver> { + let mut map = self.user_notification_inner.write().await; + + if let Some(sender) = map.get(&user_id) { + return sender.subscribe(); + } + + let (tx, rx) = broadcast::channel(BROADCAST_CAPACITY); + map.insert(user_id, tx); + rx + } + + pub async fn unsubscribe_user_notification(&self, user_id: Uuid) { + let mut map = self.user_notification_inner.write().await; + map.remove(&user_id); + } + + pub async fn push_user_notification(&self, user_id: Uuid, event: Arc) { + let map = self.user_notification_inner.read().await; + if let Some(sender) = map.get(&user_id) { + let _ = sender.send(event); + } + } + + pub async fn subscribe( + &self, + room_id: Uuid, + _user_id: Uuid, + ) -> Result>, RoomError> { + let mut map = self.room_inner.write().await; + if let Some(_sender) = map.get(&room_id) { + drop(map); + let mut counts = self.room_subscriber_count.write().await; + *counts.entry(room_id).or_insert(0) += 1; + let map = self.room_inner.read().await; + if let Some(sender) = map.get(&room_id) { + return Ok(sender.subscribe()); + } + return Err(RoomError::Internal( + "room disappeared during subscribe".into(), + )); + } + + if map.len() >= MAX_CONNECTIONS_PER_ROOM { + return Err(RoomError::RateLimited(format!( + "Room connection limit reached ({})", + MAX_CONNECTIONS_PER_ROOM + ))); + } + + let (tx, rx) = broadcast::channel(BROADCAST_CAPACITY); + map.insert(room_id, tx); + drop(map); + let mut counts = self.room_subscriber_count.write().await; + counts.insert(room_id, 1); + self.metrics.users_online.increment(1.0); + 
Ok(rx) + } + + pub async fn unsubscribe(&self, room_id: Uuid, _user_id: Uuid) { + let mut counts = self.room_subscriber_count.write().await; + let count = counts.entry(room_id).or_insert(0); + if *count > 0 { + *count -= 1; + self.metrics.users_online.decrement(1.0); + } + if *count == 0 { + counts.remove(&room_id); + drop(counts); + let mut map = self.room_inner.write().await; + map.remove(&room_id); + } + } + + pub async fn broadcast(&self, room_id: Uuid, event: RoomMessageEvent) { + { + let mut activity = self.room_last_activity.write().await; + activity.insert(room_id, Instant::now()); + } + + let map = self.room_inner.read().await; + if let Some(sender) = map.get(&room_id) { + let event = Arc::new(event); + if sender.send(event).is_err() { + self.metrics.broadcasts_dropped.increment(1); + } + } + } + + pub async fn subscribe_project( + &self, + project_id: Uuid, + _user_id: Uuid, + ) -> Result>, RoomError> { + let mut map = self.project_inner.write().await; + if map.get(&project_id).is_some() { + drop(map); + let mut counts = self.project_subscriber_count.write().await; + *counts.entry(project_id).or_insert(0) += 1; + let map = self.project_inner.read().await; + if let Some(sender) = map.get(&project_id) { + return Ok(sender.subscribe()); + } + return Err(RoomError::Internal("project channel disappeared".into())); + } + + if map.len() >= MAX_CONNECTIONS_PER_PROJECT { + return Err(RoomError::RateLimited(format!( + "Project connection limit reached ({})", + MAX_CONNECTIONS_PER_PROJECT + ))); + } + + let (tx, rx) = broadcast::channel(BROADCAST_CAPACITY); + map.insert(project_id, tx); + drop(map); + let mut counts = self.project_subscriber_count.write().await; + counts.insert(project_id, 1); + Ok(rx) + } + + pub async fn unsubscribe_project(&self, project_id: Uuid, _user_id: Uuid) { + let mut counts = self.project_subscriber_count.write().await; + let count = counts.entry(project_id).or_insert(0); + if *count > 0 { + *count -= 1; + } + if *count == 0 { + 
counts.remove(&project_id); + drop(counts); + let mut map = self.project_inner.write().await; + map.remove(&project_id); + } + } + + pub async fn broadcast_project(&self, project_id: Uuid, event: ProjectRoomEvent) { + let map = self.project_inner.read().await; + if let Some(sender) = map.get(&project_id) { + let event = Arc::new(event); + if sender.send(event).is_err() { + self.metrics.broadcasts_dropped.increment(1); + } + } + } + + /// Broadcast an agent task event to all WS clients subscribed to this project. + pub async fn broadcast_agent_task(&self, project_id: Uuid, event: AgentTaskEvent) { + let map = self.task_inner.read().await; + if let Some(sender) = map.get(&project_id) { + let event = Arc::new(event); + if sender.send(event).is_err() { + self.metrics.broadcasts_dropped.increment(1); + } + } + } + + /// Subscribe to agent task events for a project. + /// Returns a broadcast receiver that yields task events as they occur. + pub async fn subscribe_task_events( + &self, + project_id: Uuid, + ) -> Result>, RoomError> { + let mut map = self.task_inner.write().await; + + if let Some(sender) = map.get(&project_id).cloned() { + drop(map); + return Ok(sender.subscribe()); + } + + let (tx, rx) = broadcast::channel(BROADCAST_CAPACITY); + map.insert(project_id, tx); + Ok(rx) + } + + pub async fn subscribe_user( + &self, + user_id: Uuid, + ) -> Result>, RoomError> { + let mut map = self.user_inner.write().await; + + if let Some(_sender) = map.get(&user_id) { + drop(map); + let mut counts = self.user_subscriber_count.write().await; + *counts.entry(user_id).or_insert(0) += 1; + let map = self.user_inner.read().await; + if let Some(sender) = map.get(&user_id) { + return Ok(sender.subscribe()); + } + return Err(RoomError::Internal("user channel disappeared".into())); + } + + if map.len() >= MAX_CONNECTIONS_PER_USER { + return Err(RoomError::RateLimited(format!( + "User connection limit reached ({})", + MAX_CONNECTIONS_PER_USER + ))); + } + + let (tx, rx) = 
broadcast::channel(BROADCAST_CAPACITY); + map.insert(user_id, tx); + drop(map); + let mut counts = self.user_subscriber_count.write().await; + counts.insert(user_id, 1); + self.metrics.users_online.increment(1.0); + Ok(rx) + } + + pub async fn unsubscribe_user(&self, user_id: Uuid) { + let mut counts = self.user_subscriber_count.write().await; + let count = counts.entry(user_id).or_insert(0); + if *count > 0 { + *count -= 1; + self.metrics.users_online.decrement(1.0); + } + if *count == 0 { + counts.remove(&user_id); + drop(counts); + let mut map = self.user_inner.write().await; + map.remove(&user_id); + } + } + + pub async fn broadcast_to_user(&self, user_id: Uuid, event: ProjectRoomEvent) { + let map = self.user_inner.read().await; + if let Some(sender) = map.get(&user_id) { + let event = Arc::new(event); + if sender.send(event).is_err() { + self.metrics.broadcasts_dropped.increment(1); + } + } + } + + pub async fn register_stream_channel( + &self, + message_id: Uuid, + ) -> broadcast::Receiver> { + let mut map = self.stream_inner.write().await; + if let Some(tx) = map.get(&message_id) { + return tx.subscribe(); + } + let (tx, rx) = broadcast::channel(BROADCAST_CAPACITY); + map.insert(message_id, tx); + rx + } + + pub async fn subscribe_stream( + &self, + message_id: Uuid, + ) -> Option>> { + let map = self.stream_inner.read().await; + map.get(&message_id).map(|tx| tx.subscribe()) + } + + pub async fn subscribe_room_stream( + &self, + room_id: Uuid, + ) -> broadcast::Receiver> { + let mut map = self.room_stream_inner.write().await; + if let Some(tx) = map.get(&room_id) { + return tx.subscribe(); + } + let (tx, rx) = broadcast::channel(BROADCAST_CAPACITY); + map.insert(room_id, tx); + rx + } + + pub async fn broadcast_stream_chunk(&self, event: RoomMessageStreamChunkEvent) { + let event = Arc::new(event); + let is_final_chunk = event.done; + + let map = self.stream_inner.read().await; + if let Some(tx) = map.get(&event.message_id) { + let _ = 
tx.send(Arc::clone(&event)); + } + + drop(map); + let map = self.room_stream_inner.read().await; + if let Some(tx) = map.get(&event.room_id) { + let _ = tx.send(Arc::clone(&event)); + } + + if is_final_chunk { + drop(map); + let mut map = self.room_stream_inner.write().await; + map.remove(&event.room_id); + } + } + + pub async fn close_stream_channel(&self, message_id: Uuid) { + let mut map = self.stream_inner.write().await; + map.remove(&message_id); + } +} + +fn parse_sender_type(s: &str) -> MessageSenderType { + match s { + "member" => MessageSenderType::Member, + "admin" => MessageSenderType::Admin, + "owner" => MessageSenderType::Owner, + "ai" => MessageSenderType::Ai, + "system" => MessageSenderType::System, + "tool" => MessageSenderType::Tool, + "guest" => MessageSenderType::Guest, + _ => MessageSenderType::Member, + } +} + +fn parse_content_type(s: &str) -> MessageContentType { + match s { + "text" => MessageContentType::Text, + "image" => MessageContentType::Image, + "audio" => MessageContentType::Audio, + "video" => MessageContentType::Video, + "file" => MessageContentType::File, + _ => MessageContentType::Text, + } +} + +pub type PersistFn = Arc< + dyn Fn(Vec) -> Pin> + Send>> + + Send + + Sync, +>; + +use dashmap::DashMap; + +pub type DedupCache = Arc>; + +const DEDUP_CACHE_TTL: Duration = Duration::from_secs(300); + +pub fn cleanup_dedup_cache(cache: &DedupCache) { + let cutoff = Instant::now() - DEDUP_CACHE_TTL; + cache.retain(|_, inserted_at| *inserted_at > cutoff); +} + +pub fn make_persist_fn( + db: AppDatabase, + metrics: Arc, + dedup_cache: DedupCache, +) -> PersistFn { + Arc::new(move |envelopes: Vec| { + let db = db.clone(); + let metrics = metrics.clone(); + let cache = dedup_cache.clone(); + Box::pin(async move { + for chunk in envelopes.chunks(BATCH_SIZE) { + let mut models_to_insert = Vec::new(); + let mut ids_to_dedup: Vec = Vec::new(); + + for env in chunk { + if cache.contains_key(&env.id) { + metrics.incr_duplicates_skipped(); + 
continue; + } + ids_to_dedup.push(env.id); + } + + let existing_ids: std::collections::HashSet = + if !ids_to_dedup.is_empty() { + room_message::Entity::find() + .filter(room_message::Column::Id.is_in(ids_to_dedup)) + .into_model::() + .all(&db) + .await? + .into_iter() + .map(|m| m.id) + .collect() + } else { + std::collections::HashSet::new() + }; + + for env in chunk { + if cache.contains_key(&env.id) { + continue; + } + cache.insert(env.id, Instant::now()); + + if existing_ids.contains(&env.id) { + metrics.incr_duplicates_skipped(); + continue; + } + + let sender_type = parse_sender_type(&env.sender_type); + let content_type = parse_content_type(&env.content_type); + + models_to_insert.push(room_message::ActiveModel { + id: Set(env.id), + seq: Set(env.seq), + room: Set(env.room_id), + sender_type: Set(sender_type), + sender_id: Set(env.sender_id), + thread: Set(env.thread_id), + content: Set(env.content.clone()), + content_type: Set(content_type), + edited_at: Set(None), + send_at: Set(env.send_at.clone()), + revoked: Set(None), + revoked_by: Set(None), + in_reply_to: Set(env.in_reply_to), + }); + } + + if !models_to_insert.is_empty() { + let count = models_to_insert.len() as u64; + room_message::Entity::insert_many(models_to_insert) + .exec(&db) + .await?; + metrics.messages_persisted.increment(count); + } + } + Ok(()) + }) + }) +} + +pub type RedisFuture = + Pin> + Send>>; + +pub fn extract_get_redis( + producer: queue::MessageProducer, +) -> Arc RedisFuture + Send + Sync> { + Arc::new(move || { + let get_redis_fn = producer.get_redis.clone(); + Box::pin(async move { + let handle = get_redis_fn(); + match handle.await { + Ok(conn) => conn, + Err(_) => anyhow::bail!("redis pool task panicked"), + } + }) as RedisFuture + }) +} + +fn start_pubsub_thread( + redis_url: String, + channel: String, + relay_tx: tokio::sync::mpsc::Sender>, + mut shutdown_rx: broadcast::Receiver<()>, + log: slog::Logger, + _on_msg: F, +) where + F: Fn(Vec) -> Fut + Send + Sync + 
'static, + Fut: Future + Send, +{ + thread::Builder::new() + .name(format!("redis-pubsub-{}", &channel[..channel.len().min(32)])) + .spawn(move || { + let rt = tokio::runtime::Builder::new_current_thread() + .enable_all() + .build() + .expect("pubsub thread runtime"); + rt.block_on(async { + let redis_url = redis_url.clone(); + loop { + if shutdown_rx.try_recv().is_ok() { + slog::info!(log, "pubsub thread shutting down before connect"; "channel" => %channel); + break; + } + + let client = match redis::Client::open(redis_url.as_str()) { + Ok(c) => c, + Err(e) => { + slog::error!(log, "pubsub redis client open failed"; "channel" => %channel, "error" => %e); + thread::sleep(Duration::from_secs(1)); + continue; + } + }; + + let mut pubsub = match client.get_async_pubsub().await { + Ok(p) => p, + Err(e) => { + slog::error!(log, "pubsub connection failed"; "channel" => %channel, "error" => %e); + thread::sleep(Duration::from_secs(1)); + continue; + } + }; + + match pubsub.subscribe(&channel).await { + Ok(_) => slog::info!(log, "pubsub subscribed"; "channel" => %channel), + Err(e) => { + slog::error!(log, "pubsub subscribe failed"; "channel" => %channel, "error" => %e); + thread::sleep(Duration::from_secs(1)); + continue; + } + } + + let mut stream = pubsub.on_message(); + + loop { + if shutdown_rx.try_recv().is_ok() { + slog::info!(log, "pubsub thread shutting down"; "channel" => %channel); + return; + } + + let msg = tokio::time::timeout( + Duration::from_millis(500), + futures::StreamExt::next(&mut stream), + ) + .await; + + match msg { + Ok(Some(msg)) => { + let payload = msg.get_payload_bytes(); + slog::debug!(log, "pubsub received"; "channel" => %channel, "len" => payload.len()); + if relay_tx.send(payload.to_vec()).await.is_err() { + slog::warn!(log, "pubsub relay channel closed"; "channel" => %channel); + return; + } + } + Ok(None) => { + slog::warn!(log, "pubsub stream ended, will reconnect"; "channel" => %channel); + break; + } + Err(_) => {} + } + } + + 
slog::warn!(log, "pubsub connection lost, reconnecting"; "channel" => %channel); + } + }); + }) + .expect("pubsub thread spawn"); +} + +pub async fn subscribe_room_events( + redis_url: String, + manager: Arc, + room_id: Uuid, + log: slog::Logger, + mut shutdown_rx: broadcast::Receiver<()>, +) { + let channel = format!("room:pub:{}", room_id); + let (tx, mut rx) = tokio::sync::mpsc::channel::>(1024); + + slog::info!(log, "starting room pubsub subscriber"; "room_id" => %room_id, "channel" => %channel); + + let thread_log = log.clone(); + let thread_channel = channel.clone(); + let thread_shutdown = shutdown_rx.resubscribe(); + start_pubsub_thread( + redis_url, + thread_channel, + tx, + thread_shutdown, + thread_log, + |_| async {}, + ); + + loop { + tokio::select! { + _ = shutdown_rx.recv() => { + slog::info!(log, "room subscriber shutting down"; "room_id" => %room_id); + break; + } + payload = rx.recv() => { + match payload { + Some(data) => { + match serde_json::from_slice::(&data) { + Ok(event) => { + manager.broadcast(room_id, event).await; + } + Err(e) => { + slog::warn!(log, "malformed RoomMessageEvent"; "error" => %e); + } + } + } + None => { + slog::warn!(log, "pubsub relay channel closed"; "room_id" => %room_id); + break; + } + } + } + } + } + slog::info!(log, "room subscriber stopped"; "room_id" => %room_id); +} + +pub async fn subscribe_project_room_events( + redis_url: String, + manager: Arc, + project_id: Uuid, + log: slog::Logger, + mut shutdown_rx: broadcast::Receiver<()>, +) { + let channel = format!("project:pub:{}", project_id); + let (tx, mut rx) = tokio::sync::mpsc::channel::>(1024); + + slog::info!(log, "starting project pubsub subscriber"; "project_id" => %project_id, "channel" => %channel); + + let thread_log = log.clone(); + let thread_channel = channel.clone(); + let thread_shutdown = shutdown_rx.resubscribe(); + start_pubsub_thread( + redis_url, + thread_channel, + tx, + thread_shutdown, + thread_log, + |_| async {}, + ); + + loop { + 
tokio::select! { + _ = shutdown_rx.recv() => { + slog::info!(log, "project subscriber shutting down"; "project_id" => %project_id); + break; + } + payload = rx.recv() => { + match payload { + Some(data) => { + match serde_json::from_slice::(&data) { + Ok(event) => { + manager.broadcast_project(project_id, event).await; + } + Err(e) => { + slog::warn!(log, "malformed ProjectRoomEvent"; "error" => %e); + } + } + } + None => { + slog::warn!(log, "project pubsub relay channel closed"; "project_id" => %project_id); + break; + } + } + } + } + } + slog::info!(log, "project subscriber stopped"; "project_id" => %project_id); +} + +/// Subscribe to Redis Pub/Sub `task:pub:{project_id}` and relay events to +/// `RoomConnectionManager::broadcast_agent_task()` so all WS clients get notified. +pub async fn subscribe_task_events_fn( + redis_url: String, + manager: Arc, + project_id: Uuid, + log: slog::Logger, + mut shutdown_rx: broadcast::Receiver<()>, +) { + let channel = format!("task:pub:{}", project_id); + let (tx, mut rx) = tokio::sync::mpsc::channel::>(1024); + + slog::info!(log, "starting task pubsub subscriber"; "project_id" => %project_id, "channel" => %channel); + + let thread_log = log.clone(); + let thread_channel = channel.clone(); + let thread_shutdown = shutdown_rx.resubscribe(); + start_pubsub_thread( + redis_url, + thread_channel, + tx, + thread_shutdown, + thread_log, + |_| async {}, + ); + + loop { + tokio::select! 
{ + _ = shutdown_rx.recv() => { + slog::info!(log, "task subscriber shutting down"; "project_id" => %project_id); + break; + } + payload = rx.recv() => { + match payload { + Some(data) => { + match serde_json::from_slice::(&data) { + Ok(event) => { + manager.broadcast_agent_task(project_id, event).await; + } + Err(e) => { + slog::warn!(log, "malformed AgentTaskEvent"; "error" => %e); + } + } + } + None => { + slog::warn!(log, "task pubsub relay channel closed"; "project_id" => %project_id); + break; + } + } + } + } + } + slog::info!(log, "task subscriber stopped"; "project_id" => %project_id); +} diff --git a/libs/room/src/draft_and_history.rs b/libs/room/src/draft_and_history.rs new file mode 100644 index 0000000..d8038ae --- /dev/null +++ b/libs/room/src/draft_and_history.rs @@ -0,0 +1,198 @@ +use crate::error::RoomError; +use crate::service::RoomService; +use crate::ws_context::WsUserContext; +use chrono::Utc; +use models::rooms::NotificationType; +use models::rooms::room_message_edit_history; +use models::users::user as user_model; +use sea_orm::*; +use uuid::Uuid; + +#[derive(Debug, Clone, serde::Serialize, utoipa::ToSchema)] +pub struct MessageEditHistoryEntry { + pub old_content: String, + pub new_content: String, + pub edited_at: chrono::DateTime, +} + +#[derive(Debug, Clone, serde::Serialize, utoipa::ToSchema)] +pub struct MessageEditHistoryResponse { + pub message_id: Uuid, + pub history: Vec, + pub total_edits: i64, +} + +#[derive(Debug, Clone, serde::Serialize, utoipa::ToSchema)] +pub struct MentionNotificationResponse { + pub message_id: Uuid, + pub mentioned_by: Uuid, + pub mentioned_by_name: String, + pub content_preview: String, + pub room_id: Uuid, + pub room_name: String, + pub created_at: chrono::DateTime, +} + +#[derive(Debug, Clone, serde::Serialize, utoipa::ToSchema)] +pub struct DraftResponse { + pub room_id: Uuid, + pub content: String, + pub saved_at: chrono::DateTime, +} + +#[derive(Debug, Clone, serde::Deserialize)] +pub struct 
DraftSaveRequest { + pub content: String, +} + +impl RoomService { + pub async fn save_message_edit_history( + &self, + message_id: Uuid, + user_id: Uuid, + old_content: String, + new_content: String, + ) -> Result<(), RoomError> { + let history = room_message_edit_history::ActiveModel { + id: Set(Uuid::now_v7()), + message: Set(message_id), + user: Set(user_id), + old_content: Set(old_content), + new_content: Set(new_content), + edited_at: Set(Utc::now()), + }; + + history.insert(&self.db).await?; + + Ok(()) + } + + pub async fn get_message_edit_history( + &self, + message_id: Uuid, + ctx: &WsUserContext, + ) -> Result { + let user_id = ctx.user_id; + + let message = models::rooms::room_message::Entity::find_by_id(message_id) + .one(&self.db) + .await? + .ok_or_else(|| RoomError::NotFound("Message not found".to_string()))?; + + self.require_room_member(message.room, user_id).await?; + + let history = room_message_edit_history::Entity::find() + .filter(room_message_edit_history::Column::Message.eq(message_id)) + .order_by_asc(room_message_edit_history::Column::EditedAt) + .all(&self.db) + .await?; + + let total_edits = history.len() as i64; + let history_entries = history + .into_iter() + .map(|h| MessageEditHistoryEntry { + old_content: h.old_content, + new_content: h.new_content, + edited_at: h.edited_at, + }) + .collect(); + + Ok(MessageEditHistoryResponse { + message_id, + history: history_entries, + total_edits, + }) + } + + pub async fn get_mention_notifications( + &self, + limit: Option, + ctx: &WsUserContext, + ) -> Result, RoomError> { + let user_id = ctx.user_id; + + let limit = limit.unwrap_or(50); + + let notifications = models::rooms::room_notifications::Entity::find() + .filter(models::rooms::room_notifications::Column::UserId.eq(user_id)) + .filter( + models::rooms::room_notifications::Column::NotificationType + .eq(NotificationType::Mention), + ) + .order_by_desc(models::rooms::room_notifications::Column::CreatedAt) + .limit(limit) + .all(&self.db) 
+ .await?; + + let mut result = Vec::new(); + for notification in notifications { + let mentioned_by = + user_model::Entity::find_by_id(notification.related_user_id.unwrap_or_default()) + .one(&self.db) + .await?; + + let room_name = if let Some(room_id) = notification.room { + models::rooms::room::Entity::find_by_id(room_id) + .one(&self.db) + .await? + .map(|r| r.room_name) + .unwrap_or_else(|| "Unknown Room".to_string()) + } else { + "Unknown Room".to_string() + }; + + let mentioned_by_name = mentioned_by + .map(|u| u.display_name.unwrap_or(u.username)) + .unwrap_or_else(|| "Unknown User".to_string()); + + let content_preview = notification + .content + .unwrap_or_default() + .chars() + .take(100) + .collect(); + + result.push(MentionNotificationResponse { + message_id: notification.related_message_id.unwrap_or_default(), + mentioned_by: notification.related_user_id.unwrap_or_default(), + mentioned_by_name, + content_preview, + room_id: notification.room.unwrap_or_default(), + room_name, + created_at: notification.created_at, + }); + } + + Ok(result) + } + + pub async fn mark_mention_notifications_read( + &self, + ctx: &WsUserContext, + ) -> Result<(), RoomError> { + let user_id = ctx.user_id; + + use sea_orm::sea_query::Expr; + + let now = Utc::now(); + models::rooms::room_notifications::Entity::update_many() + .col_expr( + models::rooms::room_notifications::Column::IsRead, + Expr::value(true), + ) + .col_expr( + models::rooms::room_notifications::Column::ReadAt, + Expr::value(Some(now)), + ) + .filter(models::rooms::room_notifications::Column::UserId.eq(user_id)) + .filter( + models::rooms::room_notifications::Column::NotificationType + .eq(NotificationType::Mention), + ) + .filter(models::rooms::room_notifications::Column::IsRead.eq(false)) + .exec(&self.db) + .await?; + + Ok(()) + } +} diff --git a/libs/room/src/error.rs b/libs/room/src/error.rs new file mode 100644 index 0000000..dd069ff --- /dev/null +++ b/libs/room/src/error.rs @@ -0,0 +1,34 @@ +use 
thiserror::Error; + +#[derive(Error, Debug)] +pub enum RoomError { + #[error("Database error: {0}")] + Database(#[from] sea_orm::DbErr), + + #[error("Not found: {0}")] + NotFound(String), + + #[error("Unauthorized")] + Unauthorized, + + #[error("No power / permission denied")] + NoPower, + + #[error("Rate limited: {0}")] + RateLimited(String), + + #[error("Bad request: {0}")] + BadRequest(String), + + #[error("Role parse error")] + RoleParseError, + + #[error("Internal error: {0}")] + Internal(String), +} + +impl From for RoomError { + fn from(e: anyhow::Error) -> Self { + RoomError::Internal(e.to_string()) + } +} diff --git a/libs/room/src/helpers.rs b/libs/room/src/helpers.rs new file mode 100644 index 0000000..f576830 --- /dev/null +++ b/libs/room/src/helpers.rs @@ -0,0 +1,451 @@ +use crate::error::RoomError; +use crate::service::RoomService; +use models::agents::model as ai_model; +use models::projects::{MemberRole, project, project_history_name, project_members}; +use models::rooms::{ + MessageContentType, RoomMemberRole, room, room_ai, room_category, room_member, room_message, + room_notifications, room_pin, room_thread, +}; +use models::users::user as user_model; +use sea_orm::*; +use uuid::Uuid; + +impl From for super::RoomCategoryResponse { + fn from(value: room_category::Model) -> Self { + Self { + id: value.id, + project: value.project, + name: value.name, + position: value.position, + created_by: value.created_by, + created_at: value.created_at, + } + } +} + +impl From for super::RoomResponse { + fn from(value: room::Model) -> Self { + Self { + id: value.id, + project: value.project, + room_name: value.room_name, + public: value.public, + category: value.category, + created_by: value.created_by, + created_at: value.created_at, + last_msg_at: value.last_msg_at, + unread_count: 0, + } + } +} + +impl From for super::RoomMemberResponse { + fn from(value: room_member::Model) -> Self { + Self { + room: value.room, + user: value.user, + user_info: None, + 
role: value.role.to_string(), + first_msg_in: value.first_msg_in, + joined_at: value.joined_at, + last_read_seq: value.last_read_seq, + do_not_disturb: value.do_not_disturb, + dnd_start_hour: value.dnd_start_hour, + dnd_end_hour: value.dnd_end_hour, + } + } +} + +impl From for super::RoomMessageResponse { + fn from(value: room_message::Model) -> Self { + Self { + id: value.id, + seq: value.seq, + room: value.room, + sender_type: value.sender_type.to_string(), + sender_id: value.sender_id, + display_name: None, + thread: value.thread, + content: value.content, + content_type: value.content_type.to_string(), + edited_at: value.edited_at, + send_at: value.send_at, + revoked: value.revoked, + revoked_by: value.revoked_by, + in_reply_to: value.in_reply_to, + } + } +} + +impl From for super::RoomThreadResponse { + fn from(value: room_thread::Model) -> Self { + Self { + id: value.id, + room: value.room, + parent: value.parent, + created_by: value.created_by, + participants: value.participants, + last_message_at: value.last_message_at, + last_message_preview: value.last_message_preview, + created_at: value.created_at, + updated_at: value.updated_at, + } + } +} + +impl From for super::RoomPinResponse { + fn from(value: room_pin::Model) -> Self { + Self { + room: value.room, + message: value.message, + pinned_by: value.pinned_by, + pinned_at: value.pinned_at, + } + } +} + +impl From for super::RoomAiResponse { + fn from(value: room_ai::Model) -> Self { + Self { + room: value.room, + model: value.model, + version: value.version, + call_count: value.call_count, + last_call_at: value.last_call_at, + history_limit: value.history_limit, + system_prompt: value.system_prompt, + temperature: value.temperature, + max_tokens: value.max_tokens, + use_exact: value.use_exact, + think: value.think, + stream: value.stream, + min_score: value.min_score, + created_at: value.created_at, + updated_at: value.updated_at, + } + } +} + +impl From for super::NotificationResponse { + fn from(value: 
room_notifications::Model) -> Self { + Self { + id: value.id, + room: value.room, + project: value.project, + user_id: value.user_id, + user_info: None, + notification_type: value.notification_type.to_string(), + title: value.title, + content: value.content, + related_message_id: value.related_message_id, + related_user_id: value.related_user_id, + related_room_id: value.related_room_id, + metadata: value.metadata.unwrap_or(serde_json::json!({})), + is_read: value.is_read, + is_archived: value.is_archived, + created_at: value.created_at, + read_at: value.read_at, + expires_at: value.expires_at, + } + } +} + +impl RoomService { + pub(crate) fn parse_room_member_role(role: &str) -> Result { + match role { + "owner" => Ok(RoomMemberRole::Owner), + "admin" => Ok(RoomMemberRole::Admin), + "member" => Ok(RoomMemberRole::Member), + "guest" => Ok(RoomMemberRole::Guest), + _ => Err(RoomError::BadRequest("invalid room role".to_string())), + } + } + + pub(crate) fn parse_message_content_type( + content_type: Option, + ) -> Result { + match content_type + .unwrap_or_else(|| "text".to_string()) + .to_lowercase() + .as_str() + { + "text" => Ok(MessageContentType::Text), + "image" => Ok(MessageContentType::Image), + "audio" => Ok(MessageContentType::Audio), + "video" => Ok(MessageContentType::Video), + "file" => Ok(MessageContentType::File), + _ => Err(RoomError::BadRequest( + "invalid message content_type".to_string(), + )), + } + } + + pub(crate) async fn find_room_member( + &self, + room_id: Uuid, + user_id: Uuid, + ) -> Result, RoomError> { + room_member::Entity::find_by_id((room_id, user_id)) + .one(&self.db) + .await + .map_err(RoomError::from) + } + + pub(crate) async fn require_room_member_model( + &self, + room_id: Uuid, + user_id: Uuid, + ) -> Result { + self.find_room_member(room_id, user_id) + .await? 
+ .ok_or(RoomError::NoPower) + } + + pub(crate) fn is_room_admin(role: &RoomMemberRole) -> bool { + matches!(role, RoomMemberRole::Owner | RoomMemberRole::Admin) + } + + pub(crate) async fn require_room_admin( + &self, + room_id: Uuid, + user_id: Uuid, + ) -> Result { + let member = self.require_room_member_model(room_id, user_id).await?; + if Self::is_room_admin(&member.role) { + Ok(member) + } else { + Err(RoomError::NoPower) + } + } + + pub(crate) async fn require_project_admin( + &self, + project_id: Uuid, + user_id: Uuid, + ) -> Result { + let member = project_members::Entity::find() + .filter(project_members::Column::Project.eq(project_id)) + .filter(project_members::Column::User.eq(user_id)) + .one(&self.db) + .await? + .ok_or(RoomError::NoPower)?; + let role = member.scope_role().map_err(|_| RoomError::RoleParseError)?; + if matches!(role, MemberRole::Owner | MemberRole::Admin) { + Ok(member) + } else { + Err(RoomError::NoPower) + } + } + + pub(crate) async fn ensure_room_visible_for_user( + &self, + room: &room::Model, + user_id: Uuid, + ) -> Result<(), RoomError> { + if self.find_room_member(room.id, user_id).await?.is_some() { + return Ok(()); + } + let project_member = project_members::Entity::find() + .filter(project_members::Column::Project.eq(room.project)) + .filter(project_members::Column::User.eq(user_id)) + .one(&self.db) + .await?; + if room.public && project_member.is_some() { + Ok(()) + } else { + Err(RoomError::NoPower) + } + } + + pub(crate) async fn next_room_message_seq( + &self, + room_id: Uuid, + db: &C, + ) -> Result + where + C: ConnectionTrait, + { + let max_seq: Option> = room_message::Entity::find() + .filter(room_message::Column::Room.eq(room_id)) + .select_only() + .column_as(room_message::Column::Seq.max(), "max_seq") + .into_tuple::>() + .one(db) + .await?; + Ok(max_seq.flatten().unwrap_or(0) + 1) + } + + pub async fn utils_find_project_by_name( + &self, + name: String, + ) -> Result { + match project::Entity::find() + 
.filter(project::Column::Name.eq(name.clone())) + .one(&self.db) + .await + .ok() + .flatten() + { + Some(project) => Ok(project), + None => match project_history_name::Entity::find() + .filter(project_history_name::Column::HistoryName.eq(name)) + .one(&self.db) + .await + .ok() + .flatten() + { + Some(project) => self.utils_find_project_by_uid(project.project_uid).await, + None => Err(RoomError::NotFound("Project not found".to_string())), + }, + } + } + + pub async fn utils_find_project_by_uid(&self, uid: Uuid) -> Result { + project::Entity::find_by_id(uid) + .one(&self.db) + .await + .ok() + .flatten() + .ok_or_else(|| RoomError::NotFound("Project not found".to_string())) + } + + pub async fn check_project_access( + &self, + project_uid: Uuid, + user_uid: Uuid, + ) -> Result<(), RoomError> { + let project = project::Entity::find_by_id(project_uid) + .one(&self.db) + .await + .ok() + .flatten() + .ok_or_else(|| RoomError::NotFound("Project not found".to_string()))?; + + if project.is_public { + return Ok(()); + } + + let member = project_members::Entity::find() + .filter(project_members::Column::Project.eq(project_uid)) + .filter(project_members::Column::User.eq(user_uid)) + .one(&self.db) + .await?; + + if member.is_some() { + Ok(()) + } else { + Err(RoomError::NoPower) + } + } + + pub(crate) fn validate_name(name: &str, max_len: usize) -> Result<(), RoomError> { + if name.trim().is_empty() { + return Err(RoomError::BadRequest("name cannot be empty".to_string())); + } + if name.len() > max_len { + return Err(RoomError::BadRequest(format!( + "name exceeds maximum length of {} characters", + max_len + ))); + } + Ok(()) + } + + pub(crate) fn validate_content(content: &str, max_len: usize) -> Result<(), RoomError> { + if content.trim().is_empty() { + return Err(RoomError::BadRequest("content cannot be empty".to_string())); + } + if content.len() > max_len { + return Err(RoomError::BadRequest(format!( + "content exceeds maximum length of {} characters", + max_len + 
))); + } + Ok(()) + } + + pub(crate) fn sanitize_content(content: &str) -> String { + use std::sync::LazyLock; + + static SCRIPT_RE: LazyLock regex_lite::Regex> = + LazyLock::new(|| regex_lite::Regex::new(r"(?i)]*>.*?").unwrap()); + static STYLE_RE: LazyLock regex_lite::Regex> = + LazyLock::new(|| regex_lite::Regex::new(r"(?i)]*>.*?").unwrap()); + static ONERROR_RE: LazyLock regex_lite::Regex> = + LazyLock::new(|| regex_lite::Regex::new(r"(?i)\bonerror\s*=").unwrap()); + static ONLOAD_RE: LazyLock regex_lite::Regex> = + LazyLock::new(|| regex_lite::Regex::new(r"(?i)\bonload\s*=").unwrap()); + static ONCLICK_RE: LazyLock regex_lite::Regex> = + LazyLock::new(|| regex_lite::Regex::new(r"(?i)\bonclick\s*=").unwrap()); + static ONMOUSEOVER_RE: LazyLock regex_lite::Regex> = + LazyLock::new(|| regex_lite::Regex::new(r"(?i)\bonmouseover\s*=").unwrap()); + static JAVASCRIPT_RE: LazyLock regex_lite::Regex> = + LazyLock::new(|| regex_lite::Regex::new(r"(?i)javascript:").unwrap()); + static DATA_RE: LazyLock regex_lite::Regex> = + LazyLock::new(|| regex_lite::Regex::new(r"(?i)data:").unwrap()); + + let mut result = content.to_string(); + result = SCRIPT_RE.replace_all(&result, "").to_string(); + result = STYLE_RE.replace_all(&result, "").to_string(); + result = ONERROR_RE.replace_all(&result, "blocked=").to_string(); + result = ONLOAD_RE.replace_all(&result, "blocked=").to_string(); + result = ONCLICK_RE.replace_all(&result, "blocked=").to_string(); + result = ONMOUSEOVER_RE.replace_all(&result, "blocked=").to_string(); + result = JAVASCRIPT_RE.replace_all(&result, "blocked:").to_string(); + result = DATA_RE.replace_all(&result, "blocked:").to_string(); + + result + } + + pub async fn resolve_display_name( + &self, + msg: room_message::Model, + _room_id: Uuid, + ) -> super::RoomMessageResponse { + let sender_type = msg.sender_type.to_string(); + let display_name = match sender_type.as_str() { + "ai" => { + if let Some(sender_id) = msg.sender_id { + 
ai_model::Entity::find_by_id(sender_id) + .one(&self.db) + .await + .ok() + .flatten() + .map(|m| m.name) + } else { + None + } + } + _ => { + if let Some(sender_id) = msg.sender_id { + let user = user_model::Entity::find() + .filter(user_model::Column::Uid.eq(sender_id)) + .one(&self.db) + .await + .ok() + .flatten(); + user.map(|u| u.display_name.unwrap_or_else(|| u.username)) + } else { + None + } + } + }; + + super::RoomMessageResponse { + id: msg.id, + seq: msg.seq, + room: msg.room, + sender_type, + sender_id: msg.sender_id, + display_name, + thread: msg.thread, + content: msg.content, + content_type: msg.content_type.to_string(), + edited_at: msg.edited_at, + send_at: msg.send_at, + revoked: msg.revoked, + revoked_by: msg.revoked_by, + in_reply_to: msg.in_reply_to, + } + } +} diff --git a/libs/room/src/lib.rs b/libs/room/src/lib.rs new file mode 100644 index 0000000..9738740 --- /dev/null +++ b/libs/room/src/lib.rs @@ -0,0 +1,34 @@ +pub mod ai; +pub mod category; +pub mod connection; +pub mod draft_and_history; +pub mod error; +pub mod helpers; +pub mod member; +pub mod message; +pub mod metrics; +pub mod notification; +pub mod pin; +pub mod reaction; +pub mod room; +pub mod room_ai_queue; +pub mod search; +pub mod service; +pub mod thread; +pub mod types; +pub mod ws_context; + +pub use connection::{ + PersistFn, RedisFuture, RoomConnectionManager, cleanup_dedup_cache, extract_get_redis, + make_persist_fn, subscribe_project_room_events, subscribe_room_events, + subscribe_task_events_fn, +}; +pub use draft_and_history::{ + DraftResponse, DraftSaveRequest, MentionNotificationResponse, MessageEditHistoryEntry, + MessageEditHistoryResponse, +}; +pub use error::RoomError; +pub use metrics::RoomMetrics; +pub use reaction::{MessageReactionsResponse, MessageSearchResponse}; +pub use service::RoomService; +pub use types::{RoomEventType, *}; diff --git a/libs/room/src/member.rs b/libs/room/src/member.rs new file mode 100644 index 0000000..cbffb79 --- /dev/null +++ 
b/libs/room/src/member.rs @@ -0,0 +1,370 @@ +use crate::error::RoomError; +use crate::service::RoomService; +use crate::ws_context::WsUserContext; +use chrono::Utc; +use models::projects::project_members; +use models::rooms::{RoomMemberRole, room_member}; +use models::users::user as user_model; +use sea_orm::*; +use uuid::Uuid; + +impl RoomService { + pub async fn room_member_list( + &self, + room_id: Uuid, + ctx: &WsUserContext, + ) -> Result, RoomError> { + let user_id = ctx.user_id; + self.require_room_member(room_id, user_id).await?; + + let members = room_member::Entity::find() + .filter(room_member::Column::Room.eq(room_id)) + .all(&self.db) + .await?; + + let user_ids: Vec = members.iter().map(|m| m.user).collect(); + let users: std::collections::HashMap = if !user_ids.is_empty() { + use sea_orm::ColumnTrait; + user_model::Entity::find() + .filter(user_model::Column::Uid.is_in(user_ids)) + .all(&self.db) + .await? + .into_iter() + .map(|u| { + ( + u.uid, + super::UserInfo { + uid: u.uid, + username: u.username, + avatar_url: u.avatar_url, + }, + ) + }) + .collect() + } else { + std::collections::HashMap::new() + }; + + let responses = members + .into_iter() + .map(|m| super::RoomMemberResponse { + room: m.room, + user: m.user, + user_info: users.get(&m.user).cloned(), + role: m.role.to_string(), + first_msg_in: m.first_msg_in, + joined_at: m.joined_at, + last_read_seq: m.last_read_seq, + do_not_disturb: m.do_not_disturb, + dnd_start_hour: m.dnd_start_hour, + dnd_end_hour: m.dnd_end_hour, + }) + .collect(); + Ok(responses) + } + + pub async fn room_member_add( + &self, + room_id: Uuid, + request: super::RoomMemberAddRequest, + ctx: &WsUserContext, + ) -> Result { + let actor_id = ctx.user_id; + let room_model = self.find_room_or_404(room_id).await?; + self.require_room_admin(room_id, actor_id).await?; + + let target_project_member = project_members::Entity::find() + .filter(project_members::Column::Project.eq(room_model.project)) + 
.filter(project_members::Column::User.eq(request.user_id)) + .one(&self.db) + .await?; + if target_project_member.is_none() { + return Err(RoomError::NoPower); + } + + if let Some(existing) = self.find_room_member(room_id, request.user_id).await? { + let user_info = user_model::Entity::find() + .filter(user_model::Column::Uid.eq(request.user_id)) + .one(&self.db) + .await + .ok() + .flatten() + .map(|u| super::UserInfo { + uid: u.uid, + username: u.username, + avatar_url: u.avatar_url, + }); + let mut response = super::RoomMemberResponse::from(existing); + response.user_info = user_info; + return Ok(response); + } + + let role = if let Some(role) = request.role { + Self::parse_room_member_role(&role.to_lowercase())? + } else { + RoomMemberRole::Member + }; + + let created = room_member::ActiveModel { + room: Set(room_id), + user: Set(request.user_id), + role: Set(role), + first_msg_in: Set(None), + joined_at: Set(Some(Utc::now())), + last_read_seq: Set(None), + do_not_disturb: Set(false), + dnd_start_hour: Set(None), + dnd_end_hour: Set(None), + } + .insert(&self.db) + .await?; + + drop(self.room_manager.subscribe(room_id, request.user_id).await); + + self.publish_room_event( + room_model.project, + super::RoomEventType::MemberJoined, + Some(room_id), + None, + None, + None, + ) + .await; + + let _ = self + .notification_create(super::NotificationCreateRequest { + notification_type: super::NotificationType::Invitation, + user_id: request.user_id, + title: format!("你已被邀请加入房间 {}", room_model.room_name), + content: None, + room_id: Some(room_id), + project_id: room_model.project, + related_message_id: None, + related_user_id: Some(actor_id), + related_room_id: Some(room_id), + metadata: None, + expires_at: None, + }) + .await; + + let created_response = { + let user_info = user_model::Entity::find() + .filter(user_model::Column::Uid.eq(request.user_id)) + .one(&self.db) + .await + .ok() + .flatten() + .map(|u| super::UserInfo { + uid: u.uid, + username: u.username, + 
avatar_url: u.avatar_url, + }); + let mut r = super::RoomMemberResponse::from(created); + r.user_info = user_info; + r + }; + Ok(created_response) + } + + pub async fn room_member_update_role( + &self, + room_id: Uuid, + request: super::RoomMemberRoleUpdateRequest, + ctx: &WsUserContext, + ) -> Result { + let actor_id = ctx.user_id; + let actor = self.require_room_admin(room_id, actor_id).await?; + let target = self + .find_room_member(room_id, request.user_id) + .await? + .ok_or_else(|| RoomError::NotFound("Room member not found".to_string()))?; + + if target.role == RoomMemberRole::Owner { + return Err(RoomError::NoPower); + } + + let new_role = Self::parse_room_member_role(&request.role.to_lowercase())?; + if matches!(new_role, RoomMemberRole::Owner) { + return Err(RoomError::NoPower); + } + if actor.role != RoomMemberRole::Owner && matches!(new_role, RoomMemberRole::Admin) { + return Err(RoomError::NoPower); + } + + let old_role = target.role.clone(); + let new_role_cloned = new_role.clone(); + + let mut active: room_member::ActiveModel = target.into(); + active.role = Set(new_role); + let updated = active.update(&self.db).await?; + + let room = self.find_room_or_404(room_id).await?; + let _ = self + .notification_create(super::NotificationCreateRequest { + notification_type: super::NotificationType::RoleChange, + user_id: request.user_id, + title: format!( + "你在房间 {} 的角色已变更为 {}", + room.room_name, new_role_cloned + ), + content: None, + room_id: Some(room_id), + project_id: room.project, + related_message_id: None, + related_user_id: Some(actor_id), + related_room_id: Some(room_id), + metadata: Some(serde_json::json!({ + "old_role": old_role.to_string(), + "new_role": new_role_cloned.to_string(), + })), + expires_at: None, + }) + .await; + + let updated_response = { + let user_info = user_model::Entity::find() + .filter(user_model::Column::Uid.eq(request.user_id)) + .one(&self.db) + .await + .ok() + .flatten() + .map(|u| super::UserInfo { + uid: u.uid, + 
username: u.username, + avatar_url: u.avatar_url, + }); + let mut r = super::RoomMemberResponse::from(updated); + r.user_info = user_info; + r + }; + Ok(updated_response) + } + + pub async fn room_member_remove( + &self, + room_id: Uuid, + user_id: Uuid, + ctx: &WsUserContext, + ) -> Result<(), RoomError> { + let actor_id = ctx.user_id; + let actor = self.require_room_admin(room_id, actor_id).await?; + let target = self + .find_room_member(room_id, user_id) + .await? + .ok_or_else(|| RoomError::NotFound("Room member not found".to_string()))?; + + if target.role == RoomMemberRole::Owner { + return Err(RoomError::NoPower); + } + if actor.role == RoomMemberRole::Admin && target.role == RoomMemberRole::Admin { + return Err(RoomError::NoPower); + } + + room_member::Entity::delete_by_id((room_id, user_id)) + .exec(&self.db) + .await?; + + self.room_manager.unsubscribe(room_id, user_id).await; + + let room = self.find_room_or_404(room_id).await?; + self.publish_room_event( + room.project, + super::RoomEventType::MemberRemoved, + Some(room_id), + None, + None, + None, + ) + .await; + + Ok(()) + } + + pub async fn room_member_set_read_seq( + &self, + room_id: Uuid, + request: super::RoomMemberReadSeqRequest, + ctx: &WsUserContext, + ) -> Result { + let user_id = ctx.user_id; + let member = self.require_room_member_model(room_id, user_id).await?; + + let mut active: room_member::ActiveModel = member.into(); + active.last_read_seq = Set(Some(request.last_read_seq)); + let updated = active.update(&self.db).await?; + + let room = self.find_room_or_404(room_id).await?; + self.publish_room_event( + room.project, + super::RoomEventType::ReadReceipt, + Some(room_id), + None, + Some(user_id), + Some(request.last_read_seq), + ) + .await; + + let updated_response = { + let user_info = user_model::Entity::find() + .filter(user_model::Column::Uid.eq(user_id)) + .one(&self.db) + .await + .ok() + .flatten() + .map(|u| super::UserInfo { + uid: u.uid, + username: u.username, + avatar_url: 
u.avatar_url, + }); + let mut r = super::RoomMemberResponse::from(updated); + r.user_info = user_info; + r + }; + Ok(updated_response) + } + + pub async fn room_member_update_dnd( + &self, + room_id: Uuid, + request: super::RoomMemberUpdateDndRequest, + ctx: &WsUserContext, + ) -> Result { + let user_id = ctx.user_id; + let member = self.require_room_member_model(room_id, user_id).await?; + + let mut active: room_member::ActiveModel = member.into(); + if let Some(dnd) = request.do_not_disturb { + active.do_not_disturb = Set(dnd); + } + if let Some(start) = request.dnd_start_hour { + if !(0..=23).contains(&start) { + return Err(RoomError::BadRequest("dnd_start_hour must be 0-23".into())); + } + active.dnd_start_hour = Set(Some(start)); + } + if let Some(end) = request.dnd_end_hour { + if !(0..=23).contains(&end) { + return Err(RoomError::BadRequest("dnd_end_hour must be 0-23".into())); + } + active.dnd_end_hour = Set(Some(end)); + } + let updated = active.update(&self.db).await?; + let updated_response = { + let user_info = user_model::Entity::find() + .filter(user_model::Column::Uid.eq(user_id)) + .one(&self.db) + .await + .ok() + .flatten() + .map(|u| super::UserInfo { + uid: u.uid, + username: u.username, + avatar_url: u.avatar_url, + }); + let mut r = super::RoomMemberResponse::from(updated); + r.user_info = user_info; + r + }; + Ok(updated_response) + } +} diff --git a/libs/room/src/message.rs b/libs/room/src/message.rs new file mode 100644 index 0000000..4f5b873 --- /dev/null +++ b/libs/room/src/message.rs @@ -0,0 +1,376 @@ +use crate::error::RoomError; +use crate::service::RoomService; +use crate::ws_context::WsUserContext; +use chrono::Utc; +use models::rooms::{room, room_message, room_thread}; +use models::users::user as user_model; +use queue::RoomMessageEnvelope; +use sea_orm::*; +use serde_json; +use uuid::Uuid; + +impl RoomService { + pub async fn room_message_list( + &self, + room_id: Uuid, + before_seq: Option, + after_seq: Option, + limit: Option, + 
ctx: &WsUserContext, + ) -> Result { + let user_id = ctx.user_id; + self.require_room_member(room_id, user_id).await?; + + let mut query = room_message::Entity::find().filter(room_message::Column::Room.eq(room_id)); + if let Some(before_seq) = before_seq { + query = query.filter(room_message::Column::Seq.lt(before_seq)); + } + if let Some(after_seq) = after_seq { + query = query.filter(room_message::Column::Seq.gt(after_seq)); + } + + let total = query.clone().count(&self.db).await? as i64; + let models = query + .order_by_desc(room_message::Column::Seq) + .limit(limit.unwrap_or(50)) + .all(&self.db) + .await?; + + let user_ids: Vec = models + .iter() + .filter(|m| m.sender_type.to_string() == "member") + .filter_map(|m| m.sender_id) + .collect(); + let ai_model_ids: Vec = models + .iter() + .filter(|m| m.sender_type.to_string() == "ai") + .filter_map(|m| m.sender_id) + .collect(); + + let users: std::collections::HashMap = if !user_ids.is_empty() { + use sea_orm::ColumnTrait; + user_model::Entity::find() + .filter(user_model::Column::Uid.is_in(user_ids)) + .all(&self.db) + .await? + .into_iter() + .map(|u| (u.uid, u.display_name.unwrap_or(u.username))) + .collect() + } else { + std::collections::HashMap::new() + }; + + let ai_names: std::collections::HashMap = if !ai_model_ids.is_empty() { + use sea_orm::ColumnTrait; + models::agents::model::Entity::find() + .filter(models::agents::model::Column::Id.is_in(ai_model_ids)) + .all(&self.db) + .await? 
+ .into_iter() + .map(|m| (m.id, m.name)) + .collect() + } else { + std::collections::HashMap::new() + }; + + let mut messages: Vec = models + .into_iter() + .map(|msg| { + let sender_type = msg.sender_type.to_string(); + let display_name = match sender_type.as_str() { + "ai" => msg.sender_id.and_then(|id| ai_names.get(&id).cloned()), + _ => msg.sender_id.and_then(|id| users.get(&id).cloned()), + }; + super::RoomMessageResponse { + id: msg.id, + seq: msg.seq, + room: msg.room, + sender_type, + sender_id: msg.sender_id, + display_name, + thread: msg.thread, + in_reply_to: msg.in_reply_to, + content: msg.content, + content_type: msg.content_type.to_string(), + edited_at: msg.edited_at, + send_at: msg.send_at, + revoked: msg.revoked, + revoked_by: msg.revoked_by, + } + }) + .collect(); + messages.reverse(); + + Ok(super::RoomMessageListResponse { messages, total }) + } + + pub async fn room_message_create( + &self, + room_id: Uuid, + request: super::RoomMessageCreateRequest, + ctx: &WsUserContext, + ) -> Result { + let user_id = ctx.user_id; + let room_model = self.find_room_or_404(room_id).await?; + self.require_room_member(room_id, user_id).await?; + + let content_type_str = request + .content_type + .clone() + .unwrap_or_else(|| "text".to_string()); + Self::parse_message_content_type(Some(content_type_str.clone()))?; + Self::validate_content(&request.content, super::MAX_MESSAGE_CONTENT_LEN)?; + let content = Self::sanitize_content(&request.content); + + let thread_id = request.thread; + + if let Some(tid) = thread_id { + let thread = room_thread::Entity::find_by_id(tid) + .one(&self.db) + .await? 
+ .ok_or_else(|| RoomError::NotFound("Thread not found".to_string()))?; + if thread.room != room_id { + return Err(RoomError::BadRequest("thread not in room".to_string())); + } + } + + let seq = self.next_room_message_seq(room_id, &self.db).await?; + let now = Utc::now(); + let id = Uuid::now_v7(); + let project_id = room_model.project; + + let in_reply_to = request.in_reply_to; + let envelope = RoomMessageEnvelope { + id, + dedup_key: Some(format!("{}:{}", room_id, id)), + room_id, + sender_type: "member".to_string(), + sender_id: Some(user_id), + thread_id, + in_reply_to, + content: content.clone(), + content_type: content_type_str.clone(), + send_at: now, + seq, + }; + + let db = &self.db; + let txn = db.begin().await?; + + self.queue.publish(room_id, envelope).await?; + self.room_manager.metrics.messages_sent.increment(1); + + let mut room_active: room::ActiveModel = room_model.clone().into(); + room_active.last_msg_at = Set(now); + room_active.update(&txn).await?; + + if let Some(tid) = thread_id { + let thread = room_thread::Entity::find_by_id(tid) + .one(&txn) + .await? 
+ .ok_or_else(|| RoomError::NotFound("Thread not found".to_string()))?; + + let participants: Vec = + serde_json::from_value(thread.participants.clone()).unwrap_or_default(); + let participants: Vec = if !participants.contains(&user_id) { + let mut p = participants; + p.push(user_id); + p + } else { + participants + }; + + let preview = if content.len() > 50 { + format!("{}...", &content[..50]) + } else { + content.clone() + }; + + let mut active: room_thread::ActiveModel = thread.into(); + active.last_message_at = Set(now); + active.last_message_preview = Set(Some(preview)); + active.participants = Set(serde_json::to_value(participants).unwrap_or_default()); + active.updated_at = Set(now); + active.update(&txn).await?; + } + + txn.commit().await?; + + self.publish_room_event( + project_id, + super::RoomEventType::NewMessage, + Some(room_id), + None, + Some(id), + Some(seq), + ) + .await; + + let mentioned_users = self.resolve_mentions(&request.content).await; + for mentioned_user_id in mentioned_users { + if mentioned_user_id == user_id { + continue; + } + let _ = self + .notification_create(super::NotificationCreateRequest { + notification_type: super::NotificationType::Mention, + user_id: mentioned_user_id, + title: format!("{} 在 {} 中提到了你", user_id, room_model.room_name), + content: Some(content.clone()), + room_id: Some(room_id), + project_id, + related_message_id: Some(id), + related_user_id: Some(user_id), + related_room_id: Some(room_id), + metadata: None, + expires_at: None, + }) + .await; + } + + let should_respond = self.should_ai_respond(room_id).await.unwrap_or(false); + let is_text_message = request + .content_type + .as_ref() + .map(|ct| ct == "text") + .unwrap_or(true); + if should_respond && is_text_message { + if let Err(e) = self + .process_message_ai(room_id, id, user_id, content.clone()) + .await + { + slog::warn!(self.log, "Failed to process AI message: {}", e); + } + } + + let display_name = { + let user = user_model::Entity::find() + 
.filter(user_model::Column::Uid.eq(user_id)) + .one(&self.db) + .await + .ok() + .flatten(); + user.map(|u| u.display_name.unwrap_or_else(|| u.username)) + }; + + Ok(super::RoomMessageResponse { + id, + seq, + room: room_id, + sender_type: "member".to_string(), + sender_id: Some(user_id), + display_name, + thread: thread_id, + in_reply_to, + content: request.content, + content_type: content_type_str, + edited_at: None, + send_at: now, + revoked: None, + revoked_by: None, + }) + } + + pub async fn room_message_update( + &self, + message_id: Uuid, + request: super::RoomMessageUpdateRequest, + ctx: &WsUserContext, + ) -> Result { + let user_id = ctx.user_id; + Self::validate_content(&request.content, super::MAX_MESSAGE_CONTENT_LEN)?; + let model = room_message::Entity::find_by_id(message_id) + .one(&self.db) + .await? + .ok_or_else(|| RoomError::NotFound("Message not found".to_string()))?; + + self.require_room_member(model.room, user_id).await?; + if model.sender_id != Some(user_id) { + return Err(RoomError::NoPower); + } + + let elapsed = Utc::now().signed_duration_since(model.send_at); + if elapsed.num_minutes() > 120 { + return Err(RoomError::BadRequest( + "消息只能在发送后 2 小时内编辑".into(), + )); + } + + let old_content = model.content.clone(); + let new_content = request.content.clone(); + self.save_message_edit_history(message_id, user_id, old_content, new_content) + .await?; + + let mut active: room_message::ActiveModel = model.into(); + active.content = Set(request.content); + active.edited_at = Set(Some(Utc::now())); + let updated = active.update(&self.db).await?; + let updated_room = updated.room; + + let room = self.find_room_or_404(updated_room).await?; + self.publish_room_event( + room.project, + super::RoomEventType::MessageEdited, + Some(updated_room), + None, + Some(updated.id), + None, + ) + .await; + + Ok(self.resolve_display_name(updated, updated_room).await) + } + + pub async fn room_message_revoke( + &self, + message_id: Uuid, + ctx: &WsUserContext, + ) 
-> Result { + let user_id = ctx.user_id; + let model = room_message::Entity::find_by_id(message_id) + .one(&self.db) + .await? + .ok_or_else(|| RoomError::NotFound("Message not found".to_string()))?; + + let member = self.require_room_member_model(model.room, user_id).await?; + let can_admin = Self::is_room_admin(&member.role); + let can_author = model.sender_id == Some(user_id); + if !can_admin && !can_author { + return Err(RoomError::NoPower); + } + + let mut active: room_message::ActiveModel = model.into(); + active.revoked = Set(Some(Utc::now())); + active.revoked_by = Set(Some(user_id)); + let updated = active.update(&self.db).await?; + let updated_room = updated.room; + + let room = self.find_room_or_404(updated_room).await?; + self.publish_room_event( + room.project, + super::RoomEventType::MessageRevoked, + Some(updated_room), + None, + Some(updated.id), + None, + ) + .await; + + Ok(self.resolve_display_name(updated, updated_room).await) + } + + pub async fn room_message_get( + &self, + message_id: Uuid, + ctx: &WsUserContext, + ) -> Result { + let user_id = ctx.user_id; + let model = room_message::Entity::find_by_id(message_id) + .one(&self.db) + .await? 
+ .ok_or_else(|| RoomError::NotFound("Message not found".to_string()))?; + let room_id = model.room; + self.require_room_member(room_id, user_id).await?; + Ok(self.resolve_display_name(model, room_id).await) + } +} diff --git a/libs/room/src/metrics.rs b/libs/room/src/metrics.rs new file mode 100644 index 0000000..f119f35 --- /dev/null +++ b/libs/room/src/metrics.rs @@ -0,0 +1,193 @@ +use std::collections::HashMap; +use std::sync::Arc; + +use metrics::{ + Counter, Gauge, Histogram, Unit, describe_counter, describe_gauge, describe_histogram, + register_counter, register_gauge, register_histogram, +}; +use tokio::sync::RwLock; +use uuid::Uuid; + +pub struct RoomMetrics { + pub rooms_online: Gauge, + pub users_online: Gauge, + pub ws_connections_active: Gauge, + pub ws_connections_total: Counter, + pub ws_disconnections_total: Counter, + pub messages_sent: Counter, + pub messages_persisted: Counter, + pub messages_persist_failed: Counter, + pub broadcasts_sent: Counter, + pub broadcasts_dropped: Counter, + pub duplicates_skipped: Counter, + pub nats_publish_failed: Counter, + pub message_latency_ms: Histogram, + pub ws_rate_limit_hits: Counter, + pub ws_auth_failures: Counter, + pub ws_heartbeat_sent_total: Counter, + pub ws_heartbeat_timeout_total: Counter, + pub ws_idle_timeout_total: Counter, + room_connections: RwLock>, + room_messages: RwLock>, +} + +impl Default for RoomMetrics { + fn default() -> Self { + describe_gauge!("room_online_rooms", "Number of rooms with active workers"); + describe_gauge!( + "room_online_users", + "Total number of online WebSocket users" + ); + describe_gauge!( + "room_ws_connections_active", + "Current number of active WebSocket connections" + ); + describe_counter!( + "room_ws_connections_total", + Unit::Count, + "Total WebSocket connections established" + ); + describe_counter!( + "room_ws_disconnections_total", + Unit::Count, + "Total WebSocket disconnections" + ); + describe_counter!( + "room_messages_sent_total", + Unit::Count, 
+ "Total messages sent to rooms" + ); + describe_counter!( + "room_messages_persisted_total", + Unit::Count, + "Total messages persisted to database" + ); + describe_counter!( + "room_messages_persist_failed_total", + Unit::Count, + "Total message persistence failures" + ); + describe_counter!( + "room_broadcasts_sent_total", + Unit::Count, + "Total WebSocket broadcasts sent" + ); + describe_counter!( + "room_duplicates_skipped_total", + Unit::Count, + "Total duplicate messages skipped (idempotency)" + ); + describe_counter!( + "room_nats_publish_failed_total", + Unit::Count, + "Total NATS publish failures" + ); + describe_histogram!( + "room_message_latency_ms", + Unit::Milliseconds, + "Message processing latency from publish to persist" + ); + describe_counter!( + "room_ws_rate_limit_hits_total", + Unit::Count, + "Total WebSocket rate limit rejections" + ); + describe_counter!( + "room_ws_auth_failures_total", + Unit::Count, + "Total WebSocket authentication/authorization failures" + ); + describe_counter!( + "room_ws_heartbeat_sent_total", + Unit::Count, + "Total WebSocket heartbeat pings sent by server" + ); + describe_counter!( + "room_ws_heartbeat_timeout_total", + Unit::Count, + "Total WebSocket connections closed due to heartbeat timeout" + ); + describe_counter!( + "room_ws_idle_timeout_total", + Unit::Count, + "Total WebSocket connections closed due to idle timeout" + ); + describe_counter!( + "room_broadcasts_dropped_total", + Unit::Count, + "Total broadcasts dropped due to channel full" + ); + + Self { + rooms_online: register_gauge!("room_online_rooms"), + users_online: register_gauge!("room_online_users"), + ws_connections_active: register_gauge!("room_ws_connections_active"), + ws_connections_total: register_counter!("room_ws_connections_total"), + ws_disconnections_total: register_counter!("room_ws_disconnections_total"), + messages_sent: register_counter!("room_messages_sent_total"), + messages_persisted: 
register_counter!("room_messages_persisted_total"), + messages_persist_failed: register_counter!("room_messages_persist_failed_total"), + broadcasts_sent: register_counter!("room_broadcasts_sent_total"), + broadcasts_dropped: register_counter!("room_broadcasts_dropped_total"), + duplicates_skipped: register_counter!("room_duplicates_skipped_total"), + nats_publish_failed: register_counter!("room_nats_publish_failed_total"), + message_latency_ms: register_histogram!("room_message_latency_ms"), + ws_rate_limit_hits: register_counter!("room_ws_rate_limit_hits_total"), + ws_auth_failures: register_counter!("room_ws_auth_failures_total"), + ws_heartbeat_sent_total: register_counter!("room_ws_heartbeat_sent_total"), + ws_heartbeat_timeout_total: register_counter!("room_ws_heartbeat_timeout_total"), + ws_idle_timeout_total: register_counter!("room_ws_idle_timeout_total"), + room_connections: RwLock::new(HashMap::new()), + room_messages: RwLock::new(HashMap::new()), + } + } +} + +impl RoomMetrics { + pub fn new() -> Self { + Self::default() + } + + pub fn record_message_latency(&self, ms: f64) { + self.message_latency_ms.record(ms); + } + + pub fn incr_duplicates_skipped(&self) { + self.duplicates_skipped.increment(1); + } + + pub async fn incr_room_connections(&self, room_id: Uuid) { + let mut map = self.room_connections.write().await; + let counter = map.entry(room_id).or_insert_with(|| { + register_gauge!(format!("room_connections{{room_id=\"{}\"}}", room_id)) + }); + counter.increment(1.0); + } + + pub async fn dec_room_connections(&self, room_id: Uuid) { + let map = self.room_connections.read().await; + if let Some(counter) = map.get(&room_id) { + counter.decrement(1.0); + } + } + + pub async fn incr_room_messages(&self, room_id: Uuid) { + let mut map = self.room_messages.write().await; + let counter = map.entry(room_id).or_insert_with(|| { + register_counter!(format!("room_messages_total{{room_id=\"{}\"}}", room_id)) + }); + counter.increment(1); + } + + pub async fn 
cleanup_stale_rooms(&self, active_room_ids: &[Uuid]) { + let mut conn_map = self.room_connections.write().await; + conn_map.retain(|room_id, _| active_room_ids.contains(room_id)); + + let mut msg_map = self.room_messages.write().await; + msg_map.retain(|room_id, _| active_room_ids.contains(room_id)); + } + + pub fn into_arc(self) -> Arc { + Arc::new(self) + } +} diff --git a/libs/room/src/notification.rs b/libs/room/src/notification.rs new file mode 100644 index 0000000..ee61d3f --- /dev/null +++ b/libs/room/src/notification.rs @@ -0,0 +1,322 @@ +use deadpool_redis::redis; +use std::sync::Arc; + +use crate::ws_context::WsUserContext; +use chrono::Utc; +use models::rooms::room_notifications; +use models::users::user as user_model; +use sea_orm::{ActiveModelTrait, ColumnTrait, EntityTrait, Set, prelude::Expr, query::*}; +use uuid::Uuid; + +use crate::connection::extract_get_redis; +use crate::error::RoomError; +use crate::service::RoomService; + +impl RoomService { + pub async fn notification_create( + &self, + request: super::NotificationCreateRequest, + ) -> Result { + let notification_type = match request.notification_type { + super::NotificationType::Mention => room_notifications::NotificationType::Mention, + super::NotificationType::Invitation => room_notifications::NotificationType::Invitation, + super::NotificationType::RoleChange => room_notifications::NotificationType::RoleChange, + super::NotificationType::RoomCreated => { + room_notifications::NotificationType::RoomCreated + } + super::NotificationType::RoomDeleted => { + room_notifications::NotificationType::RoomDeleted + } + super::NotificationType::SystemAnnouncement => { + room_notifications::NotificationType::SystemAnnouncement + } + }; + + let model = room_notifications::ActiveModel { + id: Set(Uuid::now_v7()), + room: Set(request.room_id), + project: Set(Some(request.project_id)), + user_id: Set(Some(request.user_id)), + notification_type: Set(notification_type), + related_message_id: 
Set(request.related_message_id), + related_user_id: Set(request.related_user_id), + related_room_id: Set(request.related_room_id), + title: Set(request.title), + content: Set(request.content), + metadata: Set(request.metadata), + is_read: Set(false), + is_archived: Set(false), + created_at: Set(Utc::now()), + read_at: Set(None), + expires_at: Set(request.expires_at), + } + .insert(&self.db) + .await?; + + let user_info = { + let user = user_model::Entity::find() + .filter(user_model::Column::Uid.eq(request.user_id)) + .one(&self.db) + .await + .ok() + .flatten(); + user.map(|u| super::UserInfo { + uid: u.uid, + username: u.username, + avatar_url: u.avatar_url, + }) + }; + let response = super::NotificationResponse { + id: model.id, + room: model.room, + project: model.project, + user_id: model.user_id, + user_info, + notification_type: model.notification_type.to_string(), + title: model.title, + content: model.content, + related_message_id: model.related_message_id, + related_user_id: model.related_user_id, + related_room_id: model.related_room_id, + metadata: model.metadata.unwrap_or(serde_json::json!({})), + is_read: model.is_read, + is_archived: model.is_archived, + created_at: model.created_at, + read_at: model.read_at, + expires_at: model.expires_at, + }; + + self.push_notification_event(request.user_id, response.clone()) + .await; + self.incr_unread_count_cache(request.user_id).await; + + Ok(response) + } + + pub async fn notification_list( + &self, + only_unread: Option, + archived: Option, + limit: Option, + ctx: &WsUserContext, + ) -> Result { + let user_id = ctx.user_id; + + let show_archived = archived.unwrap_or(false); + let mut query = room_notifications::Entity::find() + .filter(room_notifications::Column::UserId.eq(user_id)) + .filter(room_notifications::Column::IsArchived.eq(show_archived)); + + if only_unread.unwrap_or(false) { + query = query.filter(room_notifications::Column::IsRead.eq(false)); + } + + let unread_count = 
room_notifications::Entity::find() + .filter(room_notifications::Column::UserId.eq(user_id)) + .filter(room_notifications::Column::IsArchived.eq(false)) + .filter(room_notifications::Column::IsRead.eq(false)) + .count(&self.db) + .await? as i64; + + let total = query.clone().count(&self.db).await? as i64; + + let models = query + .order_by_desc(room_notifications::Column::CreatedAt) + .limit(limit.unwrap_or(50)) + .all(&self.db) + .await?; + + let user_ids: Vec = models.iter().filter_map(|m| m.user_id).collect(); + let users: std::collections::HashMap = if !user_ids.is_empty() { + user_model::Entity::find() + .filter(user_model::Column::Uid.is_in(user_ids)) + .all(&self.db) + .await? + .into_iter() + .map(|u| { + ( + u.uid, + super::UserInfo { + uid: u.uid, + username: u.username, + avatar_url: u.avatar_url, + }, + ) + }) + .collect() + } else { + std::collections::HashMap::new() + }; + + let notifications: Vec = models + .into_iter() + .map(|m| super::NotificationResponse { + id: m.id, + room: m.room, + project: m.project, + user_id: m.user_id, + user_info: m.user_id.and_then(|uid| users.get(&uid).cloned()), + notification_type: m.notification_type.to_string(), + title: m.title, + content: m.content, + related_message_id: m.related_message_id, + related_user_id: m.related_user_id, + related_room_id: m.related_room_id, + metadata: m.metadata.unwrap_or(serde_json::json!({})), + is_read: m.is_read, + is_archived: m.is_archived, + created_at: m.created_at, + read_at: m.read_at, + expires_at: m.expires_at, + }) + .collect(); + + Ok(super::NotificationListResponse { + notifications, + total, + unread_count, + }) + } + + pub async fn notification_mark_read( + &self, + notification_id: Uuid, + ctx: &WsUserContext, + ) -> Result<(), RoomError> { + let user_id = ctx.user_id; + + let model = room_notifications::Entity::find_by_id(notification_id) + .one(&self.db) + .await? 
+ .ok_or_else(|| RoomError::NotFound("Notification not found".to_string()))?; + + if model.user_id != Some(user_id) { + return Err(RoomError::NoPower); + } + + if !model.is_read { + let mut active: room_notifications::ActiveModel = model.into(); + active.is_read = Set(true); + active.read_at = Set(Some(Utc::now())); + active.update(&self.db).await?; + self.decr_unread_count_cache(user_id).await; + } + + Ok(()) + } + + pub async fn notification_mark_all_read(&self, ctx: &WsUserContext) -> Result { + let user_id = ctx.user_id; + + let result = room_notifications::Entity::update_many() + .filter(room_notifications::Column::UserId.eq(user_id)) + .filter(room_notifications::Column::IsArchived.eq(false)) + .filter(room_notifications::Column::IsRead.eq(false)) + .col_expr(room_notifications::Column::IsRead, Expr::value(true)) + .col_expr( + room_notifications::Column::ReadAt, + Expr::value(Some(Utc::now())), + ) + .exec(&self.db) + .await?; + + self.reset_unread_count_cache(user_id).await; + + Ok(result.rows_affected) + } + + pub async fn notification_archive( + &self, + notification_id: Uuid, + ctx: &WsUserContext, + ) -> Result<(), RoomError> { + let user_id = ctx.user_id; + + let model = room_notifications::Entity::find_by_id(notification_id) + .one(&self.db) + .await? 
+ .ok_or_else(|| RoomError::NotFound("Notification not found".to_string()))?; + + if model.user_id != Some(user_id) { + return Err(RoomError::NoPower); + } + + let mut active: room_notifications::ActiveModel = model.into(); + active.is_archived = Set(true); + active.update(&self.db).await?; + + Ok(()) + } + + pub async fn notification_cleanup_expired(&self) -> Result { + let result = room_notifications::Entity::delete_many() + .filter(room_notifications::Column::ExpiresAt.lt(Utc::now())) + .exec(&self.db) + .await?; + Ok(result.rows_affected) + } + + async fn push_notification_event( + &self, + user_id: Uuid, + notification: super::NotificationResponse, + ) { + let event = super::NotificationEvent::new(notification); + self.room_manager + .push_user_notification(user_id, Arc::new(event)) + .await; + } + + fn unread_cache_key(user_id: Uuid) -> String { + format!("room:notification:unread:{}", user_id) + } + + async fn incr_unread_count_cache(&self, user_id: Uuid) { + let get_redis = extract_get_redis(self.queue.clone()); + let key = Self::unread_cache_key(user_id); + tokio::spawn(async move { + let redis = match (get_redis)().await { + Ok(r) => r, + Err(_) => return, + }; + let mut conn = redis; + let _: Result = redis::cmd("INCR").arg(&key).query_async(&mut conn).await; + let _: Result<(), _> = redis::cmd("EXPIRE") + .arg(&key) + .arg(3600) + .query_async(&mut conn) + .await; + }); + } + + async fn decr_unread_count_cache(&self, user_id: Uuid) { + let get_redis = extract_get_redis(self.queue.clone()); + let key = Self::unread_cache_key(user_id); + tokio::spawn(async move { + let redis = match (get_redis)().await { + Ok(r) => r, + Err(_) => return, + }; + let mut conn = redis; + let _: Result<(), _> = redis::cmd("EVAL") + .arg(r#"local c = redis.call('GET', KEYS[1]); if c and tonumber(c) > 0 then return redis.call('DECR', KEYS[1]) else return 0 end"#) + .arg(1) + .arg(&key) + .query_async(&mut conn) + .await; + }); + } + + async fn reset_unread_count_cache(&self, 
user_id: Uuid) { + let get_redis = extract_get_redis(self.queue.clone()); + let key = Self::unread_cache_key(user_id); + tokio::spawn(async move { + let redis = match (get_redis)().await { + Ok(r) => r, + Err(_) => return, + }; + let mut conn = redis; + let _: Result<(), _> = redis::cmd("DEL").arg(&key).query_async(&mut conn).await; + }); + } +} diff --git a/libs/room/src/pin.rs b/libs/room/src/pin.rs new file mode 100644 index 0000000..21df946 --- /dev/null +++ b/libs/room/src/pin.rs @@ -0,0 +1,98 @@ +use crate::error::RoomError; +use crate::service::RoomService; +use crate::ws_context::WsUserContext; +use chrono::Utc; +use models::rooms::{room_message, room_pin}; +use sea_orm::*; +use uuid::Uuid; + +impl RoomService { + pub async fn room_pin_list( + &self, + room_id: Uuid, + ctx: &WsUserContext, + ) -> Result, RoomError> { + let user_id = ctx.user_id; + self.require_room_member(room_id, user_id).await?; + + let pins = room_pin::Entity::find() + .filter(room_pin::Column::Room.eq(room_id)) + .order_by_desc(room_pin::Column::PinnedAt) + .all(&self.db) + .await?; + + Ok(pins.into_iter().map(super::RoomPinResponse::from).collect()) + } + + pub async fn room_pin_add( + &self, + message_id: Uuid, + ctx: &WsUserContext, + ) -> Result { + let user_id = ctx.user_id; + let message = room_message::Entity::find_by_id(message_id) + .one(&self.db) + .await? + .ok_or_else(|| RoomError::NotFound("Message not found".to_string()))?; + self.require_room_admin(message.room, user_id).await?; + + if let Some(existing) = room_pin::Entity::find_by_id((message.room, message.id)) + .one(&self.db) + .await? 
+ { + return Ok(super::RoomPinResponse::from(existing)); + } + + let model = room_pin::ActiveModel { + room: Set(message.room), + message: Set(message.id), + pinned_by: Set(user_id), + pinned_at: Set(Utc::now()), + } + .insert(&self.db) + .await?; + + let room = self.find_room_or_404(message.room).await?; + self.publish_room_event( + room.project, + super::RoomEventType::MessagePinned, + Some(message.room), + None, + Some(message.id), + None, + ) + .await; + + Ok(super::RoomPinResponse::from(model)) + } + + pub async fn room_pin_remove( + &self, + message_id: Uuid, + ctx: &WsUserContext, + ) -> Result<(), RoomError> { + let user_id = ctx.user_id; + let message = room_message::Entity::find_by_id(message_id) + .one(&self.db) + .await? + .ok_or_else(|| RoomError::NotFound("Message not found".to_string()))?; + self.require_room_admin(message.room, user_id).await?; + + room_pin::Entity::delete_by_id((message.room, message.id)) + .exec(&self.db) + .await?; + + let room = self.find_room_or_404(message.room).await?; + self.publish_room_event( + room.project, + super::RoomEventType::MessageUnpinned, + Some(message.room), + None, + Some(message.id), + None, + ) + .await; + + Ok(()) + } +} diff --git a/libs/room/src/reaction.rs b/libs/room/src/reaction.rs new file mode 100644 index 0000000..9a0dbfe --- /dev/null +++ b/libs/room/src/reaction.rs @@ -0,0 +1,322 @@ +use crate::error::RoomError; +use crate::service::RoomService; +use crate::ws_context::WsUserContext; +use chrono::Utc; +use models::rooms::room_message_reaction; +use models::users::user as user_model; +use queue::ReactionGroup; +use sea_orm::*; +use sea_query::OnConflict; +use uuid::Uuid; + +#[derive(Debug, Clone, serde::Serialize, utoipa::ToSchema)] +pub struct ReactionGroupResponse { + pub emoji: String, + pub count: i64, + pub reacted_by_me: bool, + pub users: Vec, +} + +#[derive(Debug, Clone, serde::Serialize, utoipa::ToSchema)] +pub struct MessageReactionsResponse { + pub message_id: Uuid, + pub reactions: Vec, 
+} + +#[derive(Debug, Clone, serde::Serialize, utoipa::ToSchema)] +pub struct MessageSearchResponse { + pub messages: Vec, + pub total: i64, +} + +impl RoomService { + pub async fn message_reaction_add( + &self, + message_id: Uuid, + emoji: String, + ctx: &WsUserContext, + ) -> Result { + let user_id = ctx.user_id; + + let message = self.find_message_or_404(message_id).await?; + self.require_room_member(message.room, user_id).await?; + + Self::validate_emoji(&emoji)?; + + let now = Utc::now(); + + let reaction = room_message_reaction::ActiveModel { + id: Set(Uuid::now_v7()), + room: Set(message.room), + message: Set(message_id), + user: Set(user_id), + emoji: Set(emoji.clone()), + created_at: Set(now), + }; + + let result = room_message_reaction::Entity::insert(reaction) + .on_conflict( + OnConflict::columns([ + room_message_reaction::Column::Message, + room_message_reaction::Column::User, + room_message_reaction::Column::Emoji, + ]) + .do_nothing() + .to_owned(), + ) + .exec(&self.db) + .await; + + if result.is_ok() { + let reactions = self + .get_message_reactions(message_id, Some(user_id)) + .await?; + let reaction_groups = reactions + .reactions + .into_iter() + .map(|g| ReactionGroup { + emoji: g.emoji, + count: g.count, + reacted_by_me: g.reacted_by_me, + users: g.users, + }) + .collect(); + self.queue + .publish_reaction_event(message.room, message_id, reaction_groups) + .await; + } + + self.get_message_reactions(message_id, Some(user_id)).await + } + + pub async fn message_reaction_remove( + &self, + message_id: Uuid, + emoji: String, + ctx: &WsUserContext, + ) -> Result { + let user_id = ctx.user_id; + + let message = self.find_message_or_404(message_id).await?; + self.require_room_member(message.room, user_id).await?; + + room_message_reaction::Entity::delete_many() + .filter(room_message_reaction::Column::Message.eq(message_id)) + .filter(room_message_reaction::Column::User.eq(user_id)) + .filter(room_message_reaction::Column::Emoji.eq(emoji)) + 
.exec(&self.db) + .await?; + + let reactions = self + .get_message_reactions(message_id, Some(user_id)) + .await?; + let reaction_groups = reactions + .reactions + .into_iter() + .map(|g| ReactionGroup { + emoji: g.emoji, + count: g.count, + reacted_by_me: g.reacted_by_me, + users: g.users, + }) + .collect(); + self.queue + .publish_reaction_event(message.room, message_id, reaction_groups) + .await; + + self.get_message_reactions(message_id, Some(user_id)).await + } + + pub async fn message_reactions_get( + &self, + message_id: Uuid, + ctx: &WsUserContext, + ) -> Result { + let user_id = ctx.user_id; + + let message = self.find_message_or_404(message_id).await?; + self.require_room_member(message.room, user_id).await?; + + self.get_message_reactions(message_id, Some(user_id)).await + } + + pub async fn message_reactions_batch( + &self, + room_id: Uuid, + message_ids: Vec, + ctx: &WsUserContext, + ) -> Result, RoomError> { + let user_id = ctx.user_id; + self.require_room_member(room_id, user_id).await?; + + let mut results = Vec::with_capacity(message_ids.len()); + for msg_id in message_ids { + let reactions = self.get_message_reactions(msg_id, Some(user_id)).await?; + results.push(reactions); + } + Ok(results) + } + + pub async fn message_search( + &self, + room_id: Uuid, + query: &str, + limit: Option, + offset: Option, + ctx: &WsUserContext, + ) -> Result { + let user_id = ctx.user_id; + self.require_room_member(room_id, user_id).await?; + + if query.trim().is_empty() { + return Ok(MessageSearchResponse { + messages: Vec::new(), + total: 0, + }); + } + + let limit = limit.unwrap_or(20); + let offset = offset.unwrap_or(0); + + let search_pattern = format!("%{}%", query); + + let query_builder = models::rooms::room_message::Entity::find() + .filter(models::rooms::room_message::Column::Room.eq(room_id)) + .filter(models::rooms::room_message::Column::Content.like(&search_pattern)) + .filter(models::rooms::room_message::Column::Revoked.is_null()); + + let total = 
query_builder.clone().count(&self.db).await? as i64; + + let messages = query_builder + .order_by_desc(models::rooms::room_message::Column::SendAt) + .limit(limit) + .offset(offset) + .all(&self.db) + .await?; + + let response_messages = self.build_messages_with_display_names(messages).await; + + Ok(MessageSearchResponse { + messages: response_messages, + total, + }) + } + + pub(crate) async fn find_message_or_404( + &self, + message_id: Uuid, + ) -> Result { + models::rooms::room_message::Entity::find_by_id(message_id) + .one(&self.db) + .await? + .ok_or_else(|| RoomError::NotFound("Message not found".to_string())) + } + + pub(crate) fn validate_emoji(emoji: &str) -> Result<(), RoomError> { + if emoji.is_empty() || emoji.len() > 50 { + return Err(RoomError::BadRequest("Invalid emoji format".to_string())); + } + Ok(()) + } + + pub(crate) async fn get_message_reactions( + &self, + message_id: Uuid, + current_user_id: Option, + ) -> Result { + let reactions = room_message_reaction::Entity::find() + .filter(room_message_reaction::Column::Message.eq(message_id)) + .all(&self.db) + .await?; + + let reaction_groups = self.build_reaction_groups(reactions, current_user_id); + + Ok(MessageReactionsResponse { + message_id, + reactions: reaction_groups, + }) + } + + pub(crate) fn build_reaction_groups( + &self, + reactions: Vec, + current_user_id: Option, + ) -> Vec { + let mut grouped: std::collections::HashMap> = + std::collections::HashMap::new(); + + for r in &reactions { + grouped.entry(r.emoji.clone()).or_default().push(r); + } + + grouped + .into_iter() + .map(|(emoji, user_reactions)| { + let count = user_reactions.len() as i64; + let reacted_by_me = current_user_id + .map(|uid| user_reactions.iter().any(|r| r.user == uid)) + .unwrap_or(false); + let users = user_reactions.iter().take(3).map(|r| r.user).collect(); + + ReactionGroupResponse { + emoji, + count, + reacted_by_me, + users, + } + }) + .collect() + } + + pub(crate) async fn build_messages_with_display_names( 
+ &self, + messages: Vec, + ) -> Vec { + let user_ids: Vec = messages + .iter() + .filter(|m| m.sender_type.to_string() == "member") + .filter_map(|m| m.sender_id) + .collect(); + + let users: std::collections::HashMap = if !user_ids.is_empty() { + user_model::Entity::find() + .filter(user_model::Column::Uid.is_in(user_ids)) + .all(&self.db) + .await + .unwrap_or_default() + .into_iter() + .map(|u| (u.uid, u.display_name.unwrap_or(u.username))) + .collect() + } else { + std::collections::HashMap::new() + }; + + messages + .into_iter() + .map(|msg| { + let sender_type = msg.sender_type.to_string(); + let display_name = match sender_type.as_str() { + "member" => msg.sender_id.and_then(|id| users.get(&id).cloned()), + _ => None, + }; + + super::RoomMessageResponse { + id: msg.id, + seq: msg.seq, + room: msg.room, + sender_type, + sender_id: msg.sender_id, + display_name, + thread: msg.thread, + in_reply_to: msg.in_reply_to, + content: msg.content, + content_type: msg.content_type.to_string(), + edited_at: msg.edited_at, + send_at: msg.send_at, + revoked: msg.revoked, + revoked_by: msg.revoked_by, + } + }) + .collect() + } +} diff --git a/libs/room/src/room.rs b/libs/room/src/room.rs new file mode 100644 index 0000000..1c2e941 --- /dev/null +++ b/libs/room/src/room.rs @@ -0,0 +1,300 @@ +use crate::error::RoomError; +use crate::service::RoomService; +use crate::ws_context::WsUserContext; +use chrono::Utc; +use models::rooms::{ + RoomMemberRole, room, room_ai, room_category, room_member, room_message, room_pin, room_thread, +}; +use queue::ProjectRoomEvent; +use sea_orm::*; +use uuid::Uuid; + +impl RoomService { + pub async fn room_list( + &self, + project_name: String, + only_public: Option, + ctx: &WsUserContext, + ) -> Result, RoomError> { + let user_id = ctx.user_id; + let project = self.utils_find_project_by_name(project_name).await?; + self.check_project_access(project.id, user_id).await?; + + let mut query = 
room::Entity::find().filter(room::Column::Project.eq(project.id)); + if only_public.unwrap_or(false) { + query = query.filter(room::Column::Public.eq(true)); + } + + let models = query + .order_by_desc(room::Column::LastMsgAt) + .all(&self.db) + .await?; + + let room_ids: Vec = models.iter().map(|r| r.id).collect(); + + let latest_seqs: std::collections::HashMap = room_message::Entity::find() + .select_only() + .column(room_message::Column::Room) + .column_as(room_message::Column::Seq.max(), "max_seq") + .filter(room_message::Column::Room.is_in(room_ids.clone())) + .group_by(room_message::Column::Room) + .into_tuple::<(Uuid, Option)>() + .all(&self.db) + .await? + .into_iter() + .map(|(room, seq)| (room, seq.unwrap_or(0))) + .collect(); + + let member_read_seqs: std::collections::HashMap = room_member::Entity::find() + .filter(room_member::Column::User.eq(user_id)) + .filter(room_member::Column::Room.is_in(room_ids)) + .all(&self.db) + .await? + .into_iter() + .map(|m| (m.room, m.last_read_seq.unwrap_or(0))) + .collect(); + + let mut responses = Vec::new(); + for model in models { + let last_read_seq = member_read_seqs.get(&model.id).copied().unwrap_or(0); + let latest_seq = latest_seqs.get(&model.id).copied().unwrap_or(0); + let unread_count = std::cmp::max(latest_seq - last_read_seq, 0); + + let mut response = super::RoomResponse::from(model); + response.unread_count = unread_count; + responses.push(response); + } + + Ok(responses) + } + + pub async fn room_create( + &self, + project_name: String, + request: super::RoomCreateRequest, + ctx: &WsUserContext, + ) -> Result { + let user_id = ctx.user_id; + let project = self.utils_find_project_by_name(project_name).await?; + self.require_project_admin(project.id, user_id).await?; + + Self::validate_name(&request.room_name, super::MAX_ROOM_NAME_LEN)?; + + if let Some(category_id) = request.category { + let category = room_category::Entity::find_by_id(category_id) + .one(&self.db) + .await? 
+ .ok_or_else(|| RoomError::NotFound("Room category not found".to_string()))?; + if category.project != project.id { + return Err(RoomError::BadRequest( + "category does not belong to this project".to_string(), + )); + } + } + + let txn = self.db.begin().await?; + + let room_name = request.room_name.clone(); + let room_model = room::ActiveModel { + id: Set(Uuid::now_v7()), + project: Set(project.id), + room_name: Set(request.room_name), + public: Set(request.public), + category: Set(request.category), + created_by: Set(user_id), + created_at: Set(Utc::now()), + last_msg_at: Set(Utc::now()), + } + .insert(&txn) + .await?; + + room_member::ActiveModel { + room: Set(room_model.id), + user: Set(user_id), + role: Set(RoomMemberRole::Owner), + first_msg_in: Set(None), + joined_at: Set(Some(Utc::now())), + last_read_seq: Set(None), + do_not_disturb: Set(false), + dnd_start_hour: Set(None), + dnd_end_hour: Set(None), + } + .insert(&txn) + .await?; + + txn.commit().await?; + + self.spawn_room_workers(room_model.id); + + let event = ProjectRoomEvent { + event_type: super::RoomEventType::RoomCreated.as_str().into(), + project_id: project.id, + room_id: Some(room_model.id), + category_id: None, + message_id: None, + seq: None, + timestamp: Utc::now(), + }; + let _ = self + .queue + .publish_project_room_event(project.id, event) + .await; + + self.notify_project_members( + project.id, + super::NotificationType::RoomCreated, + format!("新房间已创建: {}", room_name), + None, + Some(room_model.id), + ); + + Ok(super::RoomResponse::from(room_model)) + } + + pub async fn room_get( + &self, + room_id: Uuid, + ctx: &WsUserContext, + ) -> Result { + let user_id = ctx.user_id; + let model = self.find_room_or_404(room_id).await?; + self.ensure_room_visible_for_user(&model, user_id).await?; + Ok(super::RoomResponse::from(model)) + } + + pub async fn room_update( + &self, + room_id: Uuid, + request: super::RoomUpdateRequest, + ctx: &WsUserContext, + ) -> Result { + let user_id = ctx.user_id; + 
let room_model = self.find_room_or_404(room_id).await?; + self.require_room_admin(room_id, user_id).await?; + + if let Some(category_id) = request.category { + let category = room_category::Entity::find_by_id(category_id) + .one(&self.db) + .await? + .ok_or_else(|| RoomError::NotFound("Room category not found".to_string()))?; + if category.project != room_model.project { + return Err(RoomError::BadRequest( + "category does not belong to this project".to_string(), + )); + } + } + + let mut active: room::ActiveModel = room_model.into(); + + let renamed = request.room_name.is_some(); + let moved = request.category.is_some(); + + if let Some(room_name) = request.room_name { + active.room_name = Set(room_name); + } + if let Some(public) = request.public { + active.public = Set(public); + } + if request.category.is_some() { + active.category = Set(request.category); + } + let updated = active.update(&self.db).await?; + + if renamed { + let event = ProjectRoomEvent { + event_type: super::RoomEventType::RoomRenamed.as_str().into(), + project_id: updated.project, + room_id: Some(updated.id), + category_id: None, + message_id: None, + seq: None, + timestamp: Utc::now(), + }; + let _ = self + .queue + .publish_project_room_event(updated.project, event) + .await; + } + if moved { + let event = ProjectRoomEvent { + event_type: super::RoomEventType::RoomMoved.as_str().into(), + project_id: updated.project, + room_id: Some(updated.id), + category_id: None, + message_id: None, + seq: None, + timestamp: Utc::now(), + }; + let _ = self + .queue + .publish_project_room_event(updated.project, event) + .await; + } + + Ok(super::RoomResponse::from(updated)) + } + + pub async fn room_delete(&self, room_id: Uuid, ctx: &WsUserContext) -> Result<(), RoomError> { + let user_id = ctx.user_id; + let room_model = self.find_room_or_404(room_id).await?; + self.require_room_admin(room_id, user_id).await?; + let project_id = room_model.project; + + let txn = self.db.begin().await?; + + 
room_message::Entity::delete_many() + .filter(room_message::Column::Room.eq(room_id)) + .exec(&txn) + .await?; + + room_pin::Entity::delete_many() + .filter(room_pin::Column::Room.eq(room_id)) + .exec(&txn) + .await?; + + room_thread::Entity::delete_many() + .filter(room_thread::Column::Room.eq(room_id)) + .exec(&txn) + .await?; + + room_member::Entity::delete_many() + .filter(room_member::Column::Room.eq(room_id)) + .exec(&txn) + .await?; + + room_ai::Entity::delete_many() + .filter(room_ai::Column::Room.eq(room_id)) + .exec(&txn) + .await?; + + room::Entity::delete_by_id(room_id).exec(&txn).await?; + + txn.commit().await?; + + self.room_manager.shutdown_room(room_id).await; + + let event = ProjectRoomEvent { + event_type: super::RoomEventType::RoomDeleted.as_str().into(), + project_id, + room_id: Some(room_id), + category_id: None, + message_id: None, + seq: None, + timestamp: Utc::now(), + }; + let _ = self + .queue + .publish_project_room_event(project_id, event) + .await; + + self.notify_project_members( + project_id, + super::NotificationType::RoomDeleted, + format!("房间 {} 已被删除", room_model.room_name), + None, + Some(room_id), + ); + + Ok(()) + } +} diff --git a/libs/room/src/room_ai_queue.rs b/libs/room/src/room_ai_queue.rs new file mode 100644 index 0000000..6855262 --- /dev/null +++ b/libs/room/src/room_ai_queue.rs @@ -0,0 +1,225 @@ +use crate::RoomError; +use db::cache::AppCache; +use std::time::{Duration, Instant}; +use uuid::Uuid; + +const LOCK_TTL_MS: usize = 120_000; +const TICKET_TTL_MS: usize = 90_000; +const MAX_BACKOFF_MS: u64 = 200; + +pub struct RoomAiLockGuard { + cache: AppCache, + queue_key: String, + ticket_key: String, + lock_key: String, + lock_token: String, + request_uid: String, + acquired: bool, + log: slog::Logger, +} + +impl Drop for RoomAiLockGuard { + fn drop(&mut self) { + if !self.acquired { + return; + } + let cache = self.cache.clone(); + let queue_key = self.queue_key.clone(); + let ticket_key = self.ticket_key.clone(); + let 
lock_key = self.lock_key.clone(); + let lock_token = self.lock_token.clone(); + let request_uid = self.request_uid.clone(); + let log = self.log.clone(); + tokio::spawn(async move { + if let Err(e) = release_lock( + &cache, + &queue_key, + &ticket_key, + &lock_key, + &lock_token, + &request_uid, + ) + .await + { + slog::warn!( + log, + "RoomAiLockGuard: failed to release lock key={} token={} err={}", + lock_key, + lock_token, + e + ); + } + }); + } +} + +pub async fn acquire_room_ai_lock( + cache: &AppCache, + room_id: Uuid, + log: &slog::Logger, +) -> Result, RoomError> { + let request_uid = Uuid::now_v7().to_string(); + let hostname = hostname::get() + .map(|h| h.to_string_lossy().into_owned()) + .unwrap_or_else(|_| "unknown".to_string()); + let pid = std::process::id(); + let lock_token = format!("{}:{}:{}", hostname, pid, request_uid); + + let queue_key = format!("ai:room:queue:{}", room_id); + let seq_key = format!("ai:room:queue:seq:{}", room_id); + let lock_key = format!("ai:room:queue:lock:{}", room_id); + let ticket_key = format!("ai:room:queue:ticket:{}:{}", room_id, request_uid); + + { + let mut conn = cache + .conn() + .await + .map_err(|e| RoomError::Internal(e.to_string()))?; + let seq: i64 = redis::cmd("INCR") + .arg(&seq_key) + .query_async(&mut conn) + .await + .map_err(|e| RoomError::Internal(format!("INCR: {}", e)))?; + + let _: i32 = redis::cmd("ZADD") + .arg(&queue_key) + .arg(seq) + .arg(&request_uid) + .query_async(&mut conn) + .await + .map_err(|e| RoomError::Internal(format!("ZADD: {}", e)))?; + + let _: () = redis::cmd("SET") + .arg(&ticket_key) + .arg("1") + .arg("PX") + .arg(TICKET_TTL_MS) + .query_async(&mut conn) + .await + .map_err(|e| RoomError::Internal(format!("SET ticket: {}", e)))?; + } + + let start = Instant::now(); + let mut retry_count: u32 = 0; + loop { + if start.elapsed().as_millis() as usize >= TICKET_TTL_MS { + slog::warn!( + log, + "RoomAiLock: timeout waiting for lock after {}ms, room_id={}", + 
start.elapsed().as_millis(), + room_id + ); + return Ok(None); + } + + let mut conn = cache + .conn() + .await + .map_err(|e| RoomError::Internal(e.to_string()))?; + + let _: () = redis::cmd("PEXPIRE") + .arg(&ticket_key) + .arg(TICKET_TTL_MS) + .query_async(&mut conn) + .await + .map_err(|e| RoomError::Internal(format!("PEXPIRE: {}", e)))?; + + let head: Vec = redis::cmd("ZRANGE") + .arg(&queue_key) + .arg(0) + .arg(0) + .query_async(&mut conn) + .await + .map_err(|e| RoomError::Internal(format!("ZRANGE: {}", e)))?; + + if let Some(head_uid) = head.first() { + if head_uid == &request_uid { + let ok: Option = redis::cmd("SET") + .arg(&lock_key) + .arg(&lock_token) + .arg("NX") + .arg("PX") + .arg(LOCK_TTL_MS) + .query_async(&mut conn) + .await + .map_err(|e| RoomError::Internal(format!("SET NX PX: {}", e)))?; + + if ok.is_some() { + return Ok(Some(RoomAiLockGuard { + cache: cache.clone(), + queue_key, + ticket_key, + lock_key, + lock_token, + request_uid, + acquired: true, + log: log.clone(), + })); + } + } else { + let head_ticket_key = format!("ai:room:queue:ticket:{}:{}", room_id, head_uid); + let head_exists: i32 = redis::cmd("EXISTS") + .arg(&head_ticket_key) + .query_async(&mut conn) + .await + .map_err(|e| RoomError::Internal(format!("EXISTS: {}", e)))?; + + if head_exists == 0 { + let _: i32 = redis::cmd("ZREM") + .arg(&queue_key) + .arg(head_uid) + .query_async(&mut conn) + .await + .map_err(|e| RoomError::Internal(format!("ZREM: {}", e)))?; + } + } + } + + retry_count += 1; + let backoff_exp = retry_count.min(5); + let backoff_ms = std::cmp::min(10 * (2_u64.pow(backoff_exp)), MAX_BACKOFF_MS); + tokio::time::sleep(Duration::from_millis(backoff_ms)).await; + } +} + +async fn release_lock( + cache: &AppCache, + queue_key: &str, + ticket_key: &str, + lock_key: &str, + lock_token: &str, + request_uid: &str, +) -> Result<(), String> { + let mut conn = cache.conn().await.map_err(|e| e.to_string())?; + + let release_script = redis::Script::new( + r#" + if 
redis.call("GET", KEYS[1]) == ARGV[1] then + return redis.call("DEL", KEYS[1]) + else + return 0 + end + "#, + ); + let _: () = release_script + .key(lock_key) + .arg(lock_token) + .invoke_async(&mut conn) + .await + .map_err(|e| format!("DEL lock: {}", e))?; + + let _: i32 = redis::cmd("ZREM") + .arg(queue_key) + .arg(request_uid) + .query_async(&mut conn) + .await + .map_err(|e| format!("ZREM: {}", e))?; + + let _: () = redis::cmd("DEL") + .arg(ticket_key) + .query_async(&mut conn) + .await + .map_err(|e| format!("DEL ticket: {}", e))?; + + Ok(()) +} diff --git a/libs/room/src/search.rs b/libs/room/src/search.rs new file mode 100644 index 0000000..0503909 --- /dev/null +++ b/libs/room/src/search.rs @@ -0,0 +1,284 @@ +use crate::error::RoomError; +use crate::service::RoomService; +use crate::ws_context::WsUserContext; +use chrono::Utc; +use models::rooms::{room_message, room_message_reaction}; +use models::{DateTimeUtc, MessageId, RoomId, RoomThreadId, Seq, UserId}; +use sea_orm::*; +use uuid::Uuid; + +impl RoomService { + pub async fn room_message_search( + &self, + room_id: Uuid, + query: &str, + limit: Option, + offset: Option, + ctx: &WsUserContext, + ) -> Result { + let user_id = ctx.user_id; + self.require_room_member(room_id, user_id).await?; + + if query.trim().is_empty() { + return Ok(super::MessageSearchResponse { + messages: Vec::new(), + total: 0, + }); + } + + let limit = std::cmp::min(limit.unwrap_or(20), 100); + let offset = offset.unwrap_or(0); + + // PostgreSQL full-text search via raw SQL with parameterized query. + // plainto_tsquery('simple', $1) is injection-safe — it treats input as text. 
+ let sql = r#" + SELECT id, seq, room, sender_type, sender_id, thread, in_reply_to, + content, content_type, edited_at, send_at, revoked, revoked_by + FROM room_message + WHERE room = $1 + AND content_tsv @@ plainto_tsquery('simple', $2) + AND revoked IS NULL + ORDER BY send_at DESC + LIMIT $3 OFFSET $4"#; + + let stmt = Statement::from_sql_and_values( + DbBackend::Postgres, + sql, + vec![ + room_id.into(), + query.trim().into(), + limit.into(), + offset.into(), + ], + ); + + let rows: Vec = self + .db + .query_all_raw(stmt) + .await? + .into_iter() + .map(|row| { + let sender_type = row + .try_get::("", "sender_type") + .map(|s| match s.as_str() { + "admin" => models::rooms::MessageSenderType::Admin, + "owner" => models::rooms::MessageSenderType::Owner, + "ai" => models::rooms::MessageSenderType::Ai, + "system" => models::rooms::MessageSenderType::System, + "tool" => models::rooms::MessageSenderType::Tool, + "guest" => models::rooms::MessageSenderType::Guest, + _ => models::rooms::MessageSenderType::Member, + }) + .unwrap_or(models::rooms::MessageSenderType::Member); + + let content_type = row + .try_get::("", "content_type") + .map(|s| match s.as_str() { + "image" => models::rooms::MessageContentType::Image, + "audio" => models::rooms::MessageContentType::Audio, + "video" => models::rooms::MessageContentType::Video, + "file" => models::rooms::MessageContentType::File, + _ => models::rooms::MessageContentType::Text, + }) + .unwrap_or(models::rooms::MessageContentType::Text); + + room_message::Model { + id: row.try_get::("", "id").unwrap_or_default(), + seq: row.try_get::("", "seq").unwrap_or_default(), + room: row.try_get::("", "room").unwrap_or_default(), + sender_type, + sender_id: row + .try_get::>("", "sender_id") + .ok() + .flatten(), + thread: row + .try_get::>("", "thread") + .ok() + .flatten(), + in_reply_to: row + .try_get::>("", "in_reply_to") + .ok() + .flatten(), + content: row.try_get::("", "content").unwrap_or_default(), + content_type, + edited_at: 
row + .try_get::>("", "edited_at") + .ok() + .flatten(), + send_at: row + .try_get::("", "send_at") + .unwrap_or_default(), + revoked: row + .try_get::>("", "revoked") + .ok() + .flatten(), + revoked_by: row + .try_get::>("", "revoked_by") + .ok() + .flatten(), + content_tsv: None, + } + }) + .collect(); + + // Efficient COUNT query. + let count_sql = r#" + SELECT COUNT(*) AS count + FROM room_message + WHERE room = $1 + AND content_tsv @@ plainto_tsquery('simple', $2) + AND revoked IS NULL"#; + + let count_stmt = Statement::from_sql_and_values( + DbBackend::Postgres, + count_sql, + vec![room_id.into(), query.trim().into()], + ); + + let count_row = self.db.query_one_raw(count_stmt).await?; + let total: i64 = count_row + .and_then(|r| r.try_get::("", "count").ok()) + .unwrap_or(0); + + let response_messages = self.build_messages_with_display_names(rows).await; + + Ok(super::MessageSearchResponse { + messages: response_messages, + total, + }) + } + + pub async fn room_message_reaction_list( + &self, + room_id: Uuid, + message_id: Uuid, + ctx: &WsUserContext, + ) -> Result { + let user_id = ctx.user_id; + self.require_room_member(room_id, user_id).await?; + + let _msg = room_message::Entity::find_by_id(message_id) + .one(&self.db) + .await? 
+ .ok_or_else(|| RoomError::NotFound("Message not found".to_string()))?; + + self.get_message_reactions(message_id, Some(user_id)).await + } + + pub async fn room_message_reaction_toggle( + &self, + room_id: Uuid, + message_id: Uuid, + emoji: String, + ctx: &WsUserContext, + ) -> Result { + let user_id = ctx.user_id; + self.require_room_member(room_id, user_id).await?; + + if emoji.is_empty() || emoji.len() > 50 { + return Err(RoomError::BadRequest("Invalid emoji format".to_string())); + } + + if let Some(existing) = room_message_reaction::Entity::find() + .filter(room_message_reaction::Column::Room.eq(room_id)) + .filter(room_message_reaction::Column::Message.eq(message_id)) + .filter(room_message_reaction::Column::User.eq(user_id)) + .filter(room_message_reaction::Column::Emoji.eq(&emoji)) + .one(&self.db) + .await? + { + room_message_reaction::Entity::delete_by_id(existing.id) + .exec(&self.db) + .await?; + } else { + room_message_reaction::ActiveModel { + id: Set(Uuid::now_v7()), + room: Set(room_id), + message: Set(message_id), + user: Set(user_id), + emoji: Set(emoji), + created_at: Set(Utc::now()), + } + .insert(&self.db) + .await?; + } + + self.get_message_reactions(message_id, Some(user_id)).await + } + + pub async fn room_message_edit_history( + &self, + room_id: Uuid, + message_id: Uuid, + ctx: &WsUserContext, + ) -> Result { + let user_id = ctx.user_id; + self.require_room_member(room_id, user_id).await?; + + let _msg = room_message::Entity::find_by_id(message_id) + .one(&self.db) + .await? 
+ .ok_or_else(|| RoomError::NotFound("Message not found".to_string()))?; + + let history = models::rooms::room_message_edit_history::Entity::find() + .filter(models::rooms::room_message_edit_history::Column::Message.eq(message_id)) + .order_by_asc(models::rooms::room_message_edit_history::Column::EditedAt) + .all(&self.db) + .await?; + + let total_edits = history.len() as i64; + + let entries: Vec = history + .into_iter() + .map(|h| super::MessageEditHistoryEntry { + old_content: h.old_content, + new_content: h.new_content, + edited_at: h.edited_at, + }) + .collect(); + + Ok(super::MessageEditHistoryResponse { + message_id, + history: entries, + total_edits, + }) + } + + pub async fn room_member_leave( + &self, + room_id: Uuid, + ctx: &WsUserContext, + ) -> Result<(), RoomError> { + let user_id = ctx.user_id; + + let member = self + .find_room_member(room_id, user_id) + .await? + .ok_or_else(|| RoomError::NotFound("You are not a member of this room".to_string()))?; + + if member.role.to_string() == "owner" { + return Err(RoomError::BadRequest( + "Owner cannot leave the room. 
Transfer ownership first.".to_string(), + )); + } + + models::rooms::room_member::Entity::delete_by_id((room_id, user_id)) + .exec(&self.db) + .await?; + + self.room_manager.unsubscribe(room_id, user_id).await; + + let room = self.find_room_or_404(room_id).await?; + self.publish_room_event( + room.project, + super::RoomEventType::MemberRemoved, + Some(room_id), + None, + None, + None, + ) + .await; + + Ok(()) + } +} diff --git a/libs/room/src/service.rs b/libs/room/src/service.rs new file mode 100644 index 0000000..e9cd852 --- /dev/null +++ b/libs/room/src/service.rs @@ -0,0 +1,1174 @@ +use dashmap::DashMap; +use std::pin::Pin; +use std::sync::Arc; +use std::sync::LazyLock; + +use chrono::Utc; +use db::cache::AppCache; +use db::database::AppDatabase; +use models::projects::project_members; +use models::rooms::room; +use models::rooms::room_ai; +use models::EntityTrait; +use queue::{AgentTaskEvent, MessageProducer, ProjectRoomEvent, RoomMessageEnvelope}; +use sea_orm::{sea_query::Expr, ColumnTrait, ExprTrait, QueryFilter, QueryOrder, QuerySelect}; +use uuid::Uuid; + +use crate::connection::{ + extract_get_redis, make_persist_fn, DedupCache, PersistFn, RoomConnectionManager, +}; +use crate::error::RoomError; +use agent::chat::{AiRequest, ChatService, Mention}; +use agent::TaskService; +use models::agent_task::AgentType; + +const DEFAULT_MAX_CONCURRENT_WORKERS: usize = 1024; + +static USER_MENTION_RE: LazyLock regex_lite::Regex> = + LazyLock::new(|| regex_lite::Regex::new(r"\s*([^<]+?)\s*").unwrap()); + +#[derive(Clone)] +pub struct RoomService { + pub db: AppDatabase, + pub cache: AppCache, + pub room_manager: Arc, + pub queue: MessageProducer, + pub redis_url: String, + pub chat_service: Option>, + pub task_service: Option>, + pub log: slog::Logger, + worker_semaphore: Arc, + dedup_cache: DedupCache, +} + +impl RoomService { + pub fn new( + db: AppDatabase, + cache: AppCache, + queue: MessageProducer, + room_manager: Arc, + redis_url: String, + chat_service: 
Option>, + task_service: Option>, + log: slog::Logger, + max_concurrent_workers: Option, + ) -> Self { + let dedup_cache: DedupCache = + Arc::new(DashMap::with_capacity_and_hasher(10000, Default::default())); + Self { + db, + cache, + room_manager, + queue, + redis_url, + chat_service, + task_service, + log, + worker_semaphore: Arc::new(tokio::sync::Semaphore::new( + max_concurrent_workers.unwrap_or(DEFAULT_MAX_CONCURRENT_WORKERS), + )), + dedup_cache, + } + } + + pub async fn start_workers( + &self, + mut shutdown_rx: tokio::sync::broadcast::Receiver<()>, + log: slog::Logger, + ) -> anyhow::Result<()> { + use models::rooms::Room; + use sea_orm::EntityTrait; + + let rooms: Vec = Room::find().all(&self.db).await?; + let room_ids: Vec = rooms.iter().map(|r| r.id).collect(); + let project_ids: Vec = rooms + .iter() + .map(|r| r.project) + .collect::>() + .into_iter() + .collect(); + + // Save a clone for task subscriber handles before `project_ids` gets moved. + let task_project_ids = project_ids.clone(); + + slog::info!(log, "starting room workers"; + "room_count" => room_ids.len(), "project_count" => project_ids.len()); + + let persist_fn: PersistFn = make_persist_fn( + self.db.clone(), + self.room_manager.metrics.clone(), + self.dedup_cache.clone(), + ); + + let get_redis: Arc queue::worker::RedisFuture + Send + Sync> = + extract_get_redis(self.queue.clone()); + + let worker_log = log.clone(); + let worker_room_ids = room_ids.clone(); + let worker_shutdown = shutdown_rx.resubscribe(); + let worker_handle = tokio::spawn({ + let get_redis = get_redis.clone(); + let persist_fn = persist_fn.clone(); + async move { + queue::start_worker( + worker_room_ids, + get_redis, + persist_fn, + worker_shutdown, + worker_log, + ) + .await; + } + }); + + let manager = self.room_manager.clone(); + let subscriber_log = log.clone(); + let redis_url = self.redis_url.clone(); + + let mut handles: Vec<_> = room_ids + .into_iter() + .map(|room_id| { + let manager = manager.clone(); + let 
log = subscriber_log.clone(); + let redis_url = redis_url.clone(); + let shutdown_rx = shutdown_rx.resubscribe(); + tokio::spawn(async move { + crate::connection::subscribe_room_events( + redis_url, + manager, + room_id, + log, + shutdown_rx, + ) + .await; + }) + }) + .collect(); + + let project_handles: Vec<_> = project_ids + .into_iter() + .map(|project_id| { + let manager = manager.clone(); + let log = subscriber_log.clone(); + let redis_url = redis_url.clone(); + let shutdown_rx = shutdown_rx.resubscribe(); + tokio::spawn(async move { + crate::connection::subscribe_project_room_events( + redis_url, + manager, + project_id, + log, + shutdown_rx, + ) + .await; + }) + }) + .collect(); + handles.extend(project_handles); + + // Subscribe to agent task events for each project. + let task_handles: Vec<_> = task_project_ids + .into_iter() + .map(|project_id| { + let manager = manager.clone(); + let log = subscriber_log.clone(); + let redis_url = redis_url.clone(); + let shutdown_rx = shutdown_rx.resubscribe(); + tokio::spawn(async move { + crate::connection::subscribe_task_events_fn( + redis_url, + manager, + project_id, + log, + shutdown_rx, + ) + .await; + }) + }) + .collect(); + handles.extend(task_handles); + + let cleanup_handle = { + let manager = self.room_manager.clone(); + let db = self.db.clone(); + let dedup_cache = self.dedup_cache.clone(); + let mut cleanup_shutdown = shutdown_rx.resubscribe(); + tokio::spawn(async move { + let mut interval = tokio::time::interval(tokio::time::Duration::from_secs(300)); + interval.tick().await; + loop { + tokio::select! 
{ + _ = interval.tick() => { + manager.cleanup_rate_limit().await; + crate::connection::cleanup_dedup_cache(&dedup_cache); + if let Ok(rooms) = Room::find().all(&db).await { + let room_ids: Vec<_> = rooms.iter().map(|r| r.id).collect(); + let project_ids: Vec<_> = rooms.iter().map(|r| r.project).collect(); + manager.metrics.cleanup_stale_rooms(&room_ids).await; + manager.prune_stale_rooms(&room_ids).await; + manager.prune_stale_projects(&project_ids).await; + } + } + _ = cleanup_shutdown.recv() => { + slog::info!(slog::Logger::root(slog::Discard, slog::o!()), "cleanup task shutting down"); + break; + } + } + } + }) + }; + handles.push(cleanup_handle); + + let _ = shutdown_rx.recv().await; + + slog::info!(log, "room workers shutting down"); + + for h in handles { + let _ = h.abort(); + } + let _ = worker_handle.await; + + slog::info!(log, "room workers stopped"); + Ok(()) + } + + /// Spawn a background agent task: + /// 1. Creates a DB record (status = pending → running) + /// 2. Publishes a "started" event via Redis Pub/Sub + /// 3. Runs `execute()` behind a semaphore + /// 4. On complete/fail, updates the record and publishes the final event + pub async fn spawn_agent_task( + &self, + project_id: Uuid, + agent_type: AgentType, + input: String, + title: Option, + execute: F, + ) -> anyhow::Result + where + F: FnOnce(i64, Arc) -> Fut + Send + 'static, + Fut: std::future::Future> + Send, + { + let task_service = match &self.task_service { + Some(ts) => ts.clone(), + None => return Err(anyhow::anyhow!("task service not configured")), + }; + + let task = task_service + .create(project_id, input, agent_type) + .await + .map_err(|e| anyhow::anyhow!("create task failed: {}", e))?; + + let task_id = task.id; + + // Publish "started" event via Redis Pub/Sub. 
+ let started_event = AgentTaskEvent { + task_id, + project_id, + parent_id: task.parent_id, + event: "started".to_string(), + message: None, + output: None, + error: None, + status: models::agent_task::TaskStatus::Running.to_string(), + timestamp: Utc::now(), + }; + self.queue + .publish_agent_task_event(project_id, started_event) + .await; + + // Mark task as running. + let _ = task_service.start(task_id).await; + + let queue = self.queue.clone(); + let room_manager = self.room_manager.clone(); + let semaphore = self.worker_semaphore.clone(); + let log = self.log.clone(); + + // Spawn the background task. + tokio::spawn(async move { + let _permit = semaphore.acquire().await.expect("semaphore closed"); + + let result = execute(task_id, task_service.clone()).await; + + let event = match result { + Ok(output) => { + let _ = task_service.complete(task_id, &output).await; + AgentTaskEvent { + task_id, + project_id, + parent_id: None, + event: "done".to_string(), + message: None, + output: Some(output), + error: None, + status: models::agent_task::TaskStatus::Done.to_string(), + timestamp: chrono::Utc::now(), + } + } + Err(err) => { + let _ = task_service.fail(task_id, &err).await; + AgentTaskEvent { + task_id, + project_id, + parent_id: None, + event: "failed".to_string(), + message: None, + output: None, + error: Some(err), + status: models::agent_task::TaskStatus::Failed.to_string(), + timestamp: chrono::Utc::now(), + } + } + }; + + queue + .publish_agent_task_event(project_id, event.clone()) + .await; + room_manager.broadcast_agent_task(project_id, event).await; + slog::info!(log, "agent task finished"; "task_id" => task_id, "project_id" => %project_id); + }); + + Ok(task_id) + } + + pub fn spawn_room_workers(&self, room_id: uuid::Uuid) { + let persist_fn: PersistFn = make_persist_fn( + self.db.clone(), + self.room_manager.metrics.clone(), + self.dedup_cache.clone(), + ); + let get_redis: Arc queue::worker::RedisFuture + Send + Sync> = + 
extract_get_redis(self.queue.clone()); + let manager = self.room_manager.clone(); + let redis_url = self.redis_url.clone(); + let log = self.log.clone(); + let semaphore = self.worker_semaphore.clone(); + let db = self.db.clone(); + + let log2 = log.clone(); + let log3 = log.clone(); + let manager2 = self.room_manager.clone(); + let redis_url2 = redis_url.clone(); + let redis_url3 = redis_url.clone(); + + tokio::spawn(async move { + let _permit = match semaphore.acquire_owned().await { + Ok(p) => p, + Err(_) => return, + }; + let (shutdown_tx, shutdown_rx) = tokio::sync::broadcast::channel::<()>(1); + queue::room_worker_task( + room_id, + uuid::Uuid::new_v4().to_string(), + get_redis, + persist_fn, + shutdown_rx, + log, + ) + .await; + let _ = shutdown_tx.send(()); + }); + + tokio::spawn(async move { + let shutdown_rx = manager.register_room(room_id).await; + crate::connection::subscribe_room_events( + redis_url2, + manager.clone(), + room_id, + log2, + shutdown_rx, + ) + .await; + }); + + tokio::spawn(async move { + let project_id = { + let room = room::Entity::find_by_id(room_id) + .one(&db) + .await + .ok() + .flatten(); + match room { + Some(r) => r.project, + None => return, + } + }; + let shutdown_rx = manager2.register_project(project_id).await; + crate::connection::subscribe_project_room_events( + redis_url3, + manager2, + project_id, + log3, + shutdown_rx, + ) + .await; + }); + } + + pub async fn publish_room_event( + &self, + project_id: uuid::Uuid, + event_type: super::RoomEventType, + room_id: Option, + category_id: Option, + message_id: Option, + seq: Option, + ) { + let event = ProjectRoomEvent { + event_type: event_type.as_str().into(), + project_id, + room_id, + category_id, + message_id, + seq, + timestamp: Utc::now(), + }; + self.queue + .publish_project_room_event(project_id, event) + .await; + } + + pub(crate) fn notify_project_members( + &self, + project_id: uuid::Uuid, + notification_type: super::NotificationType, + title: String, + content: 
Option, + related_room_id: Option, + ) { + let db = self.db.clone(); + let notification_type_inner = notification_type; + let title_inner = title; + let content_inner = content; + let related_room_id_inner = related_room_id; + let project_id_inner = project_id; + + tokio::spawn(async move { + let members = match project_members::Entity::find() + .filter(project_members::Column::Project.eq(project_id_inner)) + .all(&db) + .await + { + Ok(m) => m, + Err(e) => { + slog::error!(slog::Logger::root(slog::Discard, slog::o!()), + "notify_project_members: failed to fetch members"; + "project_id" => %project_id_inner, "error" => %e); + return; + } + }; + + for member in members { + let user_id = member.user; + if let Err(e) = Self::_notification_create_sync( + &db, + notification_type_inner, + user_id, + title_inner.clone(), + content_inner.clone(), + related_room_id_inner, + project_id_inner, + ) + .await + { + slog::warn!(slog::Logger::root(slog::Discard, slog::o!()), + "notify_project_members: failed to create notification for user"; + "user_id" => %user_id, "project_id" => %project_id_inner, "error" => %e); + } + } + }); + } + + async fn _notification_create_sync( + db: &db::database::AppDatabase, + notification_type: super::NotificationType, + user_id: uuid::Uuid, + title: String, + content: Option, + related_room_id: Option, + project_id: uuid::Uuid, + ) -> Result<(), crate::error::RoomError> { + use chrono::Utc; + use models::rooms::room_notifications; + use sea_orm::{ActiveModelTrait, Set}; + + let notification_type_model = match notification_type { + super::NotificationType::Mention => room_notifications::NotificationType::Mention, + super::NotificationType::Invitation => room_notifications::NotificationType::Invitation, + super::NotificationType::RoleChange => room_notifications::NotificationType::RoleChange, + super::NotificationType::RoomCreated => { + room_notifications::NotificationType::RoomCreated + } + super::NotificationType::RoomDeleted => { + 
room_notifications::NotificationType::RoomDeleted + } + super::NotificationType::SystemAnnouncement => { + room_notifications::NotificationType::SystemAnnouncement + } + }; + + let _model = room_notifications::ActiveModel { + id: Set(uuid::Uuid::now_v7()), + room: Set(related_room_id), + project: Set(Some(project_id)), + user_id: Set(Some(user_id)), + notification_type: Set(notification_type_model), + related_message_id: Set(None), + related_user_id: Set(None), + related_room_id: Set(related_room_id), + title: Set(title), + content: Set(content), + metadata: Set(None), + is_read: Set(false), + is_archived: Set(false), + created_at: Set(Utc::now()), + read_at: Set(None), + expires_at: Set(None), + } + .insert(db) + .await + .map_err(|e| crate::error::RoomError::Database(e))?; + + Ok(()) + } + + pub fn extract_mentions(content: &str) -> Vec { + let mut mentioned = Vec::new(); + for cap in USER_MENTION_RE.captures_iter(content) { + if let Some(inner) = cap.get(1) { + let token = inner.as_str().trim(); + if let Ok(uuid) = Uuid::parse_str(token) { + if !mentioned.contains(&uuid) { + mentioned.push(uuid); + } + } + } + } + + mentioned + } + + pub async fn resolve_mentions(&self, content: &str) -> Vec { + use models::users::User; + use sea_orm::EntityTrait; + + let mut resolved: Vec = Vec::new(); + let mut seen_usernames: Vec = Vec::new(); + + for cap in USER_MENTION_RE.captures_iter(content) { + if let Some(inner) = cap.get(1) { + let token = inner.as_str().trim(); + + if let Ok(uuid) = Uuid::parse_str(token) { + if !resolved.contains(&uuid) { + resolved.push(uuid); + } + continue; + } + + let token_lower = token.to_lowercase(); + if seen_usernames.contains(&token_lower) { + continue; + } + seen_usernames.push(token_lower.clone()); + + if let Some(user) = User::find() + .filter(models::users::user::Column::Username.eq(token_lower)) + .one(&self.db) + .await + .ok() + .flatten() + { + if !resolved.contains(&user.uid) { + resolved.push(user.uid); + } + } + } + } + + 
resolved + } + + pub async fn check_room_access(&self, room_id: Uuid, user_id: Uuid) -> Result<(), RoomError> { + let room = room::Entity::find_by_id(room_id) + .one(&self.db) + .await? + .ok_or_else(|| RoomError::NotFound("Room not found".to_string()))?; + + if room.public { + return Ok(()); + } + + if self.require_room_member(room_id, user_id).await.is_ok() { + return Ok(()); + } + + self.check_project_member(room.project, user_id).await?; + + Ok(()) + } + + pub async fn check_project_member( + &self, + project_id: Uuid, + user_id: Uuid, + ) -> Result<(), RoomError> { + let member = project_members::Entity::find() + .filter(project_members::Column::Project.eq(project_id)) + .filter(project_members::Column::User.eq(user_id)) + .one(&self.db) + .await?; + + if member.is_some() { + Ok(()) + } else { + Err(RoomError::NoPower) + } + } + + pub async fn should_ai_respond(&self, room_id: Uuid) -> Result { + use models::rooms::room_ai; + + let ai_config = room_ai::Entity::find() + .filter(room_ai::Column::Room.eq(room_id)) + .one(&self.db) + .await?; + + Ok(ai_config.is_some()) + } + + pub async fn get_room_ai_config( + &self, + room_id: Uuid, + ) -> Result, RoomError> { + use models::rooms::room_ai; + + let ai_config = room_ai::Entity::find() + .filter(room_ai::Column::Room.eq(room_id)) + .one(&self.db) + .await?; + + Ok(ai_config) + } + + pub async fn get_user_names( + &self, + user_ids: &[Uuid], + ) -> std::collections::HashMap { + use models::users::User; + use sea_orm::EntityTrait; + + let mut names = std::collections::HashMap::new(); + if user_ids.is_empty() { + return names; + } + + let users = User::find() + .filter(models::users::user::Column::Uid.is_in(user_ids.to_vec())) + .all(&self.db) + .await + .unwrap_or_default(); + + for user in users { + names.insert(user.uid, user.username); + } + + names + } + + pub async fn require_room_member(&self, room_id: Uuid, user_id: Uuid) -> Result<(), RoomError> { + use models::rooms::room_member::{Column as RmCol, Entity as 
RoomMember}; + + let member = RoomMember::find() + .filter(RmCol::Room.eq(room_id)) + .filter(RmCol::User.eq(user_id)) + .one(&self.db) + .await?; + + member + .ok_or_else(|| RoomError::NotFound("Room member not found".to_string())) + .map(|_| ()) + } + + pub async fn find_room_or_404(&self, room_id: Uuid) -> Result { + room::Entity::find_by_id(room_id) + .one(&self.db) + .await? + .ok_or_else(|| RoomError::NotFound("Room not found".to_string())) + } + + pub async fn process_message_ai( + &self, + room_id: Uuid, + _message_id: Uuid, + sender_id: Uuid, + content: String, + ) -> Result<(), RoomError> { + let Some(chat_service) = &self.chat_service else { + return Ok(()); + }; + + let Some(ai_config) = self.get_room_ai_config(room_id).await? else { + return Ok(()); + }; + + let Some(lock_guard) = + crate::room_ai_queue::acquire_room_ai_lock(&self.cache, room_id, &self.log).await? + else { + return Ok(()); + }; + + let room = self.find_room_or_404(room_id).await?; + + let project = models::projects::project::Entity::find_by_id(room.project) + .one(&self.db) + .await? + .ok_or_else(|| RoomError::NotFound("Project not found".to_string()))?; + + let model = models::agents::model::Entity::find_by_id(ai_config.model) + .one(&self.db) + .await? + .ok_or_else(|| RoomError::NotFound("AI model not found".to_string()))?; + + let sender = models::users::User::find_by_id(sender_id) + .one(&self.db) + .await? 
+ .ok_or_else(|| RoomError::NotFound("Sender not found".to_string()))?; + + let history = self.get_room_history(room_id, 50).await?; + + let user_ids: Vec = history + .iter() + .filter_map(|m| m.sender_id) + .chain(std::iter::once(sender_id)) + .collect(); + let user_names = self.get_user_names(&user_ids).await; + + let mentions = self.extract_mention_context(&content).await; + + let request = AiRequest { + db: self.db.clone(), + cache: self.cache.clone(), + model, + project: project.clone(), + sender, + room: room.clone(), + input: content, + mention: mentions, + history, + user_names, + temperature: ai_config.temperature.unwrap_or(0.7), + max_tokens: ai_config.max_tokens.unwrap_or(4096) as i32, + top_p: 1.0, + frequency_penalty: 0.0, + presence_penalty: 0.0, + think: ai_config.think, + tools: None, + max_tool_depth: 0, + }; + + let use_streaming = ai_config.stream; + + if use_streaming { + self.process_message_ai_streaming( + chat_service.clone(), + request, + room_id, + room.project, + ai_config.model, + lock_guard, + ) + .await; + } else { + self.process_message_ai_nonstreaming( + chat_service.clone(), + request, + room_id, + room.project, + ai_config.model, + lock_guard, + ) + .await; + } + + Ok(()) + } + + async fn process_message_ai_streaming( + &self, + chat_service: Arc, + request: AiRequest, + room_id: Uuid, + project_id: Uuid, + _model_id: Uuid, + lock_guard: crate::room_ai_queue::RoomAiLockGuard, + ) { + use queue::RoomMessageStreamChunkEvent; + + let streaming_msg_id = Uuid::now_v7(); + let seq = match Self::next_room_message_seq_internal(room_id, &self.db, &self.cache).await { + Ok(s) => s, + Err(e) => { + slog::error!( + self.log, + "Failed to get seq for streaming AI message: {}", + e + ); + return; + } + }; + + let stream_rx = self + .room_manager + .register_stream_channel(streaming_msg_id) + .await; + + let room_manager = self.room_manager.clone(); + let db = self.db.clone(); + let room_id_inner = room_id; + let project_id_inner = project_id; + 
let now = Utc::now(); + let sender_type = "ai".to_string(); + let queue = self.queue.clone(); + let ai_display_name = request.model.name.clone(); + + let db = db.clone(); + let model_id = request.model.id; + let log = self.log.clone(); + tokio::spawn(async move { + let _lock_guard = lock_guard; + let room_manager = room_manager.clone(); + let db = db.clone(); + let model_id = model_id; + let ai_display_name = ai_display_name; + + let streaming_msg_id = streaming_msg_id; + let room_id_for_chunk = room_id_inner; + let chunk_count = std::sync::Arc::new(std::sync::atomic::AtomicU64::new(0)); + let room_manager_cb = room_manager.clone(); + + let on_chunk = move |chunk: agent::chat::AiStreamChunk| { + Box::pin({ + let room_manager = room_manager_cb.clone(); + let streaming_msg_id = streaming_msg_id; + let room_id = room_id_for_chunk; + let chunk_count = chunk_count.clone(); + async move { + let event = RoomMessageStreamChunkEvent { + message_id: streaming_msg_id, + room_id, + content: chunk.content, + done: chunk.done, + error: None, + }; + room_manager.broadcast_stream_chunk(event).await; + + chunk_count.fetch_add(1, std::sync::atomic::Ordering::Relaxed); + } + }) as Pin + Send>> + }; + + let stream_callback: agent::chat::StreamCallback = Box::new(on_chunk); + + match chat_service.process_stream(request, stream_callback).await { + Ok(()) => { + let full_content = { + let mut rx = stream_rx; + let mut content = String::new(); + while let Ok(chunk_event) = rx.recv().await { + if chunk_event.done { + content = chunk_event.content.clone(); + break; + } + content = chunk_event.content.clone(); + } + content + }; + + let envelope = RoomMessageEnvelope { + id: streaming_msg_id, + dedup_key: Some(format!("{}:{}", room_id_inner, streaming_msg_id)), + room_id: room_id_inner, + sender_type: sender_type.clone(), + sender_id: None, + thread_id: None, + content: full_content.clone(), + content_type: "text".to_string(), + send_at: now, + seq, + in_reply_to: None, + }; + + if let 
Err(e) = queue.publish(room_id_inner, envelope).await { + slog::error!(log, "Failed to publish streaming AI message: {}", e); + } else { + let now = Utc::now(); + if let Err(e) = room_ai::Entity::update_many() + .col_expr( + room_ai::Column::CallCount, + Expr::col(room_ai::Column::CallCount).add(1), + ) + .col_expr(room_ai::Column::LastCallAt, Expr::value(Some(now))) + .filter(room_ai::Column::Room.eq(room_id_inner)) + .filter(room_ai::Column::Model.eq(model_id)) + .exec(&db) + .await + { + slog::warn!(log, "Failed to update room_ai call stats: {}", e); + } + + let msg_event = queue::RoomMessageEvent { + id: streaming_msg_id, + room_id: room_id_inner, + sender_type: sender_type.clone(), + sender_id: None, + thread_id: None, + content: full_content, + content_type: "text".to_string(), + send_at: now, + seq, + display_name: Some(ai_display_name.clone()), + in_reply_to: None, + reactions: None, + }; + room_manager.broadcast(room_id_inner, msg_event).await; + room_manager.metrics.messages_sent.increment(1); + + let event = queue::ProjectRoomEvent { + event_type: super::RoomEventType::NewMessage.as_str().into(), + project_id: project_id_inner, + room_id: Some(room_id_inner), + category_id: None, + message_id: Some(streaming_msg_id), + seq: Some(seq), + timestamp: now, + }; + queue + .publish_project_room_event(project_id_inner, event) + .await; + } + } + Err(e) => { + slog::error!(log, "AI streaming failed: {}", e); + let event = RoomMessageStreamChunkEvent { + message_id: streaming_msg_id, + room_id: room_id_inner, + content: String::new(), + done: true, + error: Some(e.to_string()), + }; + room_manager.broadcast_stream_chunk(event).await; + } + } + + room_manager.close_stream_channel(streaming_msg_id).await; + }); + } + + async fn process_message_ai_nonstreaming( + &self, + chat_service: Arc, + request: AiRequest, + room_id: Uuid, + project_id: Uuid, + model_id: Uuid, + lock_guard: crate::room_ai_queue::RoomAiLockGuard, + ) { + let chat_service = chat_service.clone(); 
+ let db = self.db.clone(); + let cache = self.cache.clone(); + let queue = self.queue.clone(); + let room_manager = self.room_manager.clone(); + let log = self.log.clone(); + let room_id_for_ai = room_id; + let project_id_for_ai = project_id; + let model_id_inner = model_id; + + tokio::spawn(async move { + let _lock_guard = lock_guard; + let model_display_name = request.model.name.clone(); + match chat_service.process(request).await { + Ok(response) => { + if let Err(e) = Self::create_and_publish_ai_message( + &db, + &cache, + &queue, + &room_manager, + room_id_for_ai, + project_id_for_ai, + Uuid::now_v7(), + response, + Some(model_display_name), + ) + .await + { + slog::error!(log, "Failed to create AI message: {}", e); + } else { + let now = Utc::now(); + if let Err(e) = room_ai::Entity::update_many() + .col_expr( + room_ai::Column::CallCount, + Expr::col(room_ai::Column::CallCount).add(1), + ) + .col_expr(room_ai::Column::LastCallAt, Expr::value(Some(now))) + .filter(room_ai::Column::Room.eq(room_id_for_ai)) + .filter(room_ai::Column::Model.eq(model_id_inner)) + .exec(&db) + .await + { + slog::warn!(log, "Failed to update room_ai call stats: {}", e); + } + } + } + Err(e) => { + slog::error!(log, "AI processing failed: {}", e); + } + } + }); + } + + pub async fn create_and_publish_ai_message( + db: &AppDatabase, + cache: &AppCache, + queue: &MessageProducer, + room_manager: &Arc, + room_id: Uuid, + project_id: Uuid, + _reply_to: Uuid, + content: String, + model_display_name: Option, + ) -> Result { + let now = Utc::now(); + let seq = Self::next_room_message_seq_internal(room_id, db, cache).await?; + let id = Uuid::now_v7(); + + let envelope = RoomMessageEnvelope { + id, + dedup_key: Some(format!("{}:{}", room_id, id)), + room_id, + sender_type: "ai".to_string(), + sender_id: None, + thread_id: None, + content: content.clone(), + content_type: "text".to_string(), + send_at: now, + seq, + in_reply_to: None, + }; + + queue.publish(room_id, envelope).await?; + 
room_manager.metrics.messages_sent.increment(1); + + let event = queue::RoomMessageEvent { + id, + room_id, + sender_type: "ai".to_string(), + sender_id: None, + thread_id: None, + content: content.clone(), + content_type: "text".to_string(), + send_at: now, + seq, + display_name: model_display_name, + in_reply_to: None, + reactions: None, + }; + room_manager.broadcast(room_id, event).await; + + Self::publish_room_event_internal( + &db, + queue, + project_id, + super::RoomEventType::NewMessage, + Some(room_id), + Some(id), + Some(seq), + ) + .await; + + Ok(id) + } + + async fn get_room_history( + &self, + room_id: Uuid, + limit: usize, + ) -> Result, RoomError> { + use models::rooms::room_message::{Column as RmCol, Entity as RoomMessage}; + use sea_orm::EntityTrait; + + let messages = RoomMessage::find() + .filter(RmCol::Room.eq(room_id)) + .order_by_desc(RmCol::Seq) + .limit(limit as u64) + .all(&self.db) + .await?; + + Ok(messages) + } + + async fn extract_mention_context(&self, _content: &str) -> Vec { + Vec::new() + } + + async fn next_room_message_seq_internal( + room_id: Uuid, + db: &AppDatabase, + cache: &AppCache, + ) -> Result { + let seq_key = format!("room:seq:{}", room_id); + let mut conn = cache.conn().await.map_err(|e| { + RoomError::Internal(format!("failed to get redis connection for seq: {}", e)) + })?; + let seq: i64 = redis::cmd("INCR") + .arg(&seq_key) + .query_async(&mut conn) + .await + .map_err(|e| RoomError::Internal(format!("INCR seq: {}", e)))?; + + use models::rooms::room_message::{Column as RmCol, Entity as RoomMessage}; + use sea_orm::EntityTrait; + let db_seq: Option> = RoomMessage::find() + .filter(RmCol::Room.eq(room_id)) + .select_only() + .column_as(RmCol::Seq.max(), "max_seq") + .into_tuple::>() + .one(db) + .await? 
+ .map(|r| r); + let db_seq = db_seq.flatten().unwrap_or(0); + if db_seq >= seq { + let _: i64 = redis::cmd("SET") + .arg(&seq_key) + .arg(db_seq + 1) + .query_async(&mut conn) + .await + .map_err(|e| RoomError::Internal(format!("SET seq: {}", e)))?; + return Ok(db_seq + 1); + } + + Ok(seq) + } + + async fn publish_room_event_internal( + _db: &AppDatabase, + queue: &MessageProducer, + project_id: Uuid, + event_type: super::RoomEventType, + room_id: Option, + message_id: Option, + seq: Option, + ) { + let event = ProjectRoomEvent { + event_type: event_type.as_str().into(), + project_id, + room_id, + category_id: None, + message_id, + seq, + timestamp: Utc::now(), + }; + queue.publish_project_room_event(project_id, event).await; + } +} diff --git a/libs/room/src/thread.rs b/libs/room/src/thread.rs new file mode 100644 index 0000000..7224477 --- /dev/null +++ b/libs/room/src/thread.rs @@ -0,0 +1,105 @@ +use crate::error::RoomError; +use crate::service::RoomService; +use crate::ws_context::WsUserContext; +use chrono::Utc; +use models::rooms::{room_message, room_thread}; +use sea_orm::*; +use uuid::Uuid; + +impl RoomService { + pub async fn room_thread_list( + &self, + room_id: Uuid, + ctx: &WsUserContext, + ) -> Result, RoomError> { + let user_id = ctx.user_id; + self.require_room_member(room_id, user_id).await?; + + let models = room_thread::Entity::find() + .filter(room_thread::Column::Room.eq(room_id)) + .order_by_desc(room_thread::Column::LastMessageAt) + .all(&self.db) + .await?; + + Ok(models + .into_iter() + .map(super::RoomThreadResponse::from) + .collect()) + } + + pub async fn room_thread_create( + &self, + room_id: Uuid, + request: super::RoomThreadCreateRequest, + ctx: &WsUserContext, + ) -> Result { + let user_id = ctx.user_id; + self.require_room_member(room_id, user_id).await?; + + let parent = room_message::Entity::find() + .filter(room_message::Column::Room.eq(room_id)) + .filter(room_message::Column::Seq.eq(request.parent_seq)) + .one(&self.db) + 
.await?; + if parent.is_none() { + return Err(RoomError::NotFound("Parent message not found".to_string())); + } + + let now = Utc::now(); + let participants = serde_json::json!([user_id]); + let model = room_thread::ActiveModel { + id: Set(Uuid::now_v7()), + room: Set(room_id), + parent: Set(request.parent_seq), + created_by: Set(user_id), + participants: Set(participants), + last_message_at: Set(now), + last_message_preview: Set(None), + created_at: Set(now), + updated_at: Set(now), + } + .insert(&self.db) + .await?; + + Ok(super::RoomThreadResponse::from(model)) + } + + pub async fn room_thread_messages( + &self, + thread_id: Uuid, + before_seq: Option, + after_seq: Option, + limit: Option, + ctx: &WsUserContext, + ) -> Result { + let user_id = ctx.user_id; + let thread = room_thread::Entity::find_by_id(thread_id) + .one(&self.db) + .await? + .ok_or_else(|| RoomError::NotFound("Thread not found".to_string()))?; + self.require_room_member(thread.room, user_id).await?; + + let mut query = + room_message::Entity::find().filter(room_message::Column::Thread.eq(Some(thread.id))); + if let Some(before_seq) = before_seq { + query = query.filter(room_message::Column::Seq.lt(before_seq)); + } + if let Some(after_seq) = after_seq { + query = query.filter(room_message::Column::Seq.gt(after_seq)); + } + + let total = query.clone().count(&self.db).await? 
as i64; + let models = query + .order_by_desc(room_message::Column::Seq) + .limit(limit.unwrap_or(50)) + .all(&self.db) + .await?; + let mut messages: Vec = models + .into_iter() + .map(super::RoomMessageResponse::from) + .collect(); + messages.reverse(); + + Ok(super::RoomMessageListResponse { messages, total }) + } +} diff --git a/libs/room/src/types.rs b/libs/room/src/types.rs new file mode 100644 index 0000000..ec14e66 --- /dev/null +++ b/libs/room/src/types.rs @@ -0,0 +1,345 @@ +use chrono::{DateTime, Utc}; +use serde::{Deserialize, Serialize}; +use uuid::Uuid; + +#[derive(Debug, Clone, Serialize, Deserialize, utoipa::ToSchema)] +pub struct UserInfo { + pub uid: Uuid, + pub username: String, + pub avatar_url: Option, +} + +pub const MAX_ROOM_NAME_LEN: usize = 100; +pub const MAX_CATEGORY_NAME_LEN: usize = 100; +pub const MAX_MESSAGE_CONTENT_LEN: usize = 10000; + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize, utoipa::ToSchema)] +#[serde(rename_all = "snake_case")] +pub enum RoomEventType { + RoomCreated, + RoomDeleted, + RoomRenamed, + RoomMoved, + CategoryCreated, + CategoryDeleted, + CategoryUpdated, + NewMessage, + MessageEdited, + MessageRevoked, + MessagePinned, + MessageUnpinned, + MemberJoined, + MemberRemoved, + ReadReceipt, + ReactionAdded, + ReactionRemoved, +} + +impl RoomEventType { + pub fn as_str(&self) -> &'static str { + match self { + RoomEventType::RoomCreated => "room_created", + RoomEventType::RoomDeleted => "room_deleted", + RoomEventType::RoomRenamed => "room_renamed", + RoomEventType::RoomMoved => "room_moved", + RoomEventType::CategoryCreated => "category_created", + RoomEventType::CategoryDeleted => "category_deleted", + RoomEventType::CategoryUpdated => "category_updated", + RoomEventType::NewMessage => "new_message", + RoomEventType::MessageEdited => "message_edited", + RoomEventType::MessageRevoked => "message_revoked", + RoomEventType::MessagePinned => "message_pinned", + RoomEventType::MessageUnpinned => 
"message_unpinned", + RoomEventType::MemberJoined => "member_joined", + RoomEventType::MemberRemoved => "member_removed", + RoomEventType::ReadReceipt => "read_receipt", + RoomEventType::ReactionAdded => "reaction_added", + RoomEventType::ReactionRemoved => "reaction_removed", + } + } + + pub fn from_str(s: &str) -> Option { + match s { + "room_created" => Some(RoomEventType::RoomCreated), + "room_deleted" => Some(RoomEventType::RoomDeleted), + "room_renamed" => Some(RoomEventType::RoomRenamed), + "room_moved" => Some(RoomEventType::RoomMoved), + "category_created" => Some(RoomEventType::CategoryCreated), + "category_deleted" => Some(RoomEventType::CategoryDeleted), + "category_updated" => Some(RoomEventType::CategoryUpdated), + "new_message" => Some(RoomEventType::NewMessage), + "message_edited" => Some(RoomEventType::MessageEdited), + "message_revoked" => Some(RoomEventType::MessageRevoked), + "message_pinned" => Some(RoomEventType::MessagePinned), + "message_unpinned" => Some(RoomEventType::MessageUnpinned), + "member_joined" => Some(RoomEventType::MemberJoined), + "member_removed" => Some(RoomEventType::MemberRemoved), + "read_receipt" => Some(RoomEventType::ReadReceipt), + "reaction_added" => Some(RoomEventType::ReactionAdded), + "reaction_removed" => Some(RoomEventType::ReactionRemoved), + _ => None, + } + } +} + +impl std::fmt::Display for RoomEventType { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", self.as_str()) + } +} + +#[derive(Debug, Clone, Deserialize, Serialize, utoipa::ToSchema)] +pub struct RoomCategoryCreateRequest { + pub name: String, + pub position: Option, +} + +#[derive(Debug, Clone, Deserialize, Serialize, utoipa::ToSchema)] +pub struct RoomCategoryUpdateRequest { + pub name: Option, + pub position: Option, +} + +#[derive(Debug, Clone, Serialize, utoipa::ToSchema)] +pub struct RoomCategoryResponse { + pub id: Uuid, + pub project: Uuid, + pub name: String, + pub position: i32, + pub created_by: 
Uuid, + pub created_at: DateTime, +} + +#[derive(Debug, Clone, Deserialize, Serialize, utoipa::ToSchema)] +pub struct RoomCreateRequest { + pub room_name: String, + pub public: bool, + pub category: Option, +} + +#[derive(Debug, Clone, Deserialize, Serialize, utoipa::ToSchema)] +pub struct RoomUpdateRequest { + pub room_name: Option, + pub public: Option, + pub category: Option, +} + +#[derive(Debug, Clone, Serialize, utoipa::ToSchema)] +pub struct RoomResponse { + pub id: Uuid, + pub project: Uuid, + pub room_name: String, + pub public: bool, + pub category: Option, + pub created_by: Uuid, + pub created_at: DateTime, + pub last_msg_at: DateTime, + #[serde(default)] + pub unread_count: i64, +} + +#[derive(Debug, Clone, Deserialize, Serialize, utoipa::ToSchema)] +pub struct RoomMemberAddRequest { + pub user_id: Uuid, + pub role: Option, +} + +#[derive(Debug, Clone, Deserialize, Serialize, utoipa::ToSchema)] +pub struct RoomMemberRoleUpdateRequest { + pub user_id: Uuid, + pub role: String, +} + +#[derive(Debug, Clone, Deserialize, Serialize, utoipa::ToSchema)] +pub struct RoomMemberReadSeqRequest { + pub last_read_seq: i64, +} + +#[derive(Debug, Clone, Serialize, utoipa::ToSchema)] +pub struct RoomMemberResponse { + pub room: Uuid, + pub user: Uuid, + pub user_info: Option, + pub role: String, + pub first_msg_in: Option>, + pub joined_at: Option>, + pub last_read_seq: Option, + pub do_not_disturb: bool, + pub dnd_start_hour: Option, + pub dnd_end_hour: Option, +} + +#[derive(Debug, Clone, Deserialize, Serialize, utoipa::ToSchema)] +pub struct RoomMemberUpdateDndRequest { + pub do_not_disturb: Option, + pub dnd_start_hour: Option, + pub dnd_end_hour: Option, +} + +#[derive(Debug, Clone, Deserialize, Serialize, utoipa::ToSchema)] +pub struct RoomMessageCreateRequest { + pub content: String, + pub content_type: Option, + #[serde(rename = "thread_id")] + pub thread: Option, + pub in_reply_to: Option, +} + +#[derive(Debug, Clone, Deserialize, Serialize, utoipa::ToSchema)] 
+pub struct RoomMessageUpdateRequest { + pub content: String, +} + +#[derive(Debug, Clone, Serialize, utoipa::ToSchema)] +pub struct RoomMessageResponse { + pub id: Uuid, + pub seq: i64, + pub room: Uuid, + pub sender_type: String, + pub sender_id: Option, + pub display_name: Option, + pub thread: Option, + pub in_reply_to: Option, + pub content: String, + pub content_type: String, + pub edited_at: Option>, + pub send_at: DateTime, + pub revoked: Option>, + pub revoked_by: Option, +} + +#[derive(Debug, Clone, Serialize, utoipa::ToSchema)] +pub struct RoomMessageListResponse { + pub messages: Vec, + pub total: i64, +} + +#[derive(Debug, Clone, Deserialize, Serialize, utoipa::ToSchema)] +pub struct RoomThreadCreateRequest { + pub parent_seq: i64, +} + +#[derive(Debug, Clone, Serialize, utoipa::ToSchema)] +pub struct RoomThreadResponse { + pub id: Uuid, + pub room: Uuid, + pub parent: i64, + pub created_by: Uuid, + pub participants: serde_json::Value, + pub last_message_at: DateTime, + pub last_message_preview: Option, + pub created_at: DateTime, + pub updated_at: DateTime, +} + +#[derive(Debug, Clone, Serialize, utoipa::ToSchema)] +pub struct RoomPinResponse { + pub room: Uuid, + pub message: Uuid, + pub pinned_by: Uuid, + pub pinned_at: DateTime, +} + +#[derive(Debug, Clone, Deserialize, Serialize, utoipa::ToSchema)] +pub struct RoomAiUpsertRequest { + pub model: Uuid, + pub version: Option, + pub history_limit: Option, + pub system_prompt: Option, + pub temperature: Option, + pub max_tokens: Option, + pub use_exact: Option, + pub think: Option, + pub stream: Option, + pub min_score: Option, +} + +#[derive(Debug, Clone, Serialize, utoipa::ToSchema)] +pub struct RoomAiResponse { + pub room: Uuid, + pub model: Uuid, + pub version: Option, + pub call_count: i64, + pub last_call_at: Option>, + pub history_limit: Option, + pub system_prompt: Option, + pub temperature: Option, + pub max_tokens: Option, + pub use_exact: bool, + pub think: bool, + pub stream: bool, + pub 
min_score: Option, + pub created_at: DateTime, + pub updated_at: DateTime, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize, utoipa::ToSchema)] +#[serde(rename_all = "snake_case")] +pub enum NotificationType { + Mention, + Invitation, + RoleChange, + RoomCreated, + RoomDeleted, + SystemAnnouncement, +} + +#[derive(Debug, Clone, Serialize, utoipa::ToSchema)] +pub struct NotificationCreateRequest { + pub notification_type: NotificationType, + pub user_id: Uuid, + pub title: String, + pub content: Option, + pub room_id: Option, + pub project_id: Uuid, + pub related_message_id: Option, + pub related_user_id: Option, + pub related_room_id: Option, + pub metadata: Option, + pub expires_at: Option>, +} + +#[derive(Debug, Clone, Serialize, utoipa::ToSchema)] +pub struct NotificationResponse { + pub id: Uuid, + pub room: Option, + pub project: Option, + pub user_id: Option, + pub user_info: Option, + pub notification_type: String, + pub title: String, + pub content: Option, + pub related_message_id: Option, + pub related_user_id: Option, + pub related_room_id: Option, + pub metadata: serde_json::Value, + pub is_read: bool, + pub is_archived: bool, + pub created_at: DateTime, + pub read_at: Option>, + pub expires_at: Option>, +} + +#[derive(Debug, Clone, Serialize, utoipa::ToSchema)] +pub struct NotificationListResponse { + pub notifications: Vec, + pub total: i64, + pub unread_count: i64, +} + +#[derive(Debug, Clone, Serialize, utoipa::ToSchema)] +pub struct NotificationEvent { + pub event_type: String, + pub notification: NotificationResponse, + pub timestamp: DateTime, +} + +impl NotificationEvent { + pub fn new(notification: NotificationResponse) -> Self { + Self { + event_type: "notification_created".into(), + notification, + timestamp: Utc::now(), + } + } +} diff --git a/libs/room/src/ws_context.rs b/libs/room/src/ws_context.rs new file mode 100644 index 0000000..381edb5 --- /dev/null +++ b/libs/room/src/ws_context.rs @@ -0,0 +1,11 @@ +use 
uuid::Uuid; + +pub struct WsUserContext { + pub user_id: Uuid, +} + +impl WsUserContext { + pub fn new(user_id: Uuid) -> Self { + Self { user_id } + } +} diff --git a/libs/rpc/Cargo.toml b/libs/rpc/Cargo.toml new file mode 100644 index 0000000..9d28dfe --- /dev/null +++ b/libs/rpc/Cargo.toml @@ -0,0 +1,20 @@ +[package] +name = "rpc" +version.workspace = true +edition.workspace = true +authors.workspace = true +description.workspace = true +repository.workspace = true +readme.workspace = true +homepage.workspace = true +license.workspace = true +keywords.workspace = true +categories.workspace = true +documentation.workspace = true +[lib] +path = "lib.rs" +name = "rpc" +[dependencies] + +[lints] +workspace = true diff --git a/libs/rpc/lib.rs b/libs/rpc/lib.rs new file mode 100644 index 0000000..b93cf3f --- /dev/null +++ b/libs/rpc/lib.rs @@ -0,0 +1,14 @@ +pub fn add(left: u64, right: u64) -> u64 { + left + right +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn it_works() { + let result = add(2, 2); + assert_eq!(result, 4); + } +} diff --git a/libs/service/Cargo.toml b/libs/service/Cargo.toml new file mode 100644 index 0000000..c7349f9 --- /dev/null +++ b/libs/service/Cargo.toml @@ -0,0 +1,59 @@ +[package] +name = "service" +version.workspace = true +edition.workspace = true +authors.workspace = true +description.workspace = true +repository.workspace = true +readme.workspace = true +homepage.workspace = true +license.workspace = true +keywords.workspace = true +categories.workspace = true +documentation.workspace = true +[lib] +path = "lib.rs" +name = "service" +[dependencies] +config = { workspace = true } +agent = { workspace = true } +db = { workspace = true } +models = { workspace = true } +email = { workspace = true } +avatar = { workspace = true } +git = { workspace = true } +git2 = { workspace = true } +queue = { workspace = true } +room = { workspace = true } +anyhow = { workspace = true } + +serde = { workspace = true, features = ["derive"] } 
+serde_json = { workspace = true, features = [] } +slog = { workspace = true, features = ["anyhow"] } +captcha-rs = { workspace = true } +utoipa = { workspace = true, features = ["uuid", "chrono"] } +chrono = { workspace = true, features = ["serde"] } +session = { workspace = true } +argon2 = { workspace = true } +uuid = { workspace = true, features = ["serde", "v7"] } +sea-orm = { workspace = true, features = [] } +async-openai = { version = "0.34.0", features = ["chat-completion"] } +reqwest = { workspace = true, features = ["json"] } +base64 = { workspace = true } +rsa = { workspace = true } +rand = { workspace = true } +hex = { workspace = true } +sha2 = { workspace = true } +hmac = { workspace = true } +sha1 = { workspace = true } +redis = { workspace = true } +tokio = { workspace = true, features = ["rt", "rt-multi-thread"] } +tracing = { workspace = true } +tokio-stream = { workspace = true } +futures = { workspace = true } +deadpool-redis = { workspace = true, features = ["rt_tokio_1", "cluster-async", "cluster"] } +moka = { workspace = true, features = ["future"] } +rust_decimal = { workspace = true } + +[lints] +workspace = true diff --git a/libs/service/agent/billing.rs b/libs/service/agent/billing.rs new file mode 100644 index 0000000..7dd135f --- /dev/null +++ b/libs/service/agent/billing.rs @@ -0,0 +1,197 @@ +//! AI usage billing — records token costs against a project or workspace balance. +//! +//! Called by service-layer code after each successful AI call. If the project +//! belongs to a workspace, the cost is deducted from the workspace's shared quota +//! (workspace_billing). Otherwise it is deducted from the project's own quota. +//! +//! 1. Queries the most recent active price for `model_id`. +//! 2. Computes `cost = (input/1000)*input_price + (output/1000)*output_price`. +//! 3. Determines whether to bill the project or its workspace. +//! 4. Writes a billing_history entry and decrements the appropriate balance. 
+ +use crate::AppService; +use crate::error::AppError; +use models::agents::model_pricing; +use models::projects::project; +use models::projects::project_billing; +use models::projects::project_billing_history; +use models::workspaces::workspace_billing; +use models::workspaces::workspace_billing_history; +use rust_decimal::Decimal; +use sea_orm::*; +use serde::{Deserialize, Serialize}; +use utoipa::ToSchema; +use uuid::Uuid; + +/// Breakdown of a billing record. +#[derive(Debug, Clone, Serialize, Deserialize, ToSchema)] +pub struct BillingRecord { + /// Total cost in the billing currency. + pub cost: f64, + pub currency: String, + pub input_tokens: i64, + pub output_tokens: i64, +} + +impl AppService { + /// Record AI usage for a project. + /// + /// If the project belongs to a workspace, the cost is deducted from the + /// workspace's shared quota. Otherwise it is deducted from the project's own + /// billing balance. + /// + /// Returns an error if there is insufficient balance. + pub async fn record_ai_usage( + &self, + project_uid: Uuid, + model_id: Uuid, + input_tokens: i64, + output_tokens: i64, + ) -> Result { + // 1. Look up the active price for this model. + let pricing = model_pricing::Entity::find() + .filter(model_pricing::Column::ModelVersionId.eq(model_id)) + .order_by_desc(model_pricing::Column::EffectiveFrom) + .one(&self.db) + .await? + .ok_or_else(|| { + AppError::InternalServerError( + "No pricing record found for this model. Please configure AI model pricing first." + .into(), + ) + })?; + + // 2. Compute cost using Decimal arithmetic. 
+ let input_price: Decimal = pricing + .input_price_per_1k_tokens + .parse() + .unwrap_or(Decimal::ZERO); + let output_price: Decimal = pricing + .output_price_per_1k_tokens + .parse() + .unwrap_or(Decimal::ZERO); + let tokens_i = Decimal::from(input_tokens); + let tokens_o = Decimal::from(output_tokens); + let thousand = Decimal::from(1000); + + let total_cost: f64 = ((tokens_i / thousand) * input_price + + (tokens_o / thousand) * output_price) + .to_string() + .parse() + .unwrap_or(0.0); + + let currency = pricing.currency.clone(); + + // 3. Determine whether to bill the project or its workspace. + let proj = project::Entity::find_by_id(project_uid) + .one(&self.db) + .await? + .ok_or_else(|| AppError::NotFound("Project not found".to_string()))?; + + if let Some(workspace_id) = proj.workspace_id { + // ── Workspace-shared quota ────────────────────────────────── + let current = workspace_billing::Entity::find_by_id(workspace_id) + .one(&self.db) + .await? + .ok_or_else(|| { + AppError::NotFound("Workspace billing account not found".to_string()) + })?; + + let current_balance: f64 = current.balance.to_string().parse().unwrap_or(0.0); + + if current_balance < total_cost { + return Err(AppError::BadRequest(format!( + "Insufficient workspace billing balance. Required: {:.4} {}, Available: {:.4} {}", + total_cost, currency, current_balance, currency + ))); + } + + let amount_dec = Decimal::from_f64_retain(-total_cost).unwrap_or(Decimal::ZERO); + let now = chrono::Utc::now(); + + // Insert workspace billing history. 
+ let _ = workspace_billing_history::ActiveModel { + uid: Set(Uuid::new_v4()), + workspace_id: Set(workspace_id), + user_id: Set(Some(proj.created_by)), + amount: Set(amount_dec), + currency: Set(currency.clone()), + reason: Set(format!("ai_usage:{}", project_uid)), + extra: Set(Some(serde_json::json!({ + "project_id": project_uid.to_string(), + "model_id": model_id.to_string(), + "input_tokens": input_tokens, + "output_tokens": output_tokens, + }))), + created_at: Set(now), + } + .insert(&self.db) + .await; + + // Deduct from workspace balance. + let new_balance = + Decimal::from_f64_retain(current_balance - total_cost).unwrap_or(Decimal::ZERO); + let mut updated: workspace_billing::ActiveModel = current.into(); + updated.balance = Set(new_balance); + updated.updated_at = Set(now); + updated.update(&self.db).await?; + + Ok(BillingRecord { + cost: total_cost, + currency, + input_tokens, + output_tokens, + }) + } else { + // ── Project-owned quota ───────────────────────────────────── + let amount_dec = Decimal::from_f64_retain(-total_cost).unwrap_or(Decimal::ZERO); + + let _ = project_billing_history::ActiveModel { + uid: Set(Uuid::new_v4()), + project: Set(project_uid), + user: Set(None), + amount: Set(amount_dec), + currency: Set(currency.clone()), + reason: Set("ai_usage".to_string()), + extra: Set(Some(serde_json::json!({ + "model_id": model_id.to_string(), + "input_tokens": input_tokens, + "output_tokens": output_tokens, + }))), + created_at: Set(chrono::Utc::now()), + ..Default::default() + } + .insert(&self.db) + .await; + + let current = project_billing::Entity::find_by_id(project_uid) + .one(&self.db) + .await? + .ok_or_else(|| { + AppError::NotFound("Project billing account not found".to_string()) + })?; + + let current_balance: f64 = current.balance.to_string().parse().unwrap_or(0.0); + + if current_balance < total_cost { + return Err(AppError::BadRequest(format!( + "Insufficient billing balance. 
Required: {:.4} {}, Available: {:.4} {}", + total_cost, currency, current_balance, currency + ))); + } + + let new_balance = + Decimal::from_f64_retain(current_balance - total_cost).unwrap_or(Decimal::ZERO); + let mut updated: project_billing::ActiveModel = current.into(); + updated.balance = Set(new_balance); + updated.update(&self.db).await?; + + Ok(BillingRecord { + cost: total_cost, + currency, + input_tokens, + output_tokens, + }) + } + } +} diff --git a/libs/service/agent/code_review.rs b/libs/service/agent/code_review.rs new file mode 100644 index 0000000..5498f5f --- /dev/null +++ b/libs/service/agent/code_review.rs @@ -0,0 +1,544 @@ +//! AI-powered code review service. +//! +//! Analyzes PR diffs and posts structured review comments. +//! +//! Triggered automatically on PR creation (if `repo.ai_code_review_enabled`) or +//! manually via the `trigger_ai_code_review` API. + +use crate::AppService; +use crate::error::AppError; +use chrono::Utc; +use models::agents::ModelStatus; +use models::agents::model::{Column as MColumn, Entity as MEntity}; +use models::pull_request::pull_request_review_comment; +use models::repos::repo; +use sea_orm::*; +use serde::{Deserialize, Serialize}; +use session::Session; +use utoipa::ToSchema; +use uuid::Uuid; + +use super::billing::BillingRecord; + +const AI_BOT_UUID: Uuid = Uuid::nil(); + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "lowercase")] +pub enum ReviewSeverity { + Info, + Warning, + Error, +} + +impl std::fmt::Display for ReviewSeverity { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + ReviewSeverity::Info => write!(f, "info"), + ReviewSeverity::Warning => write!(f, "warning"), + ReviewSeverity::Error => write!(f, "error"), + } + } +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct CodeReviewComment { + pub path: String, + pub line: Option, + pub old_line: Option, + pub side: Option, + pub body: String, + pub severity: ReviewSeverity, 
}

impl std::fmt::Display for CodeReviewComment {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        writeln!(
            f,
            "**{}** | `{}:{}`",
            self.severity,
            self.path,
            self.line.map(|l| l.to_string()).unwrap_or_default()
        )?;
        writeln!(f, "{}", self.body)
    }
}

/// Response returned by the manual / automatic code-review trigger.
#[derive(Debug, Clone, Serialize, ToSchema)]
pub struct TriggerCodeReviewResponse {
    pub comments_posted: usize,
    pub comments: Vec<CommentCreated>,
    /// Billing record for the AI call, if billing succeeded.
    pub billing: Option<BillingRecord>,
}

/// One successfully posted review comment (summary view).
#[derive(Debug, Clone, Serialize, ToSchema)]
pub struct CommentCreated {
    pub path: String,
    pub line: Option<i64>,
    pub severity: String,
}

#[derive(Debug, Clone, Deserialize, ToSchema)]
pub struct TriggerCodeReviewRequest {
    pub pr_number: Option<i64>,
    pub model_id: Option<Uuid>,
}

impl AppService {
    /// Entry point for the manual `trigger_ai_code_review` API: resolves the
    /// repository (including the caller's access check) and delegates to the
    /// internal implementation.
    pub async fn trigger_ai_code_review(
        &self,
        namespace: String,
        repo_name: String,
        pr_number: Option<i64>,
        model_id: Option<Uuid>,
        ctx: &Session,
    ) -> Result<TriggerCodeReviewResponse, AppError> {
        let repo = self
            .utils_find_repo(namespace.clone(), repo_name.clone(), ctx)
            .await?;
        self.trigger_ai_code_review_internal(namespace, repo_name, pr_number, model_id, repo)
            .await
    }

    /// Runs an AI review of one PR: builds the diff, calls the model, records
    /// billing (best-effort), and posts one review comment per AI finding.
    pub async fn trigger_ai_code_review_internal(
        &self,
        _namespace: String,
        _repo_name: String,
        pr_number: Option<i64>,
        model_id: Option<Uuid>,
        repo: repo::Model,
    ) -> Result<TriggerCodeReviewResponse, AppError> {
        let pr = match pr_number {
            Some(n) => models::pull_request::pull_request::Entity::find()
                .filter(models::pull_request::pull_request::Column::Repo.eq(repo.id))
                .filter(models::pull_request::pull_request::Column::Number.eq(n))
                .one(&self.db)
                .await?
                .ok_or_else(|| AppError::NotFound("Pull request not found".to_string()))?,
            None => {
                // Most recently created PR for this repo.
                // NOTE(review): despite the error message below, there is no
                // open-state filter here, so a closed PR can be picked up —
                // confirm the state column and add a filter.
                models::pull_request::pull_request::Entity::find()
                    .filter(models::pull_request::pull_request::Column::Repo.eq(repo.id))
                    .order_by_desc(models::pull_request::pull_request::Column::CreatedAt)
                    .one(&self.db)
                    .await?
                    .ok_or_else(|| AppError::NotFound("No open pull request found".to_string()))?
            }
        };

        // Fetch the model first so its context_length can drive diff truncation.
        let model = match model_id {
            Some(id) => MEntity::find_by_id(id)
                .one(&self.db)
                .await?
                .ok_or(AppError::NotFound("Model not found".to_string()))?,
            None => MEntity::find()
                .filter(MColumn::Status.eq(ModelStatus::Active.to_string()))
                .order_by_asc(MColumn::Name)
                .one(&self.db)
                .await?
                .ok_or_else(|| {
                    AppError::InternalServerError(
                        "No active AI model found. Please configure an AI model first.".into(),
                    )
                })?,
        };

        let diff = self
            .get_pr_diff_for_review(&repo, &pr, &model.name, model.context_length)
            .await?;

        let prompt = build_code_review_prompt(&pr, &diff);

        let ai_response = call_ai_model(&model.name, &prompt, &self.config).await?;

        // Record billing (non-fatal — log a warning but don't fail the review).
        let billing = self
            .record_ai_usage(
                repo.project,
                model.id,
                ai_response.input_tokens,
                ai_response.output_tokens,
            )
            .await
            .inspect_err(|e| {
                slog::warn!(
                    self.logs,
                    "failed to record AI billing for code review";
                    "project" => %repo.project,
                    "error" => ?e
                );
            })
            .ok();

        let comments = parse_ai_response(&ai_response.content);

        if comments.is_empty() {
            // BUGFIX: the AI call was made (and billed) even when it produced
            // no comments, so return the billing record instead of `None`.
            return Ok(TriggerCodeReviewResponse {
                comments_posted: 0,
                comments: vec![],
                billing,
            });
        }

        // BUGFIX: the max-id lookup previously ran once per comment inside the
        // loop; a single query plus a local counter yields the same ids without
        // N round-trips. NOTE(review): id allocation is still racy without a
        // transaction or DB sequence — confirm and harden if concurrent
        // reviewers are possible.
        let max_id: Option<Option<i64>> = pull_request_review_comment::Entity::find()
            .filter(pull_request_review_comment::Column::Repo.eq(repo.id))
            .filter(pull_request_review_comment::Column::Number.eq(pr.number))
            .select_only()
            .column_as(pull_request_review_comment::Column::Id.max(), "max_id")
            .into_tuple::<Option<i64>>()
            .one(&self.db)
            .await?;
        let mut next_id = max_id.flatten().unwrap_or(0) + 1;

        let mut created = Vec::new();
        let now = Utc::now();

        for comment in &comments {
            let comment_id = next_id;
            next_id += 1;

            let body = format!(
                "🤖 **AI Review** | *{severity}*\n\n{body}",
                severity = comment.severity,
                body = comment.body
            );

            let active = pull_request_review_comment::ActiveModel {
                repo: Set(repo.id),
                number: Set(pr.number),
                id: Set(comment_id),
                review: Set(None),
                path: Set(Some(comment.path.clone())),
                side: Set(comment.side.clone()),
                line: Set(comment.line),
                old_line: Set(comment.old_line),
                body: Set(body),
                author: Set(AI_BOT_UUID),
                resolved: Set(false),
                in_reply_to: Set(None),
                created_at: Set(now),
                updated_at: Set(now),
            };

            // A single failed insert should not abort the whole review.
            match active.insert(&self.db).await {
                Ok(created_comment) => {
                    created.push(CommentCreated {
                        path: created_comment.path.unwrap_or_default(),
                        line: created_comment.line,
                        severity: comment.severity.to_string(),
                    });
                }
                Err(e) => {
                    slog::warn!(
                        self.logs,
                        "failed to create AI review comment";
                        "path" => %comment.path,
                        "error" => ?e
                    );
                }
            }
        }

        Ok(TriggerCodeReviewResponse {
            comments_posted: created.len(),
            comments: created,
            billing,
        })
    }

    /// Produces the unified diff between the PR's base and head branches,
    /// truncated to fit the model's context window.
    async fn get_pr_diff_for_review(
        &self,
        repo: &repo::Model,
        pr: &models::pull_request::pull_request::Model,
        model_name: &str,
        context_limit: i64,
    ) -> Result<String, AppError> {
        // Resolve branch tips on a blocking thread (git2 is synchronous).
        let (base_oid, head_oid) = tokio::task::spawn_blocking({
            let base = pr.base.clone();
            let head = pr.head.clone();
            let repo_model = repo.clone();
            move || -> Result<(git2::Oid, git2::Oid), AppError> {
                let domain = crate::git::GitDomain::from_model(repo_model)?;
                let base_commit_oid = domain
                    .branch_target(&base)
                    .map_err(|e| crate::git::GitError::Internal(e.to_string()))?
                    .ok_or_else(|| {
                        crate::git::GitError::NotFound(format!("Branch '{}' not found", base))
                    })?;
                let head_commit_oid = domain
                    .branch_target(&head)
                    .map_err(|e| crate::git::GitError::Internal(e.to_string()))?
                    .ok_or_else(|| {
                        crate::git::GitError::NotFound(format!("Branch '{}' not found", head))
                    })?;
                let base_oid = base_commit_oid
                    .to_oid()
                    .map_err(|e| crate::git::GitError::Internal(e.to_string()))?;
                let head_oid = head_commit_oid
                    .to_oid()
                    .map_err(|e| crate::git::GitError::Internal(e.to_string()))?;
                Ok((base_oid, head_oid))
            }
        })
        .await
        .map_err(|e| AppError::InternalServerError(format!("Task join error: {e}")))?
        .map_err(AppError::from)?;

        // Get the unified diff as raw patch text.
        let diff_text = tokio::task::spawn_blocking({
            let repo_model = repo.clone();
            let base = base_oid.to_string();
            let head = head_oid.to_string();
            move || -> Result<String, AppError> {
                let domain = crate::git::GitDomain::from_model(repo_model)?;

                let base_oid = git2::Oid::from_str(&base)
                    .map_err(|e| AppError::InternalServerError(e.to_string()))?;
                let head_oid = git2::Oid::from_str(&head)
                    .map_err(|e| AppError::InternalServerError(e.to_string()))?;

                let old_tree = domain
                    .repo()
                    .find_tree(base_oid)
                    .map_err(|e| AppError::InternalServerError(e.to_string()))?;
                let new_tree = domain
                    .repo()
                    .find_tree(head_oid)
                    .map_err(|e| AppError::InternalServerError(e.to_string()))?;

                let diff = domain
                    .repo()
                    .diff_tree_to_tree(Some(&old_tree), Some(&new_tree), None)
                    .map_err(|e| AppError::InternalServerError(e.to_string()))?;

                // Print as a unified patch.
                // BUGFIX: `DiffLine::content()` does NOT include the '+'/'-'/' '
                // origin marker, and it already ends with the line terminator.
                // The old code dropped the markers (so the model could not tell
                // additions from deletions) and appended an extra '\n' after
                // every line, double-spacing the patch.
                let mut patch_buf: Vec<u8> = Vec::new();
                diff.print(git2::DiffFormat::Patch, |_delta, _hunk, line| {
                    match line.origin() {
                        '+' | '-' | ' ' => patch_buf.push(line.origin() as u8),
                        _ => {}
                    }
                    patch_buf.extend_from_slice(line.content());
                    true
                })
                .map_err(|e| AppError::InternalServerError(e.to_string()))?;

                String::from_utf8(patch_buf)
                    .map_err(|e| AppError::InternalServerError(e.to_string()))
            }
        })
        .await
        .map_err(|e| AppError::InternalServerError(format!("Task join error: {e}")))?
        .map_err(AppError::from)?;

        // Truncate if too large to stay within token budget.
        // Reserve 4096 tokens for output + system overhead (~512 tokens).
        let reserve = 4608;
        match agent::tokent::truncate_to_token_budget(
            &diff_text,
            model_name,
            context_limit as usize,
            reserve,
        ) {
            Ok(truncated) if truncated.len() < diff_text.len() => {
                let chars_shown = truncated.len();
                Ok(format!(
                    "[Diff truncated — showing first {} chars to fit token budget]\n{}",
                    chars_shown, truncated
                ))
            }
            _ => Ok(diff_text),
        }
    }
}

/// Builds the review prompt from the PR metadata and its unified diff.
fn build_code_review_prompt(pr: &models::pull_request::pull_request::Model, diff: &str) -> String {
    let severity_note = r#"Respond with a JSON array of review comments. Each comment must have:
- "path": string (file path, e.g. "src/main.rs")
- "line": number | null (line number in the new version, null for general comments)
- "old_line": number | null (only for deleted lines)
- "side": "RIGHT" | "LEFT" | null ("RIGHT" = addition, "LEFT" = deletion)
- "body": string (Markdown-formatted comment text)
- "severity": "info" | "warning" | "error"

Example:
```json
[
  {"path": "src/main.rs", "line": 42, "side": "RIGHT", "body": "Consider using a constant here.", "severity": "info"}
]
```

Only suggest fixes that are correct and safe. Do not suggest style preferences unless the codebase has an explicit style guide. Focus on bugs, security issues, performance problems, and code clarity.
"#;

    // BUGFIX: the old template opened the fence as "```{diff}", interpolating
    // the ENTIRE diff into the code-fence info string and duplicating it in
    // the prompt. The fence tag is now the literal "diff". The JSON example
    // fence above was also missing its closing "```".
    format!(
        r#"You are a senior code reviewer. Review the following pull request.

## Pull Request
Title: {title}
Description: {body}
Base branch: {base}
Head branch: {head}

## Diff (unified format: +added, -removed)
```diff
{diff}
```

{severity_note}"#,
        title = pr.title,
        body = pr.body.as_deref().unwrap_or("(no description)"),
        base = pr.base,
        head = pr.head,
        diff = diff,
        severity_note = severity_note,
    )
}

/// Sends the prompt to the configured OpenAI-compatible endpoint.
/// NOTE(review): the concrete return type was illegible in the reviewed patch;
/// it must be whatever `agent::call_with_params` returns (callers use
/// `.content`, `.input_tokens`, `.output_tokens`) — confirm the type name.
async fn call_ai_model(
    model_name: &str,
    prompt: &str,
    app_config: &config::AppConfig,
) -> Result<agent::AiResponse, AppError> {
    let api_key = app_config
        .ai_api_key()
        .map_err(|e| AppError::InternalServerError(format!("AI API key not configured: {}", e)))?;

    let base_url = app_config
        .ai_basic_url()
        .unwrap_or_else(|_| "https://api.openai.com".into());

    let client_config = agent::AiClientConfig::new(api_key).with_base_url(base_url);

    let messages = vec![
        async_openai::types::chat::ChatCompletionRequestMessage::User(
            async_openai::types::chat::ChatCompletionRequestUserMessage {
                content: async_openai::types::chat::ChatCompletionRequestUserMessageContent::Text(
                    prompt.to_string(),
                ),
                ..Default::default()
            },
        ),
    ];

    agent::call_with_params(&messages, model_name, &client_config, 0.2, 8192, None, None)
        .await
        .map_err(|e| AppError::InternalServerError(format!("AI call failed: {}", e)))
}

/// Parses the model output into structured comments, falling back to a
/// line-oriented heuristic when the output is not valid JSON.
fn parse_ai_response(response: &str) -> Vec<CodeReviewComment> {
    // Prefer a fenced JSON block; otherwise try the raw response.
    let json_str = extract_json_block(response).unwrap_or_else(|| response.to_string());

    match serde_json::from_str::<Vec<CodeReviewComment>>(&json_str) {
        Ok(comments) => comments,
        Err(_) => extract_fallback_comments(response),
    }
}

/// Returns the contents of the first ``` fenced block in `text`, or the whole
/// text when it already starts with a JSON array.
///
/// BUGFIX: the old version looped over every line and, upon seeing the first
/// fence, re-scanned the ENTIRE text from the beginning (quadratic work) while
/// concatenating the contents of *all* fenced blocks into one buffer. A single
/// pass that stops at the first closing fence is both correct and linear.
fn extract_json_block(text: &str) -> Option<String> {
    let mut inside = false;
    let mut buf = String::new();
    for line in text.lines() {
        let l = line.trim();
        if l == "```json" || l == "```" {
            if inside {
                break; // closing fence of the first block — stop
            }
            inside = true;
            continue;
        }
        if inside {
            buf.push_str(l);
            buf.push('\n');
        }
    }
    if !buf.trim().is_empty() {
        return Some(buf.trim().to_string());
    }

    // Fall back to an inline JSON array at the start of the response.
    if text.trim().starts_with('[') {
        Some(text.trim().to_string())
    } else {
        None
    }
}

/// Heuristic salvage for non-JSON model output: scans for "path:line" headers
/// and `[error]` / `[warning]` severity markers, accumulating the following
/// lines as the comment body.
fn extract_fallback_comments(text: &str) -> Vec<CodeReviewComment> {
    let mut comments = Vec::new();
    let mut current_path = String::new();
    let mut current_line: Option<i64> = None;
    let mut current_side: Option<String> = None;
    let mut current_severity = ReviewSeverity::Info;
    let mut current_body = String::new();
    let mut in_body = false;

    for line in text.lines() {
        let line = line.trim();
        if line.is_empty() {
            // Preserve paragraph breaks inside a body being collected.
            if in_body && !current_body.is_empty() {
                current_body.push_str(line);
                current_body.push('\n');
            }
            continue;
        }

        // Detect a "path:line" header — it starts a new comment.
        if let Some((path, rest)) = line.split_once(':') {
            if rest.trim().parse::<i64>().is_ok() {
                // Flush the previous comment first.
                if !current_path.is_empty() && !current_body.trim().is_empty() {
                    comments.push(CodeReviewComment {
                        path: current_path.clone(),
                        line: current_line,
                        old_line: None,
                        side: current_side.clone(),
                        body: current_body.trim().to_string(),
                        severity: current_severity.clone(),
                    });
                }
                current_path = path.trim().to_string();
                current_line = rest.trim().parse().ok();
                current_side = None;
                current_body.clear();
                in_body = true;
                continue;
            }
        }

        // Detect severity markers.
        let has_severity = line.contains("[error]")
            || line.starts_with("**Error**")
            || line.contains("[warning]")
            || line.starts_with("**Warning**");

        if line.contains("[error]") || line.starts_with("**Error**") {
            current_severity = ReviewSeverity::Error;
        } else if line.contains("[warning]") || line.starts_with("**Warning**") {
            current_severity = ReviewSeverity::Warning;
        }

        if in_body || has_severity {
            current_body.push_str(line);
            current_body.push('\n');
        }
    }

    // Flush the trailing comment.
    if !current_path.is_empty() && !current_body.trim().is_empty() {
        comments.push(CodeReviewComment {
            path: current_path,
            line: current_line,
            old_line: None,
            side: current_side,
            body: current_body.trim().to_string(),
            severity: current_severity,
        });
    }

    comments
}
diff --git a/libs/service/agent/mod.rs b/libs/service/agent/mod.rs
new file mode 100644 index 0000000..eb41d69 --- /dev/null +++ b/libs/service/agent/mod.rs @@ -0,0 +1,11 @@
pub mod model_capability;
pub mod model_parameter_profile;
pub mod model_pricing;
pub mod model_version;
pub mod provider;

pub mod billing;
pub mod code_review;
pub mod model;
pub mod pr_summary;
pub mod sync;
diff --git a/libs/service/agent/model.rs b/libs/service/agent/model.rs
new file mode 100644 index 0000000..5f2cb71 --- /dev/null +++ b/libs/service/agent/model.rs @@ -0,0 +1,197 @@
use crate::AppService;
use crate::error::AppError;
use chrono::Utc;
use models::agents::model;
use models::agents::{
    ModelCapability, ModelModality, ModelStatus,
    model::{Column as MColumn, Entity as MEntity},
    model_provider::Entity as ProviderEntity,
};
use sea_orm::*;
use serde::{Deserialize, Serialize};
use session::Session;
use utoipa::ToSchema;
use uuid::Uuid;

use super::provider::require_system_caller;

/// Payload for creating a model.
/// NOTE(review): unlike `UpdateModelRequest` there is no `display_name` here —
/// confirm whether that asymmetry is intentional.
#[derive(Debug, Clone, Deserialize, ToSchema)]
pub struct CreateModelRequest {
    pub provider_id: Uuid,
    pub name: String,
    pub modality: String,
    pub capability: String,
    pub context_length: i64,
    pub max_output_tokens: Option<i64>,
    pub training_cutoff: Option<chrono::DateTime<Utc>>,
    #[serde(default)]
    pub is_open_source: bool,
}
+ +#[derive(Debug, Clone, Deserialize, ToSchema)] +pub struct UpdateModelRequest { + pub display_name: Option, + pub modality: Option, + pub capability: Option, + pub context_length: Option, + pub max_output_tokens: Option, + pub training_cutoff: Option>, + pub is_open_source: Option, + pub status: Option, +} + +#[derive(Debug, Clone, Serialize, ToSchema)] +pub struct ModelResponse { + pub id: Uuid, + pub provider_id: Uuid, + pub name: String, + pub modality: String, + pub capability: String, + pub context_length: i64, + pub max_output_tokens: Option, + pub training_cutoff: Option>, + pub is_open_source: bool, + pub status: String, + pub created_at: chrono::DateTime, + pub updated_at: chrono::DateTime, +} + +impl From for ModelResponse { + fn from(m: model::Model) -> Self { + Self { + id: m.id, + provider_id: m.provider_id, + name: m.name, + modality: m.modality, + capability: m.capability, + context_length: m.context_length, + max_output_tokens: m.max_output_tokens, + training_cutoff: m.training_cutoff, + is_open_source: m.is_open_source, + status: m.status, + created_at: m.created_at, + updated_at: m.updated_at, + } + } +} + +impl AppService { + pub async fn agent_model_list( + &self, + provider_id: Option, + _ctx: &Session, + ) -> Result, AppError> { + let mut query = MEntity::find().order_by_asc(MColumn::Name); + if let Some(pid) = provider_id { + query = query.filter(MColumn::ProviderId.eq(pid)); + } + let models = query.all(&self.db).await?; + Ok(models.into_iter().map(ModelResponse::from).collect()) + } + + pub async fn agent_model_get( + &self, + id: Uuid, + _ctx: &Session, + ) -> Result { + let model = MEntity::find_by_id(id) + .one(&self.db) + .await? 
+ .ok_or(AppError::NotFound("Model not found".to_string()))?; + Ok(ModelResponse::from(model)) + } + + pub async fn agent_model_create( + &self, + request: CreateModelRequest, + ctx: &Session, + ) -> Result { + require_system_caller(ctx)?; + + ProviderEntity::find_by_id(request.provider_id) + .one(&self.db) + .await? + .ok_or(AppError::NotFound("Provider not found".to_string()))?; + + let _ = request + .modality + .parse::() + .map_err(|_| AppError::BadRequest("Invalid modality".to_string()))?; + let _ = request + .capability + .parse::() + .map_err(|_| AppError::BadRequest("Invalid capability".to_string()))?; + + let now = Utc::now(); + let active = model::ActiveModel { + id: Set(Uuid::now_v7()), + provider_id: Set(request.provider_id), + name: Set(request.name), + modality: Set(request.modality), + capability: Set(request.capability), + context_length: Set(request.context_length), + max_output_tokens: Set(request.max_output_tokens), + training_cutoff: Set(request.training_cutoff), + is_open_source: Set(request.is_open_source), + status: Set(ModelStatus::Active.to_string()), + created_at: Set(now), + updated_at: Set(now), + ..Default::default() + }; + let model = active.insert(&self.db).await?; + Ok(ModelResponse::from(model)) + } + + pub async fn agent_model_update( + &self, + id: Uuid, + request: UpdateModelRequest, + ctx: &Session, + ) -> Result { + require_system_caller(ctx)?; + + let model = MEntity::find_by_id(id) + .one(&self.db) + .await? 
+ .ok_or(AppError::NotFound("Model not found".to_string()))?; + + let mut active: model::ActiveModel = model.into(); + if let Some(modality) = request.modality { + let _ = modality + .parse::() + .map_err(|_| AppError::BadRequest("Invalid modality".to_string()))?; + active.modality = Set(modality); + } + if let Some(capability) = request.capability { + let _ = capability + .parse::() + .map_err(|_| AppError::BadRequest("Invalid capability".to_string()))?; + active.capability = Set(capability); + } + if let Some(context_length) = request.context_length { + active.context_length = Set(context_length); + } + if let Some(max_output_tokens) = request.max_output_tokens { + active.max_output_tokens = Set(Some(max_output_tokens)); + } + if let Some(training_cutoff) = request.training_cutoff { + active.training_cutoff = Set(Some(training_cutoff)); + } + if let Some(is_open_source) = request.is_open_source { + active.is_open_source = Set(is_open_source); + } + if let Some(status) = request.status { + active.status = Set(status); + } + active.updated_at = Set(Utc::now()); + + let model = active.update(&self.db).await?; + Ok(ModelResponse::from(model)) + } + + pub async fn agent_model_delete(&self, id: Uuid, ctx: &Session) -> Result<(), AppError> { + require_system_caller(ctx)?; + MEntity::delete_by_id(id).exec(&self.db).await?; + Ok(()) + } +} diff --git a/libs/service/agent/model_capability.rs b/libs/service/agent/model_capability.rs new file mode 100644 index 0000000..aa00f66 --- /dev/null +++ b/libs/service/agent/model_capability.rs @@ -0,0 +1,137 @@ +use crate::AppService; +use crate::error::AppError; +use chrono::Utc; +use models::agents::CapabilityType; +use models::agents::model_capability; +use sea_orm::*; +use serde::{Deserialize, Serialize}; +use session::Session; +use utoipa::ToSchema; + +use super::provider::require_system_caller; + +#[derive(Debug, Clone, Deserialize, ToSchema)] +pub struct CreateModelCapabilityRequest { + pub model_version_id: i64, + pub 
capability: String, + #[serde(default)] + pub is_supported: bool, +} + +#[derive(Debug, Clone, Deserialize, ToSchema)] +pub struct UpdateModelCapabilityRequest { + pub is_supported: Option, +} + +#[derive(Debug, Clone, Serialize, ToSchema)] +pub struct ModelCapabilityResponse { + pub id: i64, + pub model_version_id: i64, + pub capability: String, + pub is_supported: bool, + pub created_at: chrono::DateTime, +} + +impl From for ModelCapabilityResponse { + fn from(mc: model_capability::Model) -> Self { + Self { + id: mc.id, + model_version_id: mc.model_version_id, + capability: mc.capability, + is_supported: mc.is_supported, + created_at: mc.created_at, + } + } +} + +impl AppService { + pub async fn agent_model_capability_list( + &self, + model_version_id: i64, + _ctx: &Session, + ) -> Result, AppError> { + let caps = model_capability::Entity::find() + .filter(model_capability::Column::ModelVersionId.eq(model_version_id)) + .order_by_asc(model_capability::Column::Capability) + .all(&self.db) + .await?; + Ok(caps + .into_iter() + .map(ModelCapabilityResponse::from) + .collect()) + } + + pub async fn agent_model_capability_get( + &self, + id: i64, + _ctx: &Session, + ) -> Result { + let cap = model_capability::Entity::find_by_id(id) + .one(&self.db) + .await? 
+ .ok_or(AppError::NotFound( + "Capability record not found".to_string(), + ))?; + Ok(ModelCapabilityResponse::from(cap)) + } + + pub async fn agent_model_capability_create( + &self, + request: CreateModelCapabilityRequest, + ctx: &Session, + ) -> Result { + require_system_caller(ctx)?; + + let _ = request + .capability + .parse::() + .map_err(|_| AppError::BadRequest("Invalid capability type".to_string()))?; + + let now = Utc::now(); + let active = model_capability::ActiveModel { + model_version_id: Set(request.model_version_id), + capability: Set(request.capability), + is_supported: Set(request.is_supported), + created_at: Set(now), + ..Default::default() + }; + let cap = active.insert(&self.db).await?; + Ok(ModelCapabilityResponse::from(cap)) + } + + pub async fn agent_model_capability_update( + &self, + id: i64, + request: UpdateModelCapabilityRequest, + ctx: &Session, + ) -> Result { + require_system_caller(ctx)?; + + let cap = model_capability::Entity::find_by_id(id) + .one(&self.db) + .await? 
+ .ok_or(AppError::NotFound( + "Capability record not found".to_string(), + ))?; + + let mut active: model_capability::ActiveModel = cap.into(); + if let Some(is_supported) = request.is_supported { + active.is_supported = Set(is_supported); + } + + let cap = active.update(&self.db).await?; + Ok(ModelCapabilityResponse::from(cap)) + } + + pub async fn agent_model_capability_delete( + &self, + id: i64, + ctx: &Session, + ) -> Result<(), AppError> { + require_system_caller(ctx)?; + model_capability::Entity::delete_by_id(id) + .exec(&self.db) + .await?; + Ok(()) + } +} diff --git a/libs/service/agent/model_parameter_profile.rs b/libs/service/agent/model_parameter_profile.rs new file mode 100644 index 0000000..b0e444a --- /dev/null +++ b/libs/service/agent/model_parameter_profile.rs @@ -0,0 +1,163 @@ +use crate::AppService; +use crate::error::AppError; +use models::agents::model_parameter_profile; +use sea_orm::*; +use serde::{Deserialize, Serialize}; +use session::Session; +use utoipa::ToSchema; +use uuid::Uuid; + +use super::provider::require_system_caller; + +#[derive(Debug, Clone, Deserialize, ToSchema)] +pub struct CreateModelParameterProfileRequest { + pub model_version_id: Uuid, + pub temperature_min: f64, + pub temperature_max: f64, + pub top_p_min: f64, + pub top_p_max: f64, + #[serde(default)] + pub frequency_penalty_supported: bool, + #[serde(default)] + pub presence_penalty_supported: bool, +} + +#[derive(Debug, Clone, Deserialize, ToSchema)] +pub struct UpdateModelParameterProfileRequest { + pub temperature_min: Option, + pub temperature_max: Option, + pub top_p_min: Option, + pub top_p_max: Option, + pub frequency_penalty_supported: Option, + pub presence_penalty_supported: Option, +} + +#[derive(Debug, Clone, Serialize, ToSchema)] +pub struct ModelParameterProfileResponse { + pub id: i64, + pub model_version_id: Uuid, + pub temperature_min: f64, + pub temperature_max: f64, + pub top_p_min: f64, + pub top_p_max: f64, + pub frequency_penalty_supported: 
bool, + pub presence_penalty_supported: bool, +} + +impl From for ModelParameterProfileResponse { + fn from(p: model_parameter_profile::Model) -> Self { + Self { + id: p.id, + model_version_id: p.model_version_id, + temperature_min: p.temperature_min, + temperature_max: p.temperature_max, + top_p_min: p.top_p_min, + top_p_max: p.top_p_max, + frequency_penalty_supported: p.frequency_penalty_supported, + presence_penalty_supported: p.presence_penalty_supported, + } + } +} + +impl AppService { + pub async fn agent_model_parameter_profile_list( + &self, + model_version_id: Uuid, + _ctx: &Session, + ) -> Result, AppError> { + let profiles = model_parameter_profile::Entity::find() + .filter(model_parameter_profile::Column::ModelVersionId.eq(model_version_id)) + .all(&self.db) + .await?; + Ok(profiles + .into_iter() + .map(ModelParameterProfileResponse::from) + .collect()) + } + + pub async fn agent_model_parameter_profile_get( + &self, + id: i64, + _ctx: &Session, + ) -> Result { + let profile = model_parameter_profile::Entity::find_by_id(id) + .one(&self.db) + .await? 
+ .ok_or(AppError::NotFound( + "Parameter profile not found".to_string(), + ))?; + Ok(ModelParameterProfileResponse::from(profile)) + } + + pub async fn agent_model_parameter_profile_create( + &self, + request: CreateModelParameterProfileRequest, + ctx: &Session, + ) -> Result { + require_system_caller(ctx)?; + + let active = model_parameter_profile::ActiveModel { + model_version_id: Set(request.model_version_id), + temperature_min: Set(request.temperature_min), + temperature_max: Set(request.temperature_max), + top_p_min: Set(request.top_p_min), + top_p_max: Set(request.top_p_max), + frequency_penalty_supported: Set(request.frequency_penalty_supported), + presence_penalty_supported: Set(request.presence_penalty_supported), + ..Default::default() + }; + let profile = active.insert(&self.db).await?; + Ok(ModelParameterProfileResponse::from(profile)) + } + + pub async fn agent_model_parameter_profile_update( + &self, + id: i64, + request: UpdateModelParameterProfileRequest, + ctx: &Session, + ) -> Result { + require_system_caller(ctx)?; + + let profile = model_parameter_profile::Entity::find_by_id(id) + .one(&self.db) + .await? 
+ .ok_or(AppError::NotFound( + "Parameter profile not found".to_string(), + ))?; + + let mut active: model_parameter_profile::ActiveModel = profile.into(); + if let Some(v) = request.temperature_min { + active.temperature_min = Set(v); + } + if let Some(v) = request.temperature_max { + active.temperature_max = Set(v); + } + if let Some(v) = request.top_p_min { + active.top_p_min = Set(v); + } + if let Some(v) = request.top_p_max { + active.top_p_max = Set(v); + } + if let Some(v) = request.frequency_penalty_supported { + active.frequency_penalty_supported = Set(v); + } + if let Some(v) = request.presence_penalty_supported { + active.presence_penalty_supported = Set(v); + } + + let profile = active.update(&self.db).await?; + Ok(ModelParameterProfileResponse::from(profile)) + } + + pub async fn agent_model_parameter_profile_delete( + &self, + id: i64, + ctx: &Session, + ) -> Result<(), AppError> { + require_system_caller(ctx)?; + model_parameter_profile::Entity::delete_by_id(id) + .exec(&self.db) + .await?; + Ok(()) + } +} diff --git a/libs/service/agent/model_pricing.rs b/libs/service/agent/model_pricing.rs new file mode 100644 index 0000000..6af0f6b --- /dev/null +++ b/libs/service/agent/model_pricing.rs @@ -0,0 +1,148 @@ +use crate::AppService; +use crate::error::AppError; +use chrono::Utc; +use models::agents::PricingCurrency; +use models::agents::model_pricing; +use sea_orm::*; +use serde::{Deserialize, Serialize}; +use session::Session; +use utoipa::ToSchema; +use uuid::Uuid; + +use super::provider::require_system_caller; + +#[derive(Debug, Clone, Deserialize, ToSchema)] +pub struct CreateModelPricingRequest { + pub model_version_id: Uuid, + pub input_price_per_1k_tokens: String, + pub output_price_per_1k_tokens: String, + pub currency: String, + pub effective_from: chrono::DateTime, +} + +#[derive(Debug, Clone, Deserialize, ToSchema)] +pub struct UpdateModelPricingRequest { + pub input_price_per_1k_tokens: Option, + pub output_price_per_1k_tokens: Option, + 
pub currency: Option<String>,
    pub effective_from: Option<chrono::DateTime<Utc>>,
}

/// API representation of a single pricing record for a model version.
#[derive(Debug, Clone, Serialize, ToSchema)]
pub struct ModelPricingResponse {
    pub id: i64,
    pub model_version_id: Uuid,
    /// Price per 1k input tokens; kept as a string to avoid float rounding.
    pub input_price_per_1k_tokens: String,
    /// Price per 1k output tokens; kept as a string to avoid float rounding.
    pub output_price_per_1k_tokens: String,
    /// Currency code, e.g. "USD".
    pub currency: String,
    pub effective_from: chrono::DateTime<Utc>,
}

impl From<model_pricing::Model> for ModelPricingResponse {
    fn from(p: model_pricing::Model) -> Self {
        Self {
            id: p.id,
            model_version_id: p.model_version_id,
            input_price_per_1k_tokens: p.input_price_per_1k_tokens,
            output_price_per_1k_tokens: p.output_price_per_1k_tokens,
            currency: p.currency,
            effective_from: p.effective_from,
        }
    }
}

impl AppService {
    /// List all pricing records for a model version, newest `effective_from` first.
    pub async fn agent_model_pricing_list(
        &self,
        model_version_id: Uuid,
        _ctx: &Session,
    ) -> Result<Vec<ModelPricingResponse>, AppError> {
        let records = model_pricing::Entity::find()
            .filter(model_pricing::Column::ModelVersionId.eq(model_version_id))
            .order_by_desc(model_pricing::Column::EffectiveFrom)
            .all(&self.db)
            .await?;
        Ok(records
            .into_iter()
            .map(ModelPricingResponse::from)
            .collect())
    }

    /// Fetch a single pricing record by primary key.
    pub async fn agent_model_pricing_get(
        &self,
        id: i64,
        _ctx: &Session,
    ) -> Result<ModelPricingResponse, AppError> {
        let record = model_pricing::Entity::find_by_id(id)
            .one(&self.db)
            .await?
            .ok_or(AppError::NotFound("Pricing record not found".to_string()))?;
        Ok(ModelPricingResponse::from(record))
    }

    /// Create a pricing record. System callers only.
    pub async fn agent_model_pricing_create(
        &self,
        request: CreateModelPricingRequest,
        ctx: &Session,
    ) -> Result<ModelPricingResponse, AppError> {
        require_system_caller(ctx)?;

        // Validate the currency code before persisting.
        // NOTE(review): the concrete parse target type was garbled in the
        // source text — confirm against this file's imports.
        let _ = request
            .currency
            .parse::<Currency>()
            .map_err(|_| AppError::BadRequest("Invalid pricing currency".to_string()))?;

        let active = model_pricing::ActiveModel {
            model_version_id: Set(request.model_version_id),
            input_price_per_1k_tokens: Set(request.input_price_per_1k_tokens),
            output_price_per_1k_tokens: Set(request.output_price_per_1k_tokens),
            currency: Set(request.currency),
            effective_from: Set(request.effective_from),
            ..Default::default()
        };
        let record = active.insert(&self.db).await?;
        Ok(ModelPricingResponse::from(record))
    }

    /// Partially update a pricing record. System callers only.
    pub async fn agent_model_pricing_update(
        &self,
        id: i64,
        request: UpdateModelPricingRequest,
        ctx: &Session,
    ) -> Result<ModelPricingResponse, AppError> {
        require_system_caller(ctx)?;

        let record = model_pricing::Entity::find_by_id(id)
            .one(&self.db)
            .await?
            .ok_or(AppError::NotFound("Pricing record not found".to_string()))?;

        let mut active: model_pricing::ActiveModel = record.into();
        if let Some(v) = request.input_price_per_1k_tokens {
            active.input_price_per_1k_tokens = Set(v);
        }
        if let Some(v) = request.output_price_per_1k_tokens {
            active.output_price_per_1k_tokens = Set(v);
        }
        if let Some(v) = request.currency {
            // Same validation as on create.
            let _ = v
                .parse::<Currency>()
                .map_err(|_| AppError::BadRequest("Invalid pricing currency".to_string()))?;
            active.currency = Set(v);
        }
        if let Some(v) = request.effective_from {
            active.effective_from = Set(v);
        }

        let record = active.update(&self.db).await?;
        Ok(ModelPricingResponse::from(record))
    }

    /// Delete a pricing record by id. System callers only.
    pub async fn agent_model_pricing_delete(&self, id: i64, ctx: &Session) -> Result<(), AppError> {
        require_system_caller(ctx)?;
        model_pricing::Entity::delete_by_id(id)
            .exec(&self.db)
            .await?;
        Ok(())
    }
}

// --- libs/service/agent/model_version.rs (new file) ---

use crate::AppService;
use crate::error::AppError;
use chrono::Utc;
use models::agents::model_version;
use models::agents::{
    ModelStatus,
    model_version::{Column as MVColumn, Entity as MVEntity, Model as ModelVersionModel},
};
use sea_orm::*;
use serde::{Deserialize, Serialize};
use session::Session;
use utoipa::ToSchema;
use uuid::Uuid;

use super::provider::require_system_caller;

/// Request body for creating a model version.
#[derive(Debug, Clone, Deserialize, ToSchema)]
pub struct CreateModelVersionRequest {
    pub model_id: Uuid,
    pub version: String,
    pub release_date: Option<chrono::DateTime<Utc>>,
    pub change_log: Option<String>,
    /// Whether this version becomes the model's default. Absent => false.
    #[serde(default)]
    pub is_default: bool,
}

/// Request body for partially updating a model version.
#[derive(Debug, Clone, Deserialize, ToSchema)]
pub struct UpdateModelVersionRequest {
    pub version: Option<String>,
    pub release_date: Option<chrono::DateTime<Utc>>,
    pub change_log: Option<String>,
    pub is_default: Option<bool>,
    pub status: Option<String>,
}

#[derive(Debug,
Clone, Serialize, ToSchema)]
pub struct ModelVersionResponse {
    pub id: Uuid,
    pub model_id: Uuid,
    pub version: String,
    pub release_date: Option<chrono::DateTime<Utc>>,
    pub change_log: Option<String>,
    pub is_default: bool,
    pub status: String,
    pub created_at: chrono::DateTime<Utc>,
}

impl From<ModelVersionModel> for ModelVersionResponse {
    fn from(mv: ModelVersionModel) -> Self {
        Self {
            id: mv.id,
            model_id: mv.model_id,
            version: mv.version,
            release_date: mv.release_date,
            change_log: mv.change_log,
            is_default: mv.is_default,
            status: mv.status,
            created_at: mv.created_at,
        }
    }
}

impl AppService {
    /// List model versions, optionally filtered to one model.
    ///
    /// NOTE(review): ordering by the `version` string column is
    /// lexicographic ("10" sorts before "2") — confirm that is intended.
    pub async fn agent_model_version_list(
        &self,
        model_id: Option<Uuid>,
        _ctx: &Session,
    ) -> Result<Vec<ModelVersionResponse>, AppError> {
        let mut query = MVEntity::find().order_by_asc(MVColumn::Version);
        if let Some(mid) = model_id {
            query = query.filter(MVColumn::ModelId.eq(mid));
        }
        let versions = query.all(&self.db).await?;
        Ok(versions
            .into_iter()
            .map(ModelVersionResponse::from)
            .collect())
    }

    /// Fetch a single model version by id.
    pub async fn agent_model_version_get(
        &self,
        id: Uuid,
        _ctx: &Session,
    ) -> Result<ModelVersionResponse, AppError> {
        let version = MVEntity::find_by_id(id)
            .one(&self.db)
            .await?
            .ok_or(AppError::NotFound("Model version not found".to_string()))?;
        Ok(ModelVersionResponse::from(version))
    }

    /// Create a model version (status starts as Active). System callers only.
    ///
    /// NOTE(review): `is_default = true` does not clear the flag on the
    /// model's other versions — verify a DB constraint or caller handles that.
    pub async fn agent_model_version_create(
        &self,
        request: CreateModelVersionRequest,
        ctx: &Session,
    ) -> Result<ModelVersionResponse, AppError> {
        require_system_caller(ctx)?;

        let now = Utc::now();
        let active = model_version::ActiveModel {
            id: Set(Uuid::now_v7()),
            model_id: Set(request.model_id),
            version: Set(request.version),
            release_date: Set(request.release_date),
            change_log: Set(request.change_log),
            is_default: Set(request.is_default),
            status: Set(ModelStatus::Active.to_string()),
            created_at: Set(now),
            ..Default::default()
        };
        let version = active.insert(&self.db).await?;
        Ok(ModelVersionResponse::from(version))
    }

    /// Partially update a model version. System callers only.
    ///
    /// NOTE(review): `status` is stored as the raw request string without
    /// validating it against `ModelStatus` — confirm whether that is intended.
    pub async fn agent_model_version_update(
        &self,
        id: Uuid,
        request: UpdateModelVersionRequest,
        ctx: &Session,
    ) -> Result<ModelVersionResponse, AppError> {
        require_system_caller(ctx)?;

        let version = MVEntity::find_by_id(id)
            .one(&self.db)
            .await?
            .ok_or(AppError::NotFound("Model version not found".to_string()))?;

        let mut active: model_version::ActiveModel = version.into();
        if let Some(version) = request.version {
            active.version = Set(version);
        }
        // Optional fields can be set but never cleared via this endpoint.
        if let Some(release_date) = request.release_date {
            active.release_date = Set(Some(release_date));
        }
        if let Some(change_log) = request.change_log {
            active.change_log = Set(Some(change_log));
        }
        if let Some(is_default) = request.is_default {
            active.is_default = Set(is_default);
        }
        if let Some(status) = request.status {
            active.status = Set(status);
        }

        let version = active.update(&self.db).await?;
        Ok(ModelVersionResponse::from(version))
    }

    /// Delete a model version by id. System callers only.
    pub async fn agent_model_version_delete(
        &self,
        id: Uuid,
        ctx: &Session,
    ) -> Result<(), AppError> {
        require_system_caller(ctx)?;
        MVEntity::delete_by_id(id).exec(&self.db).await?;
        Ok(())
    }
}

// --- libs/service/agent/pr_summary.rs (new file) ---
0000000..a61e0e5 --- /dev/null +++ b/libs/service/agent/pr_summary.rs @@ -0,0 +1,374 @@ +//! AI-powered PR description generation. +//! +//! Generates a structured description for pull requests based on the diff. + +use crate::AppService; +use crate::error::AppError; +use crate::git::GitDomain; +use chrono::Utc; +use models::agents::ModelStatus; +use models::agents::model::{Column as MColumn, Entity as MEntity}; +use models::pull_request::pull_request; +use models::repos::repo; +use sea_orm::*; +use serde::{Deserialize, Serialize}; +use session::Session; +use utoipa::ToSchema; +use uuid::Uuid; + +use super::billing::BillingRecord; + +/// Structured PR description generated by AI. +#[derive(Debug, Clone, Serialize, Deserialize, ToSchema)] +pub struct PrDescription { + /// 3-5 line summary of what this PR does. + pub summary: String, + /// Key changes made in this PR. + pub changes: Vec, + /// Potential risks or things to watch out for. + #[serde(default)] + pub risks: Vec, + /// Suggested test scenarios. + #[serde(default)] + pub tests: Vec, +} + +/// Response from the AI description generation endpoint. +#[derive(Debug, Clone, Serialize, ToSchema)] +pub struct GeneratePrDescriptionResponse { + pub description: PrDescription, + /// Markdown-formatted description ready to paste into the PR body. + pub markdown_body: String, + pub billing: Option, +} + +/// Request body for generating a PR description. +#[derive(Debug, Clone, Deserialize, ToSchema)] +pub struct GeneratePrDescriptionRequest { + /// PR number to generate description for. + pub pr_number: Option, + /// Override the default AI model for this generation. + pub model_id: Option, +} + +/// Internal response (passed from PR creation background task). +#[derive(Debug, Clone, Serialize, ToSchema)] +pub struct GeneratedPrDescription { + pub body: String, + pub created_by_ai: bool, +} + +/// Build a prompt for PR description generation. 
+fn build_description_prompt(title: &str, body: Option<&str>, diff: &str) -> String { + let existing_desc = body + .map(|b| format!("Existing user description:\n{}", b)) + .unwrap_or_default(); + format!( + r#"You are an expert code reviewer. Generate a clear, concise pull request description. + +PR Title: {title} +{existing_desc} + +Changed files diff (truncated to key portions): +--- +{diff} +--- + +Based on the PR title and diff, generate a structured description in this exact JSON format: +{{ + "summary": "A 3-5 line summary of what this PR does and why", + "changes": ["List of key changes, one per item"], + "risks": ["Potential risks or considerations, if any"], + "tests": ["Suggested test scenarios to verify this PR"] +}} + +Respond with ONLY the JSON object, no markdown code fences or extra text."# + ) +} + +/// Format the structured description as a markdown PR body. +fn format_as_markdown(_title: &str, desc: &PrDescription) -> String { + let mut lines = vec![format!("## Summary\n\n{}", desc.summary)]; + + if !desc.changes.is_empty() { + lines.push("\n## Changes\n\n".to_string()); + for change in &desc.changes { + lines.push(format!("- {}", change)); + } + } + + if !desc.risks.is_empty() { + lines.push("\n## Risks & Considerations\n\n".to_string()); + for risk in &desc.risks { + lines.push(format!("- ⚠️ {}", risk)); + } + } + + if !desc.tests.is_empty() { + lines.push("\n## Testing\n\n".to_string()); + for test in &desc.tests { + lines.push(format!("- {}", test)); + } + } + + lines.push("\n---\n".to_string()); + lines.push(format!( + "*🤖 Generated by AI · {}", + Utc::now().format("%Y-%m-%d") + )); + + lines.join("\n") +} + +/// Call the AI model with a prompt and return the text response. 
+async fn call_ai_model_for_description( + model_name: &str, + prompt: &str, + app_config: &config::AppConfig, +) -> Result { + let api_key = app_config + .ai_api_key() + .map_err(|e| AppError::InternalServerError(format!("AI API key not configured: {}", e)))?; + + let base_url = app_config + .ai_basic_url() + .unwrap_or_else(|_| "https://api.openai.com".into()); + + let client_config = agent::AiClientConfig::new(api_key).with_base_url(base_url); + + let messages = vec![ + async_openai::types::chat::ChatCompletionRequestMessage::User( + async_openai::types::chat::ChatCompletionRequestUserMessage { + content: async_openai::types::chat::ChatCompletionRequestUserMessageContent::Text( + prompt.to_string(), + ), + ..Default::default() + }, + ), + ]; + + agent::call_with_params(&messages, model_name, &client_config, 0.3, 4096, None, None) + .await + .map_err(|e| AppError::InternalServerError(format!("AI call failed: {}", e))) +} + +/// Extract JSON from a response that may contain markdown code fences. +fn extract_json(s: &str) -> Option { + // Try to find a JSON code block + if let Some(start) = s.find("```json") { + let rest = &s[start + 7..]; + if let Some(end) = rest.find("```") { + return Some(rest[..end].trim().to_string()); + } + } + if let Some(start) = s.find("```") { + let rest = &s[start + 3..]; + if let Some(end) = rest.find("```") { + return Some(rest[..end].trim().to_string()); + } + } + // Try raw JSON + let trimmed = s.trim(); + if trimmed.starts_with('{') || trimmed.starts_with('[') { + return Some(trimmed.to_string()); + } + None +} + +impl AppService { + /// Public entry point — performs session auth then delegates. 
+ pub async fn generate_pr_description( + &self, + namespace: String, + repo_name: String, + request: GeneratePrDescriptionRequest, + ctx: &Session, + ) -> Result { + let repo = self + .utils_find_repo(namespace.clone(), repo_name.clone(), ctx) + .await?; + + let pr = match request.pr_number { + Some(n) => pull_request::Entity::find() + .filter(pull_request::Column::Repo.eq(repo.id)) + .filter(pull_request::Column::Number.eq(n)) + .one(&self.db) + .await? + .ok_or_else(|| AppError::NotFound("Pull request not found".to_string()))?, + None => pull_request::Entity::find() + .filter(pull_request::Column::Repo.eq(repo.id)) + .order_by_desc(pull_request::Column::CreatedAt) + .one(&self.db) + .await? + .ok_or_else(|| AppError::NotFound("No pull request found".to_string()))?, + }; + + self.generate_pr_description_internal(pr, repo, request.model_id) + .await + } + + /// Internal entry point — skips auth. Used by background tasks. + pub async fn generate_pr_description_internal( + &self, + pr: pull_request::Model, + repo: repo::Model, + model_id: Option, + ) -> Result { + // Find a model first so we can use its context limit for diff truncation + let model = match model_id { + Some(id) => MEntity::find_by_id(id) + .one(&self.db) + .await? + .ok_or(AppError::NotFound("Model not found".to_string()))?, + None => MEntity::find() + .filter(MColumn::Status.eq(ModelStatus::Active.to_string())) + .order_by_asc(MColumn::Name) + .one(&self.db) + .await? + .ok_or_else(|| { + AppError::InternalServerError( + "No active AI model found. Please configure an AI model first.".into(), + ) + })?, + }; + + // Get the diff with token-aware truncation + let diff = self + .get_pr_description_diff(&repo, &pr, &model.name, model.context_length) + .await?; + + // Build prompt and call AI + let prompt = build_description_prompt(&pr.title, pr.body.as_deref(), &diff); + let ai_response = call_ai_model_for_description(&model.name, &prompt, &self.config).await?; + + // Record billing (non-fatal). 
+ let billing = self + .record_ai_usage( + repo.project, + model.id, + ai_response.input_tokens, + ai_response.output_tokens, + ) + .await + .inspect_err(|e| { + slog::warn!( + self.logs, + "failed to record AI billing for PR description"; + "project" => %repo.project, + "error" => ?e + ); + }) + .ok(); + + // Parse JSON response + let json_str = + extract_json(&ai_response.content).unwrap_or_else(|| ai_response.content.clone()); + + let pr_desc: PrDescription = serde_json::from_str(&json_str).map_err(|e| { + AppError::InternalServerError(format!( + "Failed to parse AI response as JSON: {}. Raw: {}", + e, + &ai_response.content[..ai_response.content.len().min(200)] + )) + })?; + + let markdown_body = format_as_markdown(&pr.title, &pr_desc); + + Ok(GeneratePrDescriptionResponse { + description: pr_desc, + markdown_body, + billing, + }) + } + + /// Get the diff for PR description generation (unified format, truncated). + async fn get_pr_description_diff( + &self, + repo: &repo::Model, + pr: &pull_request::Model, + model_name: &str, + context_limit: i64, + ) -> Result { + let oids_result = { + let base = pr.base.clone(); + let head = pr.head.clone(); + let repo_model = repo.clone(); + let handle: tokio::task::JoinHandle> = + tokio::task::spawn_blocking(move || { + let domain = GitDomain::from_model(repo_model)?; + let base_commit_oid = domain + .branch_target(&base) + .map_err(|e| crate::git::GitError::Internal(e.to_string()))? + .ok_or_else(|| { + AppError::NotFound(format!("Branch '{}' not found", base)) + })?; + let head_commit_oid = domain + .branch_target(&head) + .map_err(|e| crate::git::GitError::Internal(e.to_string()))? 
+ .ok_or_else(|| { + AppError::NotFound(format!("Branch '{}' not found", head)) + })?; + let base_oid = base_commit_oid.to_oid().map_err(|e| { + AppError::InternalServerError(format!("Invalid OID: {}", e)) + })?; + let head_oid = head_commit_oid.to_oid().map_err(|e| { + AppError::InternalServerError(format!("Invalid OID: {}", e)) + })?; + Ok((base_oid, head_oid)) + }); + handle + } + .await + .map_err(|e| AppError::InternalServerError(format!("Join error: {}", e)))?; + let (base_oid, head_oid) = oids_result.map_err(|e| { + AppError::InternalServerError(format!("Failed to resolve branch OIDs: {:?}", e)) + })?; + + let repo_for_diff = repo.clone(); + let diff_text = tokio::task::spawn_blocking(move || -> Result { + let domain = GitDomain::from_model(repo_for_diff)?; + let old_tree = domain + .repo() + .find_tree(base_oid) + .map_err(|e| AppError::InternalServerError(e.to_string()))?; + let new_tree = domain + .repo() + .find_tree(head_oid) + .map_err(|e| AppError::InternalServerError(e.to_string()))?; + + let diff = domain + .repo() + .diff_tree_to_tree(Some(&old_tree), Some(&new_tree), None) + .map_err(|e| AppError::InternalServerError(e.to_string()))?; + + let mut patch_buf: Vec = Vec::new(); + diff.print(git2::DiffFormat::Patch, |_delta, _hunk, line| { + patch_buf.extend_from_slice(line.content()); + patch_buf.push(b'\n'); + true + }) + .map_err(|e| AppError::InternalServerError(e.to_string()))?; + + String::from_utf8(patch_buf).map_err(|e| AppError::InternalServerError(e.to_string())) + }) + .await + .map_err(|e| AppError::InternalServerError(format!("Task join error: {e}")))? + .map_err(AppError::from)?; + + // Truncate to avoid token limits with token-aware budgeting. + // Reserve 4096 tokens for output + system overhead. 
let reserve = 4096;
        match agent::tokent::truncate_to_token_budget(
            &diff_text,
            model_name,
            context_limit as usize,
            reserve,
        ) {
            // Only annotate when truncation actually shortened the text.
            Ok(truncated) if truncated.len() < diff_text.len() => Ok(format!(
                "{}...\n[diff truncated to fit token budget]",
                truncated
            )),
            _ => Ok(diff_text),
        }
    }
}

// --- libs/service/agent/provider.rs (new file) ---

use crate::AppService;
use crate::error::AppError;
use chrono::Utc;
use models::agents::model_provider;
use models::agents::{ModelStatus, model_provider::Entity as ProviderEntity};
use sea_orm::*;
use serde::{Deserialize, Serialize};
use session::Session;
use utoipa::ToSchema;
use uuid::Uuid;

/// Request body for registering a model provider.
#[derive(Debug, Clone, Deserialize, ToSchema)]
pub struct CreateProviderRequest {
    pub name: String,
    pub display_name: String,
    pub website: Option<String>,
}

/// Request body for partially updating a provider.
#[derive(Debug, Clone, Deserialize, ToSchema)]
pub struct UpdateProviderRequest {
    pub display_name: Option<String>,
    pub website: Option<String>,
    pub status: Option<String>,
}

/// API representation of a model provider.
#[derive(Debug, Clone, Serialize, ToSchema)]
pub struct ProviderResponse {
    pub id: Uuid,
    pub name: String,
    pub display_name: String,
    pub website: Option<String>,
    pub status: String,
    pub created_at: chrono::DateTime<Utc>,
    pub updated_at: chrono::DateTime<Utc>,
}

impl From<model_provider::Model> for ProviderResponse {
    fn from(p: model_provider::Model) -> Self {
        Self {
            id: p.id,
            name: p.name,
            display_name: p.display_name,
            website: p.website,
            status: p.status,
            created_at: p.created_at,
            updated_at: p.updated_at,
        }
    }
}

/// Gate mutating endpoints to the system caller.
///
/// Only a session whose user id is the nil UUID passes; everyone else gets
/// `Unauthorized`. Shared by the provider/model/version/pricing services.
pub(crate) fn require_system_caller(ctx: &Session) -> Result<(), AppError> {
    if ctx.user() != Some(Uuid::nil()) {
        return Err(AppError::Unauthorized);
    }
    Ok(())
}

impl AppService {
    /// List all providers ordered by display name.
    pub async fn agent_provider_list(
        &self,
        _ctx: &Session,
    ) -> Result<Vec<ProviderResponse>, AppError> {
        let providers = ProviderEntity::find()
            .order_by_asc(model_provider::Column::DisplayName)
            .all(&self.db)
            .await?;
        Ok(providers.into_iter().map(ProviderResponse::from).collect())
    }

    /// Fetch a single provider by id.
    pub async fn agent_provider_get(
        &self,
        id: Uuid,
        _ctx: &Session,
    ) -> Result<ProviderResponse, AppError> {
        let provider = ProviderEntity::find_by_id(id)
            .one(&self.db)
            .await?
            .ok_or(AppError::NotFound("Provider not found".to_string()))?;
        Ok(ProviderResponse::from(provider))
    }

    /// Create a provider (status starts as Active). System callers only.
    pub async fn agent_provider_create(
        &self,
        request: CreateProviderRequest,
        ctx: &Session,
    ) -> Result<ProviderResponse, AppError> {
        require_system_caller(ctx)?;

        let now = Utc::now();
        let active = model_provider::ActiveModel {
            id: Set(Uuid::now_v7()),
            name: Set(request.name),
            display_name: Set(request.display_name),
            website: Set(request.website),
            status: Set(ModelStatus::Active.to_string()),
            created_at: Set(now),
            updated_at: Set(now),
            ..Default::default()
        };
        let model = active.insert(&self.db).await?;
        Ok(ProviderResponse::from(model))
    }

    /// Partially update a provider. System callers only.
    pub async fn agent_provider_update(
        &self,
        id: Uuid,
        request: UpdateProviderRequest,
        ctx: &Session,
    ) -> Result<ProviderResponse, AppError> {
        require_system_caller(ctx)?;

        let provider = ProviderEntity::find_by_id(id)
            .one(&self.db)
            .await?
            .ok_or(AppError::NotFound("Provider not found".to_string()))?;

        let mut active: model_provider::ActiveModel = provider.into();
        if let Some(display_name) = request.display_name {
            active.display_name = Set(display_name);
        }
        // `website` can be set here but never cleared back to NULL.
        if let Some(website) = request.website {
            active.website = Set(Some(website));
        }
        if let Some(status) = request.status {
            active.status = Set(status);
        }
        active.updated_at = Set(Utc::now());

        let model = active.update(&self.db).await?;
        Ok(ProviderResponse::from(model))
    }

    /// Delete a provider by id. System callers only.
    pub async fn agent_provider_delete(&self, id: Uuid, ctx: &Session) -> Result<(), AppError> {
        require_system_caller(ctx)?;
        ProviderEntity::delete_by_id(id).exec(&self.db).await?;
        Ok(())
    }
}

// --- libs/service/agent/sync.rs (new file) ---

//! Synchronizes AI models from OpenRouter into the local database.
//!
//! Fetches the full model list via OpenRouter's `/api/v1/models` endpoint
//! (requires `OPENROUTER_API_KEY` in config or falls back to `AI_API_KEY`).
//!
//! OpenRouter returns rich metadata per model including `context_length`,
//! `pricing`, and `architecture.modality` — these are used to populate all
//! five model tables without any hard-coded heuristics.
+ +use crate::AppService; +use crate::error::AppError; +use chrono::Utc; +use db::database::AppDatabase; +use models::agents::model::Entity as ModelEntity; +use models::agents::model_capability::Entity as CapabilityEntity; +use models::agents::model_parameter_profile::Entity as ProfileEntity; +use models::agents::model_pricing::Entity as PricingEntity; +use models::agents::model_provider::Entity as ProviderEntity; +use models::agents::model_provider::Model as ProviderModel; +use models::agents::model_version::Entity as VersionEntity; +use models::agents::{CapabilityType, ModelCapability, ModelModality, ModelStatus}; +use sea_orm::prelude::*; +use sea_orm::Set; +use serde::Deserialize; +use serde::Serialize; +use session::Session; +use utoipa::ToSchema; +use uuid::Uuid; + +// OpenRouter API types ------------------------------------------------------- + +#[derive(Debug, Clone, Deserialize)] +struct OpenRouterResponse { + data: Vec, +} + +#[derive(Debug, Clone, Deserialize)] +struct OpenRouterModel { + id: String, + name: Option, + #[serde(default)] + description: Option, + pricing: Option, + #[serde(default)] + context_length: Option, + #[serde(default)] + architecture: Option, + #[serde(default)] + top_provider: Option, +} + +#[derive(Debug, Clone, Deserialize)] +struct OpenRouterPricing { + prompt: String, + completion: String, + #[serde(default)] + request: Option, + #[serde(default)] + image: Option, + #[serde(default)] + input_cache_read: Option, + #[serde(default)] + input_cache_write: Option, + #[serde(default)] + web_search: Option, + #[serde(default)] + internal_reasoning: Option, +} + +#[derive(Debug, Clone, Deserialize)] +struct OpenRouterArchitecture { + #[serde(default)] + modality: Option, + #[serde(default)] + input_modalities: Option>, + #[serde(default)] + output_modalities: Option>, + #[serde(default)] + tokenizer: Option, + #[serde(default)] + instruct_type: Option, +} + +#[derive(Debug, Clone, Deserialize)] +struct OpenRouterTopProvider { + 
#[serde(default)] + context_length: Option, + #[serde(default)] + max_completion_tokens: Option, + #[serde(default)] + is_moderated: Option, +} + +// Response type -------------------------------------------------------------- + +#[derive(Debug, Clone, Serialize, ToSchema)] +pub struct SyncModelsResponse { + pub models_created: i64, + pub models_updated: i64, + pub versions_created: i64, + pub pricing_created: i64, + pub capabilities_created: i64, + pub profiles_created: i64, +} + +// Inference helpers (fallbacks when OpenRouter data is missing) --------------- + +fn infer_modality(name: &str, arch_modality: Option<&str>) -> ModelModality { + if let Some(m) = arch_modality { + let m = m.to_lowercase(); + if m.contains("text") || m.contains("chat") { + return ModelModality::Text; + } + if m.contains("image") || m.contains("vision") { + return ModelModality::Multimodal; + } + if m.contains("audio") || m.contains("speech") { + return ModelModality::Audio; + } + } + let lower = name.to_lowercase(); + if lower.contains("vision") + || lower.contains("dall-e") + || lower.contains("gpt-image") + || lower.contains("gpt-4o") + { + ModelModality::Multimodal + } else if lower.contains("embedding") { + ModelModality::Text + } else if lower.contains("whisper") || lower.contains("audio") { + ModelModality::Audio + } else { + ModelModality::Text + } +} + +fn infer_capability(name: &str) -> ModelCapability { + let lower = name.to_lowercase(); + if lower.contains("embedding") { + ModelCapability::Embedding + } else if lower.contains("code") { + ModelCapability::Code + } else { + ModelCapability::Chat + } +} + +fn infer_context_length(name: &str) -> i64 { + let lower = name.to_lowercase(); + // Hard-coded fallback table for known models + let fallbacks: &[(&str, i64)] = &[ + ("gpt-4o", 128_000), + ("chatgpt-4o", 128_000), + ("o1-preview", 128_000), + ("o1-mini", 65_536), + ("o1", 65_536), + ("o3-mini", 65_536), + ("gpt-4-turbo", 128_000), + ("gpt-4-32k", 32_768), + ("gpt-4", 8_192), 
+ ("gpt-4o-mini", 128_000), + ("chatgpt-4o-mini", 128_000), + ("gpt-3.5-turbo-16k", 16_384), + ("gpt-3.5-turbo", 16_385), + ("text-embedding-3-large", 8_191), + ("text-embedding-3-small", 8_191), + ("text-embedding-ada", 8_191), + ("dall-e", 4_096), + ("whisper", 30_000), + ("gpt-image-1", 16_384), + ]; + for (prefix, ctx) in fallbacks { + if lower.starts_with(prefix) { + return *ctx; + } + } + 8_192 +} + +fn infer_max_output(name: &str, top_provider_max: Option) -> Option { + if let Some(v) = top_provider_max { + return Some(v as i64); + } + let lower = name.to_lowercase(); + let fallbacks: &[(&str, i64)] = &[ + ("gpt-4o", 16_384), + ("chatgpt-4o", 16_384), + ("o1-preview", 32_768), + ("o1-mini", 65_536), + ("o1", 100_000), + ("o3-mini", 100_000), + ("gpt-4-turbo", 4_096), + ("gpt-4-32k", 32_768), + ("gpt-4", 8_192), + ("gpt-4o-mini", 16_384), + ("chatgpt-4o-mini", 16_384), + ("gpt-3.5-turbo", 4_096), + ("gpt-image-1", 1_024), + ]; + for (prefix, max) in fallbacks { + if lower.starts_with(prefix) { + return Some(*max); + } + } + if lower.starts_with("gpt") || lower.starts_with("o1") || lower.starts_with("o3") { + Some(4_096) + } else { + None + } +} + +fn infer_capability_list(name: &str) -> Vec<(CapabilityType, bool)> { + let lower = name.to_lowercase(); + let mut caps = Vec::new(); + caps.push((CapabilityType::FunctionCall, true)); + + if lower.contains("gpt-") || lower.contains("o1") || lower.contains("o3") { + caps.push((CapabilityType::ToolUse, true)); + } + + if lower.contains("vision") + || lower.contains("gpt-4o") + || lower.contains("gpt-image") + || lower.contains("dall-e") + { + caps.push((CapabilityType::Vision, true)); + } + + if lower.contains("o1") || lower.contains("o3") { + caps.push((CapabilityType::Reasoning, true)); + } + + caps +} + +fn infer_pricing_fallback(name: &str) -> Option<(String, String)> { + let lower = name.to_lowercase(); + if lower.contains("gpt-4o-mini") || lower.contains("chatgpt-4o-mini") { + Some(("0.075".to_string(), 
"0.30".to_string())) + } else if lower.contains("gpt-4o") || lower.contains("chatgpt-4o") { + Some(("2.50".to_string(), "10.00".to_string())) + } else if lower.contains("gpt-4-turbo") { + Some(("10.00".to_string(), "30.00".to_string())) + } else if lower.contains("gpt-4") && !lower.contains("4o") { + Some(("15.00".to_string(), "60.00".to_string())) + } else if lower.contains("gpt-3.5-turbo") { + Some(("0.50".to_string(), "1.50".to_string())) + } else if lower.contains("o1-preview") { + Some(("15.00".to_string(), "60.00".to_string())) + } else if lower.contains("o1-mini") { + Some(("3.00".to_string(), "12.00".to_string())) + } else if lower.contains("o1") { + Some(("15.00".to_string(), "60.00".to_string())) + } else if lower.contains("o3-mini") { + Some(("1.50".to_string(), "6.00".to_string())) + } else if lower.contains("embedding-3-small") { + Some(("0.02".to_string(), "0.00".to_string())) + } else if lower.contains("embedding-3-large") { + Some(("0.13".to_string(), "0.00".to_string())) + } else if lower.contains("embedding-ada") { + Some(("0.10".to_string(), "0.00".to_string())) + } else if lower.contains("embedding") { + Some(("0.10".to_string(), "0.00".to_string())) + } else if lower.contains("dall-e") { + Some(("0.00".to_string(), "4.00".to_string())) + } else if lower.contains("whisper") { + Some(("0.00".to_string(), "0.006".to_string())) + } else { + None + } +} + +// Provider helpers ----------------------------------------------------------- + +/// Extract provider slug from OpenRouter model ID (e.g. "anthropic/claude-3.5-sonnet" → "anthropic"). +fn extract_provider(model_id: &str) -> &str { + model_id.split('/').next().unwrap_or("unknown") +} + +/// Normalize a provider slug to a short canonical name. 
/// Map known OpenRouter slugs onto a canonical short name; unknown slugs
/// pass through unchanged.
///
/// FIX: the original returned `&'static str` by `Box::leak`-ing every unknown
/// slug, leaking heap memory on each sync run. Returning an owned `String`
/// removes the leak (this helper is only called from `upsert_provider` below).
fn normalize_provider_name(slug: &str) -> String {
    match slug {
        "openai" => "openai".to_string(),
        "anthropic" => "anthropic".to_string(),
        "google" | "google-ai" => "google".to_string(),
        "mistralai" => "mistral".to_string(),
        "meta-llama" | "meta" => "meta".to_string(),
        "deepseek" => "deepseek".to_string(),
        "azure" | "azure-openai" => "azure".to_string(),
        "x-ai" | "xai" => "xai".to_string(),
        s => s.to_string(),
    }
}

/// Human-readable display name for a canonical provider name.
fn provider_display_name(name: &str) -> String {
    match name {
        "openai" => "OpenAI".to_string(),
        "anthropic" => "Anthropic".to_string(),
        "google" => "Google DeepMind".to_string(),
        "mistral" => "Mistral AI".to_string(),
        "meta" => "Meta".to_string(),
        "deepseek" => "DeepSeek".to_string(),
        "azure" => "Microsoft Azure".to_string(),
        "xai" => "xAI".to_string(),
        s => s.to_string(),
    }
}

// Upsert helpers -------------------------------------------------------------

/// Find-or-create a provider row for the given slug, touching `updated_at`
/// on the existing row.
async fn upsert_provider(
    db: &AppDatabase,
    slug: &str,
) -> Result<ProviderModel, AppError> {
    let name = normalize_provider_name(slug);
    let display = provider_display_name(&name);
    let now = Utc::now();

    use models::agents::model_provider::Column as PCol;
    if let Some(existing) = ProviderEntity::find()
        .filter(PCol::Name.eq(name.as_str()))
        .one(db)
        .await?
    {
        let mut active: models::agents::model_provider::ActiveModel = existing.into();
        active.updated_at = Set(now);
        // FIX: `update` already returns the refreshed row — the original
        // re-queried it and `.unwrap()`-ed, a redundant round-trip that could
        // panic under concurrent deletes.
        let updated = active.update(db).await?;
        Ok(updated)
    } else {
        let active = models::agents::model_provider::ActiveModel {
            id: Set(Uuid::now_v7()),
            name: Set(name),
            display_name: Set(display),
            website: Set(None),
            status: Set(ModelStatus::Active.to_string()),
            created_at: Set(now),
            updated_at: Set(now),
        };
        active.insert(db).await.map_err(AppError::from)
    }
}

/// Upsert a model record and return (model, is_new).
async fn upsert_model(
    db: &AppDatabase,
    provider_id: Uuid,
    model_id_str: &str,
    or_model: &OpenRouterModel,
) -> Result<(models::agents::model::Model, bool), AppError> {
    let now = Utc::now();
    let modality_str = or_model
        .architecture
        .as_ref()
        .and_then(|a| a.modality.as_deref());
    let modality = infer_modality(model_id_str, modality_str);
    let capability = infer_capability(model_id_str);

    // OpenRouter context_length takes priority; fall back to inference.
    let context_length = or_model
        .context_length
        .map(|c| c as i64)
        .unwrap_or_else(|| infer_context_length(model_id_str));

    let max_output = infer_max_output(
        model_id_str,
        or_model.top_provider.as_ref().and_then(|p| p.max_completion_tokens),
    );

    use models::agents::model::Column as MCol;
    if let Some(existing) = ModelEntity::find()
        .filter(MCol::ProviderId.eq(provider_id))
        .filter(MCol::Name.eq(model_id_str))
        .one(db)
        .await?
    {
        let mut active: models::agents::model::ActiveModel = existing.into();
        active.context_length = Set(context_length);
        active.max_output_tokens = Set(max_output);
        active.status = Set(ModelStatus::Active.to_string());
        active.updated_at = Set(now);
        // FIX: use `update`'s returned row instead of find_by_id + unwrap.
        let updated = active.update(db).await?;
        Ok((updated, false))
    } else {
        let active = models::agents::model::ActiveModel {
            id: Set(Uuid::now_v7()),
            provider_id: Set(provider_id),
            name: Set(model_id_str.to_string()),
            modality: Set(modality.to_string()),
            capability: Set(capability.to_string()),
            context_length: Set(context_length),
            max_output_tokens: Set(max_output),
            training_cutoff: Set(None),
            is_open_source: Set(false),
            status: Set(ModelStatus::Active.to_string()),
            created_at: Set(now),
            updated_at: Set(now),
            ..Default::default()
        };
        let inserted = active.insert(db).await.map_err(AppError::from)?;
        Ok((inserted, true))
    }
}

/// Upsert default version for a model. Returns (version, is_new).
async fn upsert_version(
    db: &AppDatabase,
    model_uuid: Uuid,
) -> Result<(models::agents::model_version::Model, bool), AppError> {
    use models::agents::model_version::Column as VCol;
    let now = Utc::now();
    if let Some(existing) = VersionEntity::find()
        .filter(VCol::ModelId.eq(model_uuid))
        .filter(VCol::IsDefault.eq(true))
        .one(db)
        .await?
    {
        Ok((existing, false))
    } else {
        let active = models::agents::model_version::ActiveModel {
            id: Set(Uuid::now_v7()),
            model_id: Set(model_uuid),
            version: Set("1".to_string()),
            release_date: Set(None),
            change_log: Set(None),
            is_default: Set(true),
            status: Set(ModelStatus::Active.to_string()),
            created_at: Set(now),
        };
        let inserted = active.insert(db).await.map_err(AppError::from)?;
        Ok((inserted, true))
    }
}

/// Upsert pricing for a model version. Returns true if created.
async fn upsert_pricing(
    db: &AppDatabase,
    version_uuid: Uuid,
    pricing: Option<&OpenRouterPricing>,
    model_name: &str,
) -> Result<bool, AppError> {
    use models::agents::model_pricing::Column as PCol;
    let existing = PricingEntity::find()
        .filter(PCol::ModelVersionId.eq(version_uuid))
        .one(db)
        .await?;
    if existing.is_some() {
        return Ok(false);
    }

    let (input_str, output_str) = if let Some(p) = pricing {
        // NOTE(review): these raw OpenRouter prices are described here as
        // per-million-token strings, yet the column is named
        // `*_price_per_1k_tokens` and no unit conversion happens — confirm
        // the intended unit before relying on these values for billing.
        (p.prompt.clone(), p.completion.clone())
    } else if let Some((i, o)) = infer_pricing_fallback(model_name) {
        (i, o)
    } else {
        ("0.00".to_string(), "0.00".to_string())
    };

    let active = models::agents::model_pricing::ActiveModel {
        // NOTE(review): truncating a v7 UUID to i64 can collide and go
        // negative — confirm the table does not auto-generate this id.
        id: Set(Uuid::now_v7().as_u128() as i64),
        model_version_id: Set(version_uuid),
        input_price_per_1k_tokens: Set(input_str),
        output_price_per_1k_tokens: Set(output_str),
        currency: Set("USD".to_string()),
        effective_from: Set(Utc::now()),
    };
    active.insert(db).await.map_err(AppError::from)?;
    Ok(true)
}

/// Upsert capability records for a model version. Returns count of new records.
async fn upsert_capabilities(
    db: &AppDatabase,
    version_uuid: Uuid,
    model_name: &str,
) -> Result<i64, AppError> {
    use models::agents::model_capability::Column as CCol;
    let caps = infer_capability_list(model_name);
    let now = Utc::now();
    let mut created = 0i64;

    for (cap_type, supported) in caps {
        let exists = CapabilityEntity::find()
            .filter(CCol::ModelVersionId.eq(version_uuid))
            .filter(CCol::Capability.eq(cap_type.to_string()))
            .one(db)
            .await?;
        if exists.is_some() {
            continue;
        }
        let active = models::agents::model_capability::ActiveModel {
            id: Set(Uuid::now_v7().as_u128() as i64),
            // FIX: the original wrote `version_uuid.as_u128() as i64` here,
            // which neither matches the Uuid used by the dedupe filter above
            // nor the Uuid stored by upsert_pricing — store the Uuid itself.
            model_version_id: Set(version_uuid),
            capability: Set(cap_type.to_string()),
            is_supported: Set(supported),
            created_at: Set(now),
        };
        active.insert(db).await.map_err(AppError::from)?;
        created += 1;
    }
    Ok(created)
}

/// Upsert default parameter profile for a model version. Returns true if created.
async fn upsert_parameter_profile(
    db: &AppDatabase,
    version_uuid: Uuid,
    model_name: &str,
) -> Result<bool, AppError> {
    use models::agents::model_parameter_profile::Column as PCol;
    let existing = ProfileEntity::find()
        .filter(PCol::ModelVersionId.eq(version_uuid))
        .one(db)
        .await?;
    if existing.is_some() {
        return Ok(false);
    }

    // Reasoning models (o1/o3) pin temperature to 1.0; others allow 0.0-2.0.
    let lower = model_name.to_lowercase();
    let (t_min, t_max) = if lower.contains("o1") || lower.contains("o3") {
        (1.0, 1.0)
    } else {
        (0.0, 2.0)
    };

    let active = models::agents::model_parameter_profile::ActiveModel {
        id: Set(Uuid::now_v7().as_u128() as i64),
        model_version_id: Set(version_uuid),
        temperature_min: Set(t_min),
        temperature_max: Set(t_max),
        top_p_min: Set(0.0),
        top_p_max: Set(1.0),
        frequency_penalty_supported: Set(true),
        presence_penalty_supported: Set(true),
    };
    active.insert(db).await.map_err(AppError::from)?;
    Ok(true)
}

impl AppService {
    /// Sync models from OpenRouter into the local database.
+ /// + /// Calls OpenRouter's `GET /api/v1/models` using `OPENROUTER_API_KEY` + /// (falls back to `AI_API_KEY` if not set), then upserts provider / + /// model / version / pricing / capability / parameter-profile records. + /// + /// OpenRouter returns `context_length`, `pricing`, and `architecture.modality` + /// per model — these drive all inference-free field population. + /// Capabilities are still inferred from model name patterns. + pub async fn sync_upstream_models( + &self, + _ctx: &Session, + ) -> Result { + // Resolve API key: prefer OPENROUTER_API_KEY env var, fall back to AI_API_KEY. + let api_key = std::env::var("OPENROUTER_API_KEY") + .ok() + .or_else(|| self.config.ai_api_key().ok()) + .ok_or_else(|| { + AppError::InternalServerError( + "OPENROUTER_API_KEY or AI_API_KEY must be configured to sync models".into(), + ) + })?; + + let client = reqwest::Client::new(); + let resp: OpenRouterResponse = client + .get("https://openrouter.ai/api/v1/models") + .header("Authorization", format!("Bearer {api_key}")) + .send() + .await + .map_err(|e| AppError::InternalServerError(format!("OpenRouter API request failed: {}", e)))? + .error_for_status() + .map_err(|e| AppError::InternalServerError(format!("OpenRouter API error: {}", e)))? 
            .json()
            .await
            .map_err(|e| AppError::InternalServerError(format!("Failed to parse OpenRouter response: {}", e)))?;

        // Counters reported back to the caller in SyncModelsResponse.
        let mut models_created = 0i64;
        let mut models_updated = 0i64;
        let mut versions_created = 0i64;
        let mut pricing_created = 0i64;
        let mut capabilities_created = 0i64;
        let mut profiles_created = 0i64;

        for or_model in resp.data {
            // Filter out openrouter/auto which has negative pricing
            if or_model.id == "openrouter/auto" {
                continue;
            }

            // Provider slug is derived from the "provider/model" id prefix.
            let provider_slug = extract_provider(&or_model.id);
            let provider = upsert_provider(&self.db, provider_slug).await?;

            let (model_record, is_new) =
                upsert_model(&self.db, provider.id, &or_model.id, &or_model).await?;

            if is_new {
                models_created += 1;
            } else {
                models_updated += 1;
            }

            // Version / pricing / capability / profile rows all hang off the
            // model's default version record.
            let (version_record, version_is_new) =
                upsert_version(&self.db, model_record.id).await?;
            if version_is_new {
                versions_created += 1;
            }

            if upsert_pricing(
                &self.db,
                version_record.id,
                or_model.pricing.as_ref(),
                &or_model.id,
            )
            .await?
            {
                pricing_created += 1;
            }

            capabilities_created +=
                upsert_capabilities(&self.db, version_record.id, &or_model.id).await?;

            if upsert_parameter_profile(&self.db, version_record.id, &or_model.id).await?
{ + profiles_created += 1; + } + } + + Ok(SyncModelsResponse { + models_created, + models_updated, + versions_created, + pricing_created, + capabilities_created, + profiles_created, + }) + } +} diff --git a/libs/service/auth/captcha.rs b/libs/service/auth/captcha.rs new file mode 100644 index 0000000..89f9ba2 --- /dev/null +++ b/libs/service/auth/captcha.rs @@ -0,0 +1,67 @@ +use crate::AppService; +use crate::auth::rsa::RsaResponse; +use crate::error::AppError; +use session::Session; +use utoipa::{IntoParams, ToSchema}; + +#[derive(serde::Deserialize, serde::Serialize, Clone, Debug, ToSchema, IntoParams)] +pub struct CaptchaQuery { + pub w: u32, + pub h: u32, + pub dark: bool, + pub rsa: bool, +} + +#[derive(serde::Serialize, ToSchema)] +pub struct CaptchaResponse { + pub base64: String, + pub rsa: Option, + pub req: CaptchaQuery, +} + +impl AppService { + const CAPTCHA_KEY: &'static str = "captcha"; + const CAPTCHA_LENGTH: usize = 4; + pub async fn auth_captcha( + &self, + context: &Session, + query: CaptchaQuery, + ) -> Result { + let CaptchaQuery { w, h, dark, rsa } = query; + let captcha = captcha_rs::CaptchaBuilder::new() + .width(w) + .height(h) + .dark_mode(dark) + .length(Self::CAPTCHA_LENGTH) + .build(); + let base64 = captcha.to_base64(); + let text = captcha.text; + context.insert(Self::CAPTCHA_KEY, text).ok(); + Ok(CaptchaResponse { + base64, + rsa: if rsa { + Some(self.auth_rsa(context).await?) + } else { + None + }, + req: CaptchaQuery { w, h, dark, rsa }, + }) + } + pub async fn auth_check_captcha( + &self, + context: &Session, + captcha: String, + ) -> Result<(), AppError> { + let text = context + .get::(Self::CAPTCHA_KEY) + .map_err(|_| AppError::CaptchaError)? 
+ .ok_or(AppError::CaptchaError)?; + if text.to_lowercase() != captcha.to_lowercase() { + context.remove(Self::CAPTCHA_KEY); + slog::warn!(self.logs, "Captcha verification failed"; "ip" => context.ip_address()); + return Err(AppError::CaptchaError); + } + context.remove(Self::CAPTCHA_KEY); + Ok(()) + } +} diff --git a/libs/service/auth/email.rs b/libs/service/auth/email.rs new file mode 100644 index 0000000..d1bb2c2 --- /dev/null +++ b/libs/service/auth/email.rs @@ -0,0 +1,185 @@ +use crate::AppService; +use crate::error::AppError; +use argon2::{Argon2, PasswordHash, PasswordVerifier}; +use models::users::{user_email, user_email_change, user_password}; +use sea_orm::*; +use serde::{Deserialize, Serialize}; +use session::Session; +use uuid::Uuid; + +#[derive(Debug, Clone, Deserialize, Serialize, utoipa::ToSchema)] +pub struct EmailChangeRequest { + pub new_email: String, + pub password: String, +} + +#[derive(Debug, Clone, Deserialize, Serialize, utoipa::ToSchema)] +pub struct EmailVerifyRequest { + pub token: String, +} + +#[derive(Debug, Clone, Serialize, utoipa::ToSchema)] +pub struct EmailResponse { + pub email: Option, +} + +impl AppService { + /// Get the current email address for the authenticated user. + pub async fn auth_get_email(&self, ctx: &Session) -> Result { + let user_uid = ctx.user().ok_or(AppError::Unauthorized)?; + + let email = user_email::Entity::find() + .filter(user_email::Column::User.eq(user_uid)) + .one(&self.db) + .await + .map_err(|e| AppError::DatabaseError(e.to_string()))?; + + Ok(EmailResponse { + email: email.map(|e| e.email), + }) + } + + /// Request an email change: validates password, stores a pending token, + /// and sends a verification email to the new address. 
+ pub async fn auth_email_change_request( + &self, + ctx: &Session, + params: EmailChangeRequest, + ) -> Result<(), AppError> { + let user_uid = ctx.user().ok_or(AppError::Unauthorized)?; + + // Verify password + let password = self.auth_rsa_decode(ctx, params.password).await?; + + let user_password = user_password::Entity::find() + .filter(user_password::Column::User.eq(user_uid)) + .one(&self.db) + .await + .ok() + .flatten() + .ok_or(AppError::UserNotFound)?; + + let hash = + PasswordHash::new(&user_password.password_hash).map_err(|_| AppError::UserNotFound)?; + Argon2::default() + .verify_password(password.as_bytes(), &hash) + .map_err(|_| AppError::InvalidPassword)?; + + // Check new email is not already taken + let existing = user_email::Entity::find() + .filter(user_email::Column::Email.eq(¶ms.new_email)) + .one(&self.db) + .await + .map_err(|e| AppError::DatabaseError(e.to_string()))?; + + if existing.is_some() { + return Err(AppError::EmailExists); + } + + // Generate token and store pending change + let token = self.generate_reset_token(); + let expires_at = chrono::Utc::now() + chrono::Duration::hours(24); + + let _ = user_email_change::Entity::delete_many() + .filter(user_email_change::Column::UserUid.eq(user_uid)) + .filter(user_email_change::Column::Used.eq(false)) + .exec(&self.db) + .await; + + user_email_change::ActiveModel { + token: Set(token.clone()), + user_uid: Set(user_uid), + new_email: Set(params.new_email.clone()), + expires_at: Set(expires_at), + used: Set(false), + created_at: Set(chrono::Utc::now()), + } + .insert(&self.db) + .await + .map_err(|e| AppError::DatabaseError(e.to_string()))?; + + // Queue verification email via Redis Stream + let domain = self + .config + .main_domain() + .map_err(|_| AppError::DoMainNotSet)?; + + let verify_link = format!("https://{}/auth/verify-email?token={}", domain, token); + + let envelope = queue::EmailEnvelope { + id: Uuid::new_v4(), + to: params.new_email.clone(), + subject: "Confirm Email 
Change".to_string(), + body: format!( + "You have requested to change your email address.\n\n\ + Please click the link below to confirm:\n\n{}\n\n\ + This link will expire in 24 hours.\n\n\ + If you did not request this change, please ignore this email.", + verify_link + ), + created_at: chrono::Utc::now(), + }; + + self.queue_producer + .publish_email(envelope) + .await + .map_err(|e| AppError::InternalServerError(e.to_string()))?; + + slog::info!(self.logs, "Email change verification queued"; "new_email" => %params.new_email, "user_uid" => %user_uid); + Ok(()) + } + + /// Verify an email change token and apply the new email. + pub async fn auth_email_verify(&self, params: EmailVerifyRequest) -> Result<(), AppError> { + let change = user_email_change::Entity::find() + .filter(user_email_change::Column::Token.eq(¶ms.token)) + .filter(user_email_change::Column::Used.eq(false)) + .one(&self.db) + .await + .map_err(|e| AppError::DatabaseError(e.to_string()))? + .ok_or(AppError::NotFound("Invalid or expired token".to_string()))?; + + // Check expiry + if change.expires_at < chrono::Utc::now() { + return Err(AppError::NotFound("Token has expired".to_string())); + } + + // Update or insert the new email in user_email + let existing_email = user_email::Entity::find() + .filter(user_email::Column::User.eq(change.user_uid)) + .one(&self.db) + .await + .map_err(|e| AppError::DatabaseError(e.to_string()))?; + + match existing_email { + Some(email_model) => { + let mut active: user_email::ActiveModel = email_model.into(); + active.email = Set(change.new_email.clone()); + active.update(&self.db).await + } + None => { + user_email::ActiveModel { + user: Set(change.user_uid), + email: Set(change.new_email.clone()), + created_at: Set(chrono::Utc::now()), + } + .insert(&self.db) + .await + } + } + .map_err(|e| AppError::DatabaseError(e.to_string()))?; + + // Mark token as used + let new_email = change.new_email.clone(); + let user_uid = change.user_uid; + let mut used_change: 
user_email_change::ActiveModel = change.into();
        used_change.used = Set(true);
        used_change
            .update(&self.db)
            .await
            .map_err(|e| AppError::DatabaseError(e.to_string()))?;

        slog::info!(self.logs, "Email changed successfully"; "new_email" => %new_email, "user_uid" => %user_uid);
        Ok(())
    }
}
diff --git a/libs/service/auth/login.rs b/libs/service/auth/login.rs
new file mode 100644
index 0000000..81195fe
--- /dev/null
+++ b/libs/service/auth/login.rs
@@ -0,0 +1,122 @@
use crate::AppService;
use crate::error::AppError;
use argon2::{Argon2, PasswordHash, PasswordVerifier};
use models::users::{user_activity_log, user_password};
use rand::RngExt;
use redis::AsyncCommands;
use sea_orm::*;
use serde::{Deserialize, Serialize};
use session::Session;
use sha1::Digest;

#[derive(Deserialize, Serialize, Clone, Debug, utoipa::ToSchema)]
pub struct LoginParams {
    // Username or email address; both are tried in that order.
    pub username: String,
    // RSA-encrypted password, decrypted via auth_rsa_decode.
    pub password: String,
    pub captcha: String,
    pub totp_code: Option<String>,
}

impl AppService {
    pub const TOTP_KEY: &'static str = "totp_key";
    /// Password login with captcha and (when a challenge is pending) 2FA.
    pub async fn auth_login(&self, params: LoginParams, context: Session) -> Result<(), AppError> {
        self.auth_check_captcha(&context, params.captcha).await?;
        let password = self.auth_rsa_decode(&context, params.password).await?;
        // Resolve the account by username first, then by email.
        let user = match self
            .utils_find_user_by_username(params.username.clone())
            .await
        {
            Ok(user) => user,
            Err(_) => {
                self.utils_find_user_by_email(params.username.clone())
                    .await?
            }
        };
        let user_password = user_password::Entity::find()
            .filter(user_password::Column::User.eq(user.uid))
            .one(&self.db)
            .await
            .ok()
            .flatten()
            .ok_or(AppError::UserNotFound)?;
        let password_hash =
            PasswordHash::new(&user_password.password_hash).map_err(|_| AppError::UserNotFound)?;

        if let Err(_e) = Argon2::default().verify_password(password.as_bytes(), &password_hash) {
            slog::warn!(self.logs, "Login failed: invalid password"; "username" => &params.username, "ip" => context.ip_address());
            return Err(AppError::UserNotFound);
        }

        // A TOTP_KEY in the session means a 2FA challenge is pending.
        let needs_totp_verification = context
            .get::<String>(Self::TOTP_KEY)
            .ok()
            .flatten()
            .is_some();

        if needs_totp_verification {
            if let Some(ref totp_code) = params.totp_code {
                if !self.auth_2fa_verify_login(&context, totp_code).await? {
                    slog::warn!(self.logs, "Login failed: invalid 2FA code"; "username" => &params.username, "ip" => context.ip_address());
                    return Err(AppError::InvalidTwoFactorCode);
                }
            } else {
                // BUG FIX: previously a pending 2FA challenge with no code
                // fell through and completed the login, bypassing 2FA
                // entirely. Re-issue the requirement instead.
                return Err(AppError::TwoFactorRequired);
            }
        } else if !self.auth_2fa_status(&context).await?.is_enabled {
            // NOTE(review): this branch challenges users whose 2FA status is
            // NOT enabled — the condition polarity looks inverted relative to
            // the log message; confirm auth_2fa_status semantics.
            let user_uid = user.uid;
            let mut rng = rand::rng();
            let mut sha = sha1::Sha1::default();
            // Derive an unguessable one-shot challenge key from random data.
            for _ in 0..5 {
                sha.update(
                    (0..1024)
                        .map(|_| {
                            format!(
                                "{:04}-{:04}-{:04}",
                                rng.random_range(0..10000),
                                rng.random_range(0..10000),
                                rng.random_range(0..10000)
                            )
                        })
                        .collect::<String>()
                        .as_bytes(),
                )
            }
            let key = format!("{:?}", sha.finalize());
            context.insert(Self::TOTP_KEY, key.clone()).ok();
            // Map challenge key -> user uid in Redis for 5 minutes (best effort).
            if let Ok(mut conn) = self.cache.conn().await {
                conn.set_ex::<_, _, ()>(key, user_uid.to_string(), 60 * 5)
                    .await
                    .ok();
            }
            slog::info!(self.logs, "Login 2FA triggered for new 2FA user"; "username" => &params.username, "ip" => context.ip_address());
            return Err(AppError::TwoFactorRequired);
        }

        let mut arch = user.clone().into_active_model();
        arch.last_sign_in_at = Set(Some(chrono::Utc::now()));
        arch.update(&self.db)
            .await
            .map_err(|_| AppError::UserNotFound)?;

        // Activity log is best effort; a failure must not block the login.
        let _ = user_activity_log::ActiveModel {
            user_uid: Set(Some(user.uid)),
            action: Set("login".to_string()),
            ip_address: Set(context.ip_address()),
            user_agent: Set(context.user_agent()),
            details: Set(Some(serde_json::json!({
                "method": "password",
                "username": user.username,
                "2fa_used": params.totp_code.is_some()
            }))
            .into()),
            created_at: Set(chrono::Utc::now()),
            ..Default::default()
        }
        .insert(&self.db)
        .await;

        context.set_user(user.uid);
        // The RSA keypair is one-shot; drop it after a successful login.
        context.remove(Self::RSA_PRIVATE_KEY);
        context.remove(Self::RSA_PUBLIC_KEY);
        slog::info!(self.logs, "User logged in successfully"; "user_uid" => %user.uid, "username" => &user.username, "ip" => context.ip_address(), "2fa_used" => params.totp_code.is_some());
        Ok(())
    }
}
diff --git a/libs/service/auth/logout.rs b/libs/service/auth/logout.rs
new file mode 100644
index 0000000..e4158b3
--- /dev/null
+++ b/libs/service/auth/logout.rs
@@ -0,0 +1,28 @@
use crate::AppService;
use crate::error::AppError;
use models::users::user_activity_log;
use sea_orm::*;
use serde_json::json;
use session::Session;

impl AppService {
    /// Log the user out: record an activity entry (best effort) and clear the session.
    pub async fn auth_logout(&self, context: &Session) -> Result<(), AppError> {
        if let Some(user_uid) = context.user() {
            slog::info!(self.logs, "User logged out"; "user_uid" => %user_uid, "ip" => context.ip_address());
            let _ = user_activity_log::ActiveModel {
                user_uid: Set(Option::from(user_uid)),
                action: Set("logout".to_string()),
                ip_address: Set(context.ip_address()),
                user_agent: Set(context.user_agent()),
                details: Set(json!({})),
                created_at: Set(chrono::Utc::now()),
                ..Default::default()
            }
            .insert(&self.db)
            .await;
        }
        context.clear_user();
        context.clear();
        Ok(())
    }
}
diff --git a/libs/service/auth/me.rs b/libs/service/auth/me.rs
new file mode 100644
index 0000000..75dee84
--- /dev/null
+++ b/libs/service/auth/me.rs
@@ -0,0 +1,29 @@
use crate::AppService;
use crate::error::AppError;
use serde::{Deserialize, Serialize};
use session::Session;
use uuid::Uuid;

#[derive(Deserialize,
Serialize, Clone, Debug, utoipa::ToSchema)]
pub struct ContextMe {
    pub uid: Uuid,
    pub username: String,
    pub display_name: Option<String>,
    pub avatar_url: Option<String>,
    pub has_unread_notifications: u64,
}
impl AppService {
    /// Return the profile of the currently authenticated user.
    pub async fn auth_me(&self, ctx: Session) -> Result<ContextMe, AppError> {
        let user_id = ctx.user().ok_or(AppError::Unauthorized)?;
        let user = self.utils_find_user_by_uid(user_id).await?;
        // let notify = self.notify_get_user_unread_count(&ctx).await?;
        Ok(ContextMe {
            uid: user.uid,
            username: user.username,
            display_name: user.display_name,
            avatar_url: user.avatar_url,
            // has_unread_notifications: notify,
            // Unread count is stubbed to 0 until the notify service is wired up.
            has_unread_notifications: 0,
        })
    }
}
diff --git a/libs/service/auth/mod.rs b/libs/service/auth/mod.rs
new file mode 100644
index 0000000..71c4ed2
--- /dev/null
+++ b/libs/service/auth/mod.rs
@@ -0,0 +1,9 @@
pub mod captcha;
pub mod email;
pub mod login;
pub mod logout;
pub mod me;
pub mod password;
pub mod register;
pub mod rsa;
pub mod totp;
diff --git a/libs/service/auth/password.rs b/libs/service/auth/password.rs
new file mode 100644
index 0000000..e6a7ad9
--- /dev/null
+++ b/libs/service/auth/password.rs
@@ -0,0 +1,188 @@
use crate::AppService;
use crate::error::AppError;
use argon2::password_hash::{Salt, SaltString};
use argon2::{Argon2, PasswordHash, PasswordHasher, PasswordVerifier};
use models::users::{user_activity_log, user_password, user_password_reset};
use rand::RngExt;
use sea_orm::*;
use serde::{Deserialize, Serialize};
use session::Session;
use uuid::Uuid;

// Passwords in all three param structs arrive RSA-encrypted and are decoded
// via auth_rsa_decode before use.
#[derive(Deserialize, Serialize, Clone, Debug, utoipa::ToSchema)]
pub struct ChangePasswordParams {
    pub old_password: String,
    pub new_password: String,
}

#[derive(Deserialize, Serialize, Clone, Debug, utoipa::ToSchema)]
pub struct ResetPasswordParams {
    pub email: String,
}

#[derive(Deserialize, Serialize, Clone, Debug, utoipa::ToSchema)]
pub struct ConfirmResetPasswordParams {
    pub token: String,
    pub new_password: String,
}

impl
AppService { + pub async fn auth_change_password( + &self, + context: &Session, + params: ChangePasswordParams, + ) -> Result<(), AppError> { + let user_uid = context.user().ok_or(AppError::Unauthorized)?; + + let old_password = self.auth_rsa_decode(context, params.old_password).await?; + let new_password = self.auth_rsa_decode(context, params.new_password).await?; + + Self::validate_password_strength(&new_password)?; + + let user_password = user_password::Entity::find() + .filter(user_password::Column::User.eq(user_uid)) + .one(&self.db) + .await + .ok() + .flatten() + .ok_or(AppError::UserNotFound)?; + + let password_hash = + PasswordHash::new(&user_password.password_hash).map_err(|_| AppError::UserNotFound)?; + + Argon2::default() + .verify_password(old_password.as_bytes(), &password_hash) + .map_err(|_| AppError::UserNotFound)?; + + let salt = SaltString::generate(&mut rsa::rand_core::OsRng::default()); + let new_password_hash = Argon2::default() + .hash_password(new_password.as_bytes(), Salt::from_b64(&*salt.to_string())?) + .map_err(|_| AppError::UserNotFound)? 
+ .to_string(); + + let mut active_password: user_password::ActiveModel = user_password.into(); + active_password.password_hash = Set(new_password_hash); + active_password.password_salt = Set(Some(salt.to_string())); + + active_password + .update(&self.db) + .await + .map_err(|_| AppError::UserNotFound)?; + + slog::info!(self.logs, "Password changed"; "user_uid" => %user_uid, "ip" => context.ip_address()); + let _ = user_activity_log::ActiveModel { + user_uid: Set(Option::from(user_uid)), + action: Set("password_change".to_string()), + ip_address: Set(context.ip_address()), + user_agent: Set(context.user_agent()), + details: Set(serde_json::json!({ + "method": "change_password" + })), + created_at: Set(chrono::Utc::now()), + ..Default::default() + } + .insert(&self.db) + .await; + + Ok(()) + } + + pub async fn auth_request_password_reset( + &self, + params: ResetPasswordParams, + ) -> Result<(), AppError> { + let user = self.utils_find_user_by_email(params.email.clone()).await?; + + let token = self.generate_reset_token(); + + let expires_at = chrono::Utc::now() + chrono::Duration::hours(1); + + let _ = user_password_reset::Entity::delete_many() + .filter(user_password_reset::Column::UserUid.eq(user.uid)) + .filter(user_password_reset::Column::Used.eq(false)) + .exec(&self.db) + .await; + + let reset_token = user_password_reset::ActiveModel { + token: Set(token.clone()), + user_uid: Set(user.uid), + expires_at: Set(expires_at), + used: Set(false), + created_at: Set(chrono::Utc::now()), + }; + + reset_token + .insert(&self.db) + .await + .map_err(|_| AppError::UserNotFound)?; + + let domain = self + .config + .main_domain() + .map_err(|_| AppError::DoMainNotSet)?; + + let email_address = params.email.clone(); + let reset_link = format!("https://{}/auth/reset-password?token={}", domain, token); + + let envelope = queue::EmailEnvelope { + id: Uuid::new_v4(), + to: email_address.clone(), + subject: "Password Reset Request".to_string(), + body: format!( + "Hello 
{},\n\n\ + You have requested to reset your password. Please click the link below to reset your password:\n\n\ + {}\n\n\ + This link will expire in 1 hour.\n\n\ + If you did not request this password reset, please ignore this email.\n\n\ + Best regards,\n\ + GitDataAI Team", + user.username, reset_link + ), + created_at: chrono::Utc::now(), + }; + + self.queue_producer + .publish_email(envelope) + .await + .map_err(|_| AppError::UserNotFound)?; + + slog::info!(self.logs, "Password reset email queued"; "email" => email_address); + Ok(()) + } + pub fn validate_password_strength(password: &str) -> Result<(), AppError> { + if password.len() < 8 { + return Err(AppError::UserNotFound); + } + + let has_uppercase = password.chars().any(|c| c.is_uppercase()); + let has_lowercase = password.chars().any(|c| c.is_lowercase()); + let has_digit = password.chars().any(|c| c.is_numeric()); + + if !has_uppercase || !has_lowercase || !has_digit { + return Err(AppError::UserNotFound); + } + + Ok(()) + } + + pub fn generate_reset_token(&self) -> String { + use rand::distr::Alphanumeric; + #[allow(deprecated)] + let mut rng = rand::rng(); + let token: String = (0..64).map(|_| rng.sample(Alphanumeric) as char).collect(); + format!("rst_{}", token) + } + + pub async fn auth_cleanup_expired_reset_tokens(&self) -> Result { + let now = chrono::Local::now().naive_local(); + + let result = user_password_reset::Entity::delete_many() + .filter(user_password_reset::Column::ExpiresAt.lt(now)) + .exec(&self.db) + .await + .map_err(|_| AppError::UserNotFound)?; + + slog::info!(self.logs, "Expired password reset tokens cleaned up"; "count" => result.rows_affected); + Ok(result.rows_affected) + } +} diff --git a/libs/service/auth/register.rs b/libs/service/auth/register.rs new file mode 100644 index 0000000..26d72c8 --- /dev/null +++ b/libs/service/auth/register.rs @@ -0,0 +1,171 @@ +use crate::AppService; +use crate::error::AppError; +use argon2::password_hash::{Salt, SaltString}; +use 
argon2::{Argon2, PasswordHasher}; +use models::users::{user, user_activity_log, user_email, user_password}; +use models::workspaces::{WorkspaceRole, workspace, workspace_membership}; +use sea_orm::*; +use serde::{Deserialize, Serialize}; +use session::Session; +use uuid::Uuid; + +#[derive(Deserialize, Serialize, Clone, Debug, utoipa::ToSchema)] +pub struct RegisterParams { + pub username: String, + pub email: String, + pub password: String, + pub captcha: String, +} + +impl AppService { + pub async fn auth_register( + &self, + params: RegisterParams, + context: &Session, + ) -> Result { + self.auth_check_captcha(&context, params.captcha).await?; + let password = self.auth_rsa_decode(&context, params.password).await?; + if self + .utils_find_user_by_username(params.username.clone()) + .await + .is_ok() + { + slog::warn!(self.logs, "Registration failed: username already exists"; "username" => ¶ms.username); + return Err(AppError::UserNameExists); + } + + if self + .utils_find_user_by_email(params.email.clone()) + .await + .is_ok() + { + slog::warn!(self.logs, "Registration failed: email already exists"; "email" => ¶ms.email); + return Err(AppError::EmailExists); + } + + let user_uid = Uuid::now_v7(); + let now = chrono::Utc::now(); + let txn = self.db.begin().await.map_err(|_| AppError::TxnError)?; + + let user_model = user::ActiveModel { + uid: Set(user_uid), + username: Set(params.username.clone()), + display_name: Set(Some(params.username.clone())), + avatar_url: Set(None), + website_url: Set(None), + organization: Set(None), + last_sign_in_at: Set(None), + created_at: Set(now), + updated_at: Set(now), + }; + + let user = user_model.insert(&txn).await.map_err(|e| { + slog::error!(self.logs, "{}", format!("Failed to insert user: {:?}", e)); + AppError::UserNotFound + })?; + + let user_email_model = user_email::ActiveModel { + user: Set(user_uid), + email: Set(params.email), + created_at: Set(now), + }; + + user_email_model.insert(&txn).await.map_err(|e| { + 
slog::error!( + self.logs, + "{}", + format!("Failed to insert user email: {:?}", e) + ); + AppError::UserNotFound + })?; + + let salt = SaltString::generate(&mut rsa::rand_core::OsRng::default()); + let password_hash = Argon2::default() + .hash_password(password.as_bytes(), Salt::from_b64(&*salt.to_string())?) + .map_err(|e| { + slog::error!(self.logs, "{}", format!("Failed to hash password: {:?}", e)); + AppError::UserNotFound + })? + .to_string(); + let user_password_model = user_password::ActiveModel { + user: Set(user_uid), + password_hash: Set(password_hash), + password_salt: Set(Some(salt.to_string())), + is_active: Set(true), + created_at: Set(now), + updated_at: Set(now), + }; + + user_password_model.insert(&txn).await.map_err(|e| { + slog::error!( + self.logs, + "{}", + format!("Failed to insert user password: {:?}", e) + ); + AppError::UserNotFound + })?; + + let _ = user_activity_log::ActiveModel { + user_uid: Set(Option::from(user_uid)), + action: Set("register".to_string()), + ip_address: Set(context.ip_address()), + user_agent: Set(context.user_agent()), + details: Set(serde_json::json!({ + "username": user.username.clone(), + "method": "password" + }) + .into()), + created_at: Set(now), + ..Default::default() + } + .insert(&txn) + .await; + + // Auto-create personal workspace for the new user + let personal_slug = format!("~{}", params.username); + let ws = workspace::ActiveModel { + id: Set(Uuid::now_v7()), + slug: Set(personal_slug), + name: Set(format!("{} 的工作空间", params.username)), + description: Set(None), + avatar_url: Set(None), + plan: Set("free".to_string()), + billing_email: Set(None), + stripe_customer_id: Set(None), + stripe_subscription_id: Set(None), + plan_expires_at: Set(None), + deleted_at: Set(None), + created_at: Set(now), + updated_at: Set(now), + }; + let ws = ws.insert(&txn).await.map_err(|e| { + slog::error!( + self.logs, + "{}", + format!("Failed to insert personal workspace: {:?}", e) + ); + AppError::UserNotFound + })?; + + 
let _ = workspace_membership::ActiveModel { + id: Default::default(), + workspace_id: Set(ws.id), + user_id: Set(user_uid), + role: Set(WorkspaceRole::Owner.to_string()), + status: Set("active".to_string()), + invited_by: Set(None), + joined_at: Set(now), + invite_token: Set(None), + invite_expires_at: Set(None), + } + .insert(&txn) + .await; + txn.commit().await.map_err(|_| AppError::TxnError)?; + context.set_user(user_uid); + context.set_current_workspace_id(ws.id); + context.remove(Self::RSA_PRIVATE_KEY); + context.remove(Self::RSA_PUBLIC_KEY); + slog::info!(self.logs, "User registered successfully"; "user_uid" => %user_uid, "username" => &user.username); + Ok(user) + } +} diff --git a/libs/service/auth/rsa.rs b/libs/service/auth/rsa.rs new file mode 100644 index 0000000..8af58a3 --- /dev/null +++ b/libs/service/auth/rsa.rs @@ -0,0 +1,66 @@ +use crate::AppService; +use crate::error::AppError; +use base64::Engine; +use rsa::pkcs1::{DecodeRsaPrivateKey, EncodeRsaPrivateKey, EncodeRsaPublicKey}; +use rsa::{Pkcs1v15Encrypt, RsaPrivateKey, RsaPublicKey}; +use serde::{Deserialize, Serialize}; +use session::Session; + +#[derive(Deserialize, Serialize, Clone, Debug, utoipa::ToSchema)] +pub struct RsaResponse { + pub public_key: String, +} + +impl AppService { + pub const RSA_PUBLIC_KEY: &'static str = "rsa:public"; + pub const RSA_PRIVATE_KEY: &'static str = "rsa:private"; + const RSA_BIT_SIZE: usize = 2048; + pub async fn auth_rsa(&self, context: &Session) -> Result { + #[allow(deprecated)] + let mut rng = rsa::rand_core::OsRng::default(); + let Ok(priv_key) = RsaPrivateKey::new(&mut rng, Self::RSA_BIT_SIZE) else { + slog::error!(self.logs, "RSA key generation failed"); + return Err(AppError::RsaGenerationError); + }; + let pub_key = RsaPublicKey::from(&priv_key); + let priv_pem = priv_key + .to_pkcs1_pem(Default::default()) + .map_err(|_| AppError::RsaGenerationError)? 
+ .to_string(); + let pub_pem = pub_key + .to_pkcs1_pem(Default::default()) + .map_err(|_| AppError::RsaGenerationError)? + .to_string(); + context + .insert(Self::RSA_PUBLIC_KEY, pub_pem.clone()) + .map_err(|_| AppError::RsaGenerationError)?; + context + .insert(Self::RSA_PRIVATE_KEY, priv_pem) + .map_err(|_| AppError::RsaGenerationError)?; + Ok(RsaResponse { + public_key: pub_pem, + }) + } + pub async fn auth_rsa_decode( + &self, + context: &Session, + data: String, + ) -> Result { + let priv_key = context + .get::(Self::RSA_PRIVATE_KEY) + .map_err(|_| AppError::RsaDecodeError)? + .ok_or(AppError::RsaDecodeError)?; + let Ok(priv_key) = RsaPrivateKey::from_pkcs1_pem(&priv_key) else { + slog::warn!(self.logs, "RSA decode failed: invalid private key"; "ip" => context.ip_address()); + return Err(AppError::RsaDecodeError); + }; + let cipher = base64::engine::general_purpose::STANDARD + .decode(&data) + .map_err(|_| AppError::RsaDecodeError)?; + let Ok(decrypted) = priv_key.decrypt(Pkcs1v15Encrypt, &cipher) else { + slog::warn!(self.logs, "RSA decrypt failed"; "ip" => context.ip_address()); + return Err(AppError::RsaDecodeError); + }; + Ok(String::from_utf8_lossy(&decrypted).to_string()) + } +} diff --git a/libs/service/auth/totp.rs b/libs/service/auth/totp.rs new file mode 100644 index 0000000..507a265 --- /dev/null +++ b/libs/service/auth/totp.rs @@ -0,0 +1,431 @@ +use crate::AppService; +use crate::error::AppError; +use models::users::{user_2fa, user_activity_log, user_password}; +use rand::RngExt; +use redis::AsyncCommands; +use sea_orm::*; +use serde::{Deserialize, Serialize}; +use serde_json::json; +use session::Session; +use uuid::Uuid; + +#[derive(Deserialize, Serialize, Clone, Debug, utoipa::ToSchema)] +pub struct Enable2FAResponse { + pub secret: String, + pub qr_code: String, + pub backup_codes: Vec, +} + +#[derive(Deserialize, Serialize, Clone, Debug, utoipa::ToSchema)] +pub struct Verify2FAParams { + pub code: String, +} + +#[derive(Deserialize, Serialize, 
Clone, Debug, utoipa::ToSchema)] +pub struct Disable2FAParams { + pub code: String, + pub password: String, +} + +#[derive(Deserialize, Serialize, Clone, Debug, utoipa::ToSchema)] +pub struct Get2FAStatusResponse { + pub is_enabled: bool, + pub method: Option, + pub has_backup_codes: bool, +} + +impl AppService { + pub async fn auth_2fa_enable(&self, context: &Session) -> Result { + let user_uid = context.user().ok_or(AppError::Unauthorized)?; + let user = self.utils_find_user_by_uid(user_uid).await?; + + let existing_2fa = user_2fa::Entity::find_by_id(user_uid).one(&self.db).await?; + + if let Some(ref existing) = existing_2fa { + if existing.is_enabled { + return Err(AppError::TwoFactorAlreadyEnabled); + } + } + + let secret = self.generate_totp_secret(); + + let backup_codes = self.generate_backup_codes(10); + + let issuer = "GitDataAI"; + let account_name = format!("{}:{}", issuer, user.username); + let qr_data = format!( + "otpauth://totp/{}?secret={}&issuer={}", + account_name, secret, issuer + ); + + let now = chrono::Utc::now(); + let model = user_2fa::ActiveModel { + user: Set(user_uid), + method: Set("totp".to_string()), + secret: Set(Some(secret.clone())), + backup_codes: Set(serde_json::json!(backup_codes)), + is_enabled: Set(false), + created_at: Set(now), + updated_at: Set(now), + }; + + if existing_2fa.is_some() { + model.update(&self.db).await?; + } else { + model.insert(&self.db).await?; + } + + slog::info!(self.logs, "2FA setup initiated"; "user_uid" => %user_uid); + Ok(Enable2FAResponse { + secret, + qr_code: qr_data, + backup_codes, + }) + } + + pub async fn auth_2fa_verify_and_enable( + &self, + context: &Session, + params: Verify2FAParams, + ) -> Result<(), AppError> { + let user_uid = context.user().ok_or(AppError::Unauthorized)?; + + let two_fa = user_2fa::Entity::find_by_id(user_uid) + .one(&self.db) + .await? 
+ .ok_or(AppError::TwoFactorNotSetup)?; + + if two_fa.is_enabled { + return Err(AppError::TwoFactorAlreadyEnabled); + } + + let secret = two_fa.secret.as_ref().ok_or(AppError::TwoFactorNotSetup)?; + + if !self.verify_totp_code(secret, ¶ms.code)? { + slog::warn!(self.logs, "2FA verification failed during setup"; "user_uid" => %user_uid, "ip" => context.ip_address()); + return Err(AppError::InvalidTwoFactorCode); + } + + let mut active_model: user_2fa::ActiveModel = two_fa.into(); + active_model.is_enabled = Set(true); + active_model.updated_at = Set(chrono::Utc::now()); + active_model.update(&self.db).await?; + + slog::info!(self.logs, "2FA enabled"; "user_uid" => %user_uid, "ip" => context.ip_address()); + let _ = user_activity_log::ActiveModel { + user_uid: Set(Option::from(user_uid)), + action: Set("2fa_enabled".to_string()), + ip_address: Set(context.ip_address()), + user_agent: Set(context.user_agent()), + details: Set(serde_json::json!({ + "method": "totp" + })), + created_at: Set(chrono::Utc::now()), + ..Default::default() + } + .insert(&self.db) + .await; + + Ok(()) + } + + pub async fn auth_2fa_disable( + &self, + context: &Session, + params: Disable2FAParams, + ) -> Result<(), AppError> { + let user_uid = context.user().ok_or(AppError::Unauthorized)?; + + let password = self.auth_rsa_decode(context, params.password).await?; + self.verify_user_password(user_uid, &password).await?; + + let two_fa = user_2fa::Entity::find_by_id(user_uid) + .one(&self.db) + .await? + .ok_or(AppError::TwoFactorNotSetup)?; + + if !two_fa.is_enabled { + return Err(AppError::TwoFactorNotEnabled); + } + + let secret = two_fa.secret.as_ref().ok_or(AppError::TwoFactorNotSetup)?; + let backup_codes: Vec = + serde_json::from_value(two_fa.backup_codes.clone()).unwrap_or_default(); + let is_valid = + self.verify_totp_code(secret, ¶ms.code)? 
|| backup_codes.contains(¶ms.code); + + if !is_valid { + return Err(AppError::InvalidTwoFactorCode); + } + + user_2fa::Entity::delete_by_id(user_uid) + .exec(&self.db) + .await?; + + slog::info!(self.logs, "2FA disabled"; "user_uid" => %user_uid, "ip" => context.ip_address()); + let _ = user_activity_log::ActiveModel { + user_uid: Set(Some(user_uid)), + action: Set("2fa_disabled".to_string()), + ip_address: Set(context.ip_address()), + user_agent: Set(context.user_agent()), + details: Set(json!({})), + created_at: Set(chrono::Utc::now()), + ..Default::default() + } + .insert(&self.db) + .await; + + Ok(()) + } + + pub async fn auth_2fa_verify(&self, user_uid: Uuid, code: &str) -> Result { + let two_fa = user_2fa::Entity::find_by_id(user_uid).one(&self.db).await?; + + let Some(two_fa) = two_fa else { + return Ok(true); + }; + + if !two_fa.is_enabled { + return Ok(true); + } + + let secret = two_fa.secret.as_ref().ok_or(AppError::TwoFactorNotSetup)?; + + if self.verify_totp_code(secret, code)? { + return Ok(true); + } + + let mut backup_codes: Vec = + serde_json::from_value(two_fa.backup_codes.clone()).unwrap_or_default(); + if backup_codes.contains(&code.to_string()) { + backup_codes.retain(|c| c != code); + + let mut active_model: user_2fa::ActiveModel = user_2fa::Entity::find_by_id(user_uid) + .one(&self.db) + .await? + .ok_or(AppError::TwoFactorNotSetup)? 
+ .into(); + + active_model.backup_codes = Set(serde_json::json!(backup_codes)); + active_model.updated_at = Set(chrono::Utc::now()); + active_model.update(&self.db).await?; + + return Ok(true); + } + + Ok(false) + } + + pub async fn auth_2fa_status( + &self, + context: &Session, + ) -> Result { + let user_uid = context.user().ok_or(AppError::Unauthorized)?; + + let two_fa = user_2fa::Entity::find_by_id(user_uid).one(&self.db).await?; + + match two_fa { + Some(fa) => { + let backup_codes: Vec = + serde_json::from_value(fa.backup_codes).unwrap_or_default(); + Ok(Get2FAStatusResponse { + is_enabled: fa.is_enabled, + method: Some(fa.method), + has_backup_codes: !backup_codes.is_empty(), + }) + } + None => Ok(Get2FAStatusResponse { + is_enabled: false, + method: None, + has_backup_codes: false, + }), + } + } + + pub async fn auth_2fa_verify_login( + &self, + context: &Session, + code: &str, + ) -> Result { + let totp_key: String = context + .get::(Self::TOTP_KEY) + .ok() + .flatten() + .ok_or(AppError::TwoFactorNotSetup)?; + + if let Ok(mut conn) = self.cache.conn().await { + let stored_user_uid: Option = conn.get(totp_key.as_str()).await.ok(); + + if let Some(user_uid_str) = stored_user_uid { + let user_uid = + Uuid::parse_str(&user_uid_str).map_err(|_| AppError::UserNotFound)?; + let two_fa = user_2fa::Entity::find_by_id(user_uid).one(&self.db).await?; + if let Some(two_fa) = two_fa { + if two_fa.is_enabled { + let secret = two_fa.secret.as_ref().ok_or(AppError::TwoFactorNotSetup)?; + if self.verify_totp_code(secret, code)? 
{ + let _: Option<()> = conn.del(totp_key.as_str()).await.ok(); + slog::info!(self.logs, "2FA verification succeeded during login"; "user_uid" => %user_uid, "ip" => context.ip_address()); + return Ok(true); + } else { + slog::warn!(self.logs, "2FA verification failed during login"; "user_uid" => %user_uid, "ip" => context.ip_address()); + } + } + } + } + } + + Ok(false) + } + + pub async fn auth_2fa_regenerate_backup_codes( + &self, + context: &Session, + password: String, + ) -> Result, AppError> { + let user_uid = context.user().ok_or(AppError::Unauthorized)?; + + let password = self.auth_rsa_decode(context, password).await?; + self.verify_user_password(user_uid, &password).await?; + + let two_fa = user_2fa::Entity::find_by_id(user_uid) + .one(&self.db) + .await? + .ok_or(AppError::TwoFactorNotSetup)?; + + if !two_fa.is_enabled { + return Err(AppError::TwoFactorNotEnabled); + } + + let backup_codes = self.generate_backup_codes(10); + + let mut active_model: user_2fa::ActiveModel = two_fa.into(); + active_model.backup_codes = Set(serde_json::json!(backup_codes)); + active_model.updated_at = Set(chrono::Utc::now()); + active_model.update(&self.db).await?; + + let _ = user_activity_log::ActiveModel { + user_uid: Set(Some(user_uid)), + action: Set("2fa_backup_codes_regenerated".to_string()), + ip_address: Set(context.ip_address()), + user_agent: Set(context.user_agent()), + details: Set(json!({})), + created_at: Set(chrono::Utc::now()), + ..Default::default() + } + .insert(&self.db) + .await; + + slog::info!(self.logs, "2FA backup codes regenerated"; "user_uid" => %user_uid, "ip" => context.ip_address()); + Ok(backup_codes) + } + + fn generate_totp_secret(&self) -> String { + const CHARSET: &[u8] = b"ABCDEFGHIJKLMNOPQRSTUVWXYZ234567"; + #[allow(deprecated)] + let mut rng = rand::rng(); + (0..32) + .map(|_| { + #[allow(deprecated)] + let idx = rng.random_range(0..CHARSET.len()); + CHARSET[idx] as char + }) + .collect() + } + + fn generate_backup_codes(&self, count: 
usize) -> Vec { + #[allow(deprecated)] + let mut rng = rand::rng(); + (0..count) + .map(|_| { + format!( + "{:04}-{:04}-{:04}", + rng.random_range(0..10000), + rng.random_range(0..10000), + rng.random_range(0..10000) + ) + }) + .collect() + } + + fn verify_totp_code(&self, secret: &str, code: &str) -> Result { + let now = chrono::Utc::now().timestamp() as u64; + let time_step = 30; + let counter = now / time_step; + + for offset in [-1i64, 0, 1] { + let test_counter = (counter as i64 + offset) as u64; + let expected_code = self.generate_totp_code(secret, test_counter)?; + if expected_code == code { + return Ok(true); + } + } + + Ok(false) + } + + fn generate_totp_code(&self, secret: &str, counter: u64) -> Result { + use hmac::{Hmac, Mac}; + use sha1::Sha1; + + let secret_bytes = self.decode_base32(secret)?; + + let counter_bytes = counter.to_be_bytes(); + + let mut mac = Hmac::::new_from_slice(&secret_bytes) + .map_err(|_| AppError::InvalidTwoFactorCode)?; + mac.update(&counter_bytes); + let result = mac.finalize().into_bytes(); + + let offset = (result[19] & 0x0f) as usize; + let code = u32::from_be_bytes([ + result[offset] & 0x7f, + result[offset + 1], + result[offset + 2], + result[offset + 3], + ]); + + Ok(format!("{:06}", code % 1_000_000)) + } + + fn decode_base32(&self, input: &str) -> Result, AppError> { + const CHARSET: &str = "ABCDEFGHIJKLMNOPQRSTUVWXYZ234567"; + let input = input.to_uppercase().replace("=", ""); + let mut bits = 0u64; + let mut bit_count = 0; + let mut output = Vec::new(); + + for c in input.chars() { + let val = CHARSET.find(c).ok_or(AppError::InvalidTwoFactorCode)? 
as u64; + bits = (bits << 5) | val; + bit_count += 5; + + if bit_count >= 8 { + bit_count -= 8; + output.push((bits >> bit_count) as u8); + bits &= (1 << bit_count) - 1; + } + } + + Ok(output) + } + + async fn verify_user_password(&self, user_uid: Uuid, password: &str) -> Result<(), AppError> { + use argon2::{Argon2, PasswordHash, PasswordVerifier}; + + let user_password = user_password::Entity::find() + .filter(user_password::Column::User.eq(user_uid)) + .one(&self.db) + .await? + .ok_or(AppError::UserNotFound)?; + + let password_hash = PasswordHash::new(&user_password.password_hash) + .map_err(|_| AppError::InvalidPassword)?; + + Argon2::default() + .verify_password(password.as_bytes(), &password_hash) + .map_err(|_| AppError::InvalidPassword)?; + + Ok(()) + } +} diff --git a/libs/service/error.rs b/libs/service/error.rs new file mode 100644 index 0000000..11974d6 --- /dev/null +++ b/libs/service/error.rs @@ -0,0 +1,264 @@ +use git::GitError; + +#[derive(Debug)] +pub enum AppError { + UserNotFound, + RsaGenerationError, + RsaDecodeError, + CaptchaError, + TwoFactorRequired, + Unauthorized, + DoMainNotSet, + UserNameExists, + EmailExists, + TxnError, + PasswordHashError(String), + TwoFactorAlreadyEnabled, + TwoFactorNotSetup, + InvalidTwoFactorCode, + TwoFactorNotEnabled, + DatabaseError(String), + InvalidPassword, + ProjectNotFound, + NoPower, + InternalError, + NotFound(String), + RoleParseError, + ProjectNameAlreadyExists, + RepoNameAlreadyExists, + AvatarUploadError(String), + InternalServerError(String), + PermissionDenied, + RepoNotFound, + RepoForBidAccess, + GitError(GitError), + SerdeError(serde_json::Error), + Io(std::io::Error), + BadRequest(String), + Forbidden(String), + WorkspaceNotFound, + WorkspaceSlugAlreadyExists, + WorkspaceNameAlreadyExists, + NotWorkspaceMember, + WorkspaceInviteTokenInvalid, + WorkspaceInviteExpired, + WorkspaceInviteAlreadyAccepted, + Conflict(String), +} + +impl AppError { + pub const fn code(&self) -> i32 { + use 
AppError::*; + match self { + BadRequest(_) => 40000, + CaptchaError => 40001, + SerdeError(_) => 40002, + RsaDecodeError => 40003, + RoleParseError => 40004, + TwoFactorNotSetup => 40005, + TwoFactorNotEnabled => 40006, + Unauthorized => 40101, + InvalidTwoFactorCode => 40102, + InvalidPassword => 40103, + NoPower => 40301, + PermissionDenied => 40302, + Forbidden(_) => 40304, + RepoForBidAccess => 40303, + NotFound(_) => 40401, + UserNotFound => 40402, + ProjectNotFound => 40403, + RepoNotFound => 40404, + UserNameExists => 40901, + EmailExists => 40902, + ProjectNameAlreadyExists => 40903, + RepoNameAlreadyExists => 40905, + TwoFactorAlreadyEnabled => 40904, + TwoFactorRequired => 42801, + DoMainNotSet => 50001, + TxnError => 50002, + RsaGenerationError => 50003, + PasswordHashError(_) => 50004, + DatabaseError(_) => 50005, + GitError(_) => 50006, + Io(_) => 50007, + InternalError => 50008, + InternalServerError(_) => 50009, + AvatarUploadError(_) => 50010, + WorkspaceNotFound => 40405, + WorkspaceSlugAlreadyExists => 40906, + WorkspaceNameAlreadyExists => 40907, + NotWorkspaceMember => 40305, + WorkspaceInviteTokenInvalid => 40006, + WorkspaceInviteExpired => 40007, + WorkspaceInviteAlreadyAccepted => 40908, + Conflict(_) => 40909, + } + } + + pub const fn http_status_code(&self) -> u16 { + use AppError::*; + match self { + BadRequest(_) => 400, + CaptchaError => 400, + SerdeError(_) => 400, + RsaDecodeError => 400, + RoleParseError => 400, + TwoFactorNotSetup => 400, + TwoFactorNotEnabled => 400, + WorkspaceInviteTokenInvalid => 400, + WorkspaceInviteExpired => 400, + Unauthorized => 401, + InvalidTwoFactorCode => 401, + InvalidPassword => 401, + NoPower => 403, + PermissionDenied => 403, + Forbidden(_) => 403, + RepoForBidAccess => 403, + NotWorkspaceMember => 403, + NotFound(_) => 404, + UserNotFound => 404, + ProjectNotFound => 404, + RepoNotFound => 404, + WorkspaceNotFound => 404, + UserNameExists => 409, + EmailExists => 409, + ProjectNameAlreadyExists 
=> 409, + RepoNameAlreadyExists => 409, + TwoFactorAlreadyEnabled => 409, + WorkspaceSlugAlreadyExists => 409, + WorkspaceNameAlreadyExists => 409, + WorkspaceInviteAlreadyAccepted => 409, + Conflict(_) => 409, + TwoFactorRequired => 428, + DoMainNotSet => 500, + TxnError => 500, + RsaGenerationError => 500, + PasswordHashError(_) => 500, + DatabaseError(_) => 500, + GitError(_) => 500, + Io(_) => 500, + InternalError => 500, + InternalServerError(_) => 500, + AvatarUploadError(_) => 500, + } + } + + pub const fn slug(&self) -> &'static str { + use AppError::*; + match self { + Unauthorized => "unauthorized", + UserNotFound => "user_not_found", + ProjectNotFound => "project_not_found", + RepoNotFound => "repo_not_found", + NotFound(_) => "not_found", + TwoFactorRequired => "two_factor_required", + UserNameExists => "username_exists", + EmailExists => "email_exists", + ProjectNameAlreadyExists => "project_name_exists", + RepoNameAlreadyExists => "repo_name_exists", + CaptchaError => "captcha_error", + BadRequest(_) => "bad_request", + SerdeError(_) => "serde_error", + RsaDecodeError => "rsa_decode_error", + RoleParseError => "role_parse_error", + TwoFactorNotSetup => "two_factor_not_setup", + TwoFactorNotEnabled => "two_factor_not_enabled", + InvalidTwoFactorCode => "invalid_two_factor_code", + InvalidPassword => "invalid_password", + NoPower => "no_power", + PermissionDenied => "permission_denied", + Forbidden(_) => "forbidden", + RepoForBidAccess => "repo_forbidden", + TwoFactorAlreadyEnabled => "two_factor_already_enabled", + WorkspaceNotFound => "workspace_not_found", + WorkspaceSlugAlreadyExists => "workspace_slug_exists", + WorkspaceNameAlreadyExists => "workspace_name_exists", + NotWorkspaceMember => "not_workspace_member", + WorkspaceInviteTokenInvalid => "workspace_invite_token_invalid", + WorkspaceInviteExpired => "workspace_invite_expired", + WorkspaceInviteAlreadyAccepted => "workspace_invite_already_accepted", + Conflict(_) => "conflict", + DoMainNotSet 
=> "domain_not_set", + TxnError => "transaction_error", + RsaGenerationError => "rsa_generation_error", + PasswordHashError(_) => "password_hash_error", + DatabaseError(_) => "database_error", + GitError(_) => "git_error", + Io(_) => "io_error", + InternalError => "internal_error", + InternalServerError(_) => "internal_server_error", + AvatarUploadError(_) => "avatar_upload_error", + } + } + + pub fn user_message(&self) -> String { + match self { + AppError::NotFound(s) => s.clone(), + AppError::BadRequest(s) => s.clone(), + AppError::Forbidden(s) => s.clone(), + AppError::DatabaseError(_) => "A database error occurred".to_string(), + AppError::PasswordHashError(_) => "A password processing error occurred".to_string(), + AppError::GitError(_) => "A git operation failed".to_string(), + AppError::SerdeError(_) => "A data parsing error occurred".to_string(), + AppError::Io(_) => "A file system error occurred".to_string(), + AppError::InternalServerError(_) => "An internal error occurred".to_string(), + AppError::AvatarUploadError(_) => "An avatar upload error occurred".to_string(), + AppError::Conflict(_) => "Resource conflict".to_string(), + _ => self.slug().to_string(), + } + } +} + +impl From for AppError { + fn from(value: argon2::password_hash::Error) -> Self { + AppError::PasswordHashError(value.to_string()) + } +} + +impl From for AppError { + fn from(value: sea_orm::error::DbErr) -> Self { + AppError::DatabaseError(value.to_string()) + } +} + +impl From for AppError { + fn from(value: GitError) -> Self { + AppError::GitError(value) + } +} + +impl From for AppError { + fn from(value: serde_json::Error) -> Self { + AppError::SerdeError(value) + } +} + +impl From for AppError { + fn from(value: std::io::Error) -> Self { + AppError::Io(value) + } +} + +impl From for AppError { + fn from(value: anyhow::Error) -> Self { + AppError::InternalServerError(value.to_string()) + } +} +impl From for AppError { + fn from(err: room::RoomError) -> Self { + use room::RoomError; 
+ match err { + RoomError::Database(e) => { + println!("database error: {}", e); + AppError::DatabaseError(e.to_string()) + } + RoomError::NotFound(s) => AppError::NotFound(s), + RoomError::Unauthorized => AppError::Unauthorized, + RoomError::NoPower => AppError::NoPower, + RoomError::RateLimited(s) => AppError::BadRequest(s), + RoomError::BadRequest(s) => AppError::BadRequest(s), + RoomError::RoleParseError => AppError::RoleParseError, + RoomError::Internal(s) => AppError::InternalServerError(s), + } + } +} diff --git a/libs/service/git/archive.rs b/libs/service/git/archive.rs new file mode 100644 index 0000000..901f8cb --- /dev/null +++ b/libs/service/git/archive.rs @@ -0,0 +1,303 @@ +use crate::AppService; +use crate::error::AppError; +use crate::git::{ArchiveEntry, ArchiveFormat, ArchiveSummary}; +use base64::{Engine, engine::general_purpose::STANDARD as BASE64}; +use redis::AsyncCommands; +use serde::{Deserialize, Serialize}; +use session::Session; +use utoipa::ToSchema; +#[derive(Debug, Clone, Deserialize, Serialize, ToSchema)] +pub struct ArchiveQuery { + pub commit_oid: String, + pub format: String, + #[serde(default)] + pub prefix: Option, + #[serde(default)] + pub max_depth: Option, + #[serde(default)] + pub path_filter: Option, +} +impl ArchiveQuery { + fn to_archive_format(&self) -> Result { + match self.format.to_lowercase().as_str() { + "tar" => Ok(ArchiveFormat::Tar), + "tar.gz" | "tgz" => Ok(ArchiveFormat::TarGz), + "zip" => Ok(ArchiveFormat::Zip), + _ => Err(AppError::InternalServerError(format!( + "unsupported archive format: {}", + self.format + ))), + } + } + fn cache_key(&self) -> String { + let prefix = self.prefix.as_deref().unwrap_or(""); + let filter = self.path_filter.as_deref().unwrap_or(""); + let depth = self.max_depth.map_or("0".to_string(), |d| d.to_string()); + if prefix.is_empty() && filter.is_empty() && self.max_depth.is_none() { + String::new() + } else { + use std::collections::hash_map::DefaultHasher; + use std::hash::{Hash, 
Hasher}; + let mut h = DefaultHasher::new(); + (prefix, filter, depth).hash(&mut h); + format!("-{:x}", h.finish()) + } + } +} +#[derive(Debug, Clone, Serialize, utoipa::ToSchema)] +pub struct ArchiveListResponse { + pub commit_oid: String, + pub entries: Vec, + pub total_entries: usize, +} +#[derive(Debug, Clone, Serialize, utoipa::ToSchema)] +pub struct ArchiveEntryResponse { + pub path: String, + pub oid: String, + pub size: u64, + pub mode: u32, +} +impl From for ArchiveEntryResponse { + fn from(e: ArchiveEntry) -> Self { + Self { + path: e.path, + oid: e.oid, + size: e.size, + mode: e.mode, + } + } +} +#[derive(Debug, Clone, Serialize, utoipa::ToSchema)] +pub struct ArchiveSummaryResponse { + pub commit_oid: String, + pub format: String, + pub total_entries: usize, + pub total_size: u64, +} +impl From for ArchiveSummaryResponse { + fn from(s: ArchiveSummary) -> Self { + let format_str = match s.format { + ArchiveFormat::Tar => "tar", + ArchiveFormat::TarGz => "tar.gz", + ArchiveFormat::Zip => "zip", + }; + Self { + commit_oid: s.commit_oid, + format: format_str.to_string(), + total_entries: s.total_entries, + total_size: s.total_size, + } + } +} +#[derive(Debug, Clone, Serialize, Deserialize, utoipa::ToSchema)] +pub struct ArchiveResponse { + pub commit_oid: String, + pub format: String, + pub size: usize, + pub data: String, +} +#[derive(Debug, Clone, Serialize, utoipa::ToSchema)] +pub struct ArchiveCachedResponse { + pub commit_oid: String, + pub format: String, + pub cached: bool, +} +#[derive(Debug, Clone, Serialize, utoipa::ToSchema)] +pub struct ArchiveInvalidateResponse { + pub commit_oid: String, + pub format: String, + pub invalidated: bool, +} +#[derive(Debug, Clone, Serialize, utoipa::ToSchema)] +pub struct ArchiveInvalidateAllResponse { + pub commit_oid: String, + pub count: usize, +} +impl AppService { + pub async fn git_archive( + &self, + namespace: String, + repo_name: String, + query: ArchiveQuery, + ctx: &Session, + ) -> Result { + let repo = 
self + .utils_find_repo(namespace.clone(), repo_name.clone(), ctx) + .await?; + let format = query.to_archive_format()?; + let format_str = match format { + ArchiveFormat::Tar => "tar", + ArchiveFormat::TarGz => "tar.gz", + ArchiveFormat::Zip => "zip", + }; + let commit_oid = git::CommitOid::new(&query.commit_oid); + let cache_key = format!( + "git:archive:{}:{}:{}:{}:{}", + namespace, + repo_name, + query.commit_oid, + format_str, + query.cache_key(), + ); + if let Ok(mut conn) = self.cache.conn().await { + if let Ok(cached) = conn.get::<_, String>(cache_key.clone()).await { + if let Ok(cached) = serde_json::from_str::(&cached) { + return Ok(cached); + } + } + } + let domain = git::GitDomain::from_model(repo)?; + let opts = git::ArchiveOptions::new() + .prefix(query.prefix.as_deref().unwrap_or("")) + .max_depth(query.max_depth.unwrap_or(usize::MAX)); + let data = domain.archive(&commit_oid, format, Some(opts))?; + let data_b64 = BASE64.encode(&data); + let response = ArchiveResponse { + commit_oid: query.commit_oid, + format: format_str.to_string(), + size: data.len(), + data: data_b64, + }; + if let Ok(mut conn) = self.cache.conn().await { + if let Err(e) = conn + .set_ex::( + cache_key, + serde_json::to_string(&response).unwrap_or_default(), + 60 * 60, + ) + .await + { + slog::debug!(self.logs, "cache set failed (non-fatal): {}", e); + } + } + Ok(response) + } + pub async fn git_archive_list( + &self, + namespace: String, + repo_name: String, + query: ArchiveQuery, + ctx: &Session, + ) -> Result { + let repo = self + .utils_find_repo(namespace.clone(), repo_name.clone(), ctx) + .await?; + let commit_oid = git::CommitOid::new(&query.commit_oid); + let opts = git::ArchiveOptions::new() + .prefix(query.prefix.as_deref().unwrap_or("")) + .max_depth(query.max_depth.unwrap_or(usize::MAX)); + let domain = git::GitDomain::from_model(repo)?; + let entries = domain.archive_list(&commit_oid, Some(opts))?; + let entry_responses: Vec = entries + .into_iter() + 
.map(ArchiveEntryResponse::from) + .collect(); + let total_entries = entry_responses.len(); + Ok(ArchiveListResponse { + commit_oid: query.commit_oid, + entries: entry_responses, + total_entries, + }) + } + pub async fn git_archive_summary( + &self, + namespace: String, + repo_name: String, + query: ArchiveQuery, + ctx: &Session, + ) -> Result { + let repo = self + .utils_find_repo(namespace.clone(), repo_name.clone(), ctx) + .await?; + let format = query.to_archive_format()?; + let format_str = match format { + ArchiveFormat::Tar => "tar", + ArchiveFormat::TarGz => "tar.gz", + ArchiveFormat::Zip => "zip", + }; + let commit_oid = git::CommitOid::new(&query.commit_oid); + let opts = git::ArchiveOptions::new() + .prefix(query.prefix.as_deref().unwrap_or("")) + .max_depth(query.max_depth.unwrap_or(usize::MAX)); + let domain = git::GitDomain::from_model(repo)?; + let mut summary = domain.archive_summary(&commit_oid, format, Some(opts))?; + summary.format = format; + Ok(ArchiveSummaryResponse { + commit_oid: query.commit_oid, + format: format_str.to_string(), + total_entries: summary.total_entries, + total_size: summary.total_size, + }) + } + pub async fn git_archive_cached( + &self, + namespace: String, + repo_name: String, + query: ArchiveQuery, + ctx: &Session, + ) -> Result { + let repo = self + .utils_find_repo(namespace.clone(), repo_name.clone(), ctx) + .await?; + let format = query.to_archive_format()?; + let format_str = match format { + ArchiveFormat::Tar => "tar", + ArchiveFormat::TarGz => "tar.gz", + ArchiveFormat::Zip => "zip", + }; + let commit_oid = git::CommitOid::new(&query.commit_oid); + let opts = git::ArchiveOptions::new() + .prefix(query.prefix.as_deref().unwrap_or("")) + .max_depth(query.max_depth.unwrap_or(usize::MAX)); + let domain = git::GitDomain::from_model(repo)?; + let cached = domain.archive_cached(&commit_oid, format, Some(opts)); + Ok(ArchiveCachedResponse { + commit_oid: query.commit_oid, + format: format_str.to_string(), + cached, + }) 
+ } + pub async fn git_archive_invalidate( + &self, + namespace: String, + repo_name: String, + query: ArchiveQuery, + ctx: &Session, + ) -> Result { + let repo = self + .utils_find_repo(namespace.clone(), repo_name.clone(), ctx) + .await?; + let format = query.to_archive_format()?; + let format_str = match format { + ArchiveFormat::Tar => "tar", + ArchiveFormat::TarGz => "tar.gz", + ArchiveFormat::Zip => "zip", + }; + let commit_oid = git::CommitOid::new(&query.commit_oid); + let opts = git::ArchiveOptions::new() + .prefix(query.prefix.as_deref().unwrap_or("")) + .max_depth(query.max_depth.unwrap_or(usize::MAX)); + let domain = git::GitDomain::from_model(repo)?; + let invalidated = domain.archive_invalidate(&commit_oid, format, Some(opts))?; + Ok(ArchiveInvalidateResponse { + commit_oid: query.commit_oid, + format: format_str.to_string(), + invalidated, + }) + } + pub async fn git_archive_invalidate_all( + &self, + namespace: String, + repo_name: String, + commit_oid: String, + ctx: &Session, + ) -> Result { + let repo = self + .utils_find_repo(namespace.clone(), repo_name.clone(), ctx) + .await?; + let commit = git::CommitOid::new(&commit_oid); + let domain = git::GitDomain::from_model(repo)?; + let count = domain.archive_invalidate_all(&commit)?; + Ok(ArchiveInvalidateAllResponse { commit_oid, count }) + } +} diff --git a/libs/service/git/blame.rs b/libs/service/git/blame.rs new file mode 100644 index 0000000..e42dd96 --- /dev/null +++ b/libs/service/git/blame.rs @@ -0,0 +1,245 @@ +use crate::AppService; +use crate::error::AppError; +use crate::git::{BlameOptions, CommitBlameHunk, CommitBlameLine}; +use redis::AsyncCommands; +use serde::{Deserialize, Serialize}; +use session::Session; +use utoipa::ToSchema; + +#[derive(Debug, Clone, Deserialize, Serialize, ToSchema)] +pub struct BlameQuery { + #[serde(default)] + pub commit_oid: String, + #[serde(default)] + pub path: String, + #[serde(default)] + pub min_line: Option, + #[serde(default)] + pub max_line: Option, 
+ #[serde(default)] + pub track_copies_same_file: bool, + #[serde(default)] + pub track_copies_same_commit_moves: bool, + #[serde(default)] + pub ignore_whitespace: bool, +} + +impl BlameQuery { + fn to_blame_options(&self) -> BlameOptions { + let mut opts = BlameOptions::new(); + if let Some(min) = self.min_line { + opts.min_line = Some(min); + } + if let Some(max) = self.max_line { + opts.max_line = Some(max); + } + opts.track_copies_same_file = self.track_copies_same_file; + opts.track_copies_same_commit_moves = self.track_copies_same_commit_moves; + opts.ignore_whitespace = self.ignore_whitespace; + opts + } +} + +#[derive(Debug, Clone, Serialize, Deserialize, utoipa::ToSchema)] +pub struct BlameHunkResponse { + pub commit_oid: String, + pub final_start_line: u32, + pub final_lines: u32, + pub orig_start_line: u32, + pub orig_lines: u32, + pub boundary: bool, + pub orig_path: Option, +} + +impl From for BlameHunkResponse { + fn from(h: CommitBlameHunk) -> Self { + Self { + commit_oid: h.commit_oid.to_string(), + final_start_line: h.final_start_line, + final_lines: h.final_lines, + orig_start_line: h.orig_start_line, + orig_lines: h.orig_lines, + boundary: h.boundary, + orig_path: h.orig_path, + } + } +} + +#[derive(Debug, Clone, Serialize, Deserialize, utoipa::ToSchema)] +pub struct BlameLineResponse { + pub commit_oid: String, + pub line_no: u32, + pub content: String, + pub orig_path: Option, +} + +impl From for BlameLineResponse { + fn from(l: CommitBlameLine) -> Self { + Self { + commit_oid: l.commit_oid.to_string(), + line_no: l.line_no, + content: l.content, + orig_path: l.orig_path, + } + } +} + +impl AppService { + pub async fn git_blame_file( + &self, + namespace: String, + repo_name: String, + query: BlameQuery, + ctx: &Session, + ) -> Result, AppError> { + let repo = self + .utils_find_repo(namespace.clone(), repo_name.clone(), ctx) + .await?; + let commit_oid_str = query.commit_oid.clone(); + let path = query.path.clone(); + let blame_opts = 
query.to_blame_options(); + let cache_key = format!( + "git:cache:{}:{}:{}:{}:{:?}:{:?}:{}:{}:{}", + namespace, + repo_name, + path, + query.commit_oid, + blame_opts.max_line, + blame_opts.min_line, + blame_opts.ignore_whitespace, + blame_opts.track_copies_same_file, + blame_opts.track_copies_same_commit_moves, + ); + + if let Ok(mut conn) = self.cache.conn().await { + if let Ok(cached) = conn.get::<_, String>(cache_key.clone()).await { + if let Ok(cached) = serde_json::from_str(&cached) { + return Ok(cached); + } + } + } + + let repo_clone = repo.clone(); + let path_clone = path.clone(); + let opts_clone = blame_opts.clone(); + + let hunks: Vec = tokio::task::spawn_blocking(move || { + let domain = git::GitDomain::from_model(repo_clone)?; + let commit_oid = git::CommitOid::new(&commit_oid_str); + domain.blame_file(&commit_oid, &path_clone, Some(opts_clone)) + }) + .await + .map_err(|e| AppError::InternalServerError(format!("Task join error: {}", e)))? + .map_err(AppError::from)?; + + let response: Vec = + hunks.into_iter().map(BlameHunkResponse::from).collect(); + + if let Ok(mut conn) = self.cache.conn().await { + if let Err(e) = conn + .set_ex::( + cache_key, + serde_json::to_string(&response).unwrap_or_default(), + 60 * 60, + ) + .await + { + slog::debug!(self.logs, "cache set failed (non-fatal): {}", e); + } + } + + Ok(response) + } + + pub async fn git_blame_lines( + &self, + namespace: String, + repo_name: String, + query: BlameQuery, + ctx: &Session, + ) -> Result, AppError> { + let repo = self + .utils_find_repo(namespace.clone(), repo_name.clone(), ctx) + .await?; + let commit_oid_str = query.commit_oid.clone(); + let path = query.path.clone(); + let blame_opts = query.to_blame_options(); + let cache_key = format!( + "git:cache:blame_lines:{}:{}:{}:{}:{:?}:{:?}:{}:{}:{}", + namespace, + repo_name, + path, + query.commit_oid, + blame_opts.max_line, + blame_opts.min_line, + blame_opts.ignore_whitespace, + blame_opts.track_copies_same_file, + 
blame_opts.track_copies_same_commit_moves, + ); + + if let Ok(mut conn) = self.cache.conn().await { + if let Ok(cached) = conn.get::<_, String>(cache_key.clone()).await { + if let Ok(cached) = serde_json::from_str(&cached) { + return Ok(cached); + } + } + } + + let repo_clone = repo.clone(); + let path_clone = path.clone(); + let opts_clone = blame_opts.clone(); + + let lines: Vec = tokio::task::spawn_blocking(move || { + let domain = git::GitDomain::from_model(repo_clone)?; + let commit_oid = git::CommitOid::new(&commit_oid_str); + domain.blame_lines(&commit_oid, &path_clone, Some(opts_clone)) + }) + .await + .map_err(|e| AppError::InternalServerError(format!("Task join error: {}", e)))? + .map_err(AppError::from)?; + + let response: Vec = + lines.into_iter().map(BlameLineResponse::from).collect(); + + if let Ok(mut conn) = self.cache.conn().await { + if let Err(e) = conn + .set_ex::( + cache_key, + serde_json::to_string(&response).unwrap_or_default(), + 60 * 60, + ) + .await + { + slog::debug!(self.logs, "cache set failed (non-fatal): {}", e); + } + } + + Ok(response) + } + + pub async fn git_blame_hunk_at( + &self, + namespace: String, + repo_name: String, + query: BlameQuery, + line_no: usize, + ctx: &Session, + ) -> Result { + let repo = self + .utils_find_repo(namespace.clone(), repo_name.clone(), ctx) + .await?; + let commit_oid_str = query.commit_oid.clone(); + let path = query.path.clone(); + + let hunk = tokio::task::spawn_blocking(move || { + let domain = git::GitDomain::from_model(repo)?; + let commit_oid = git::CommitOid::new(&commit_oid_str); + domain.blame_hunk_at(&commit_oid, &path, line_no) + }) + .await + .map_err(|e| AppError::InternalServerError(format!("Task join error: {}", e)))? 
+ .map_err(AppError::from)?; + + Ok(BlameHunkResponse::from(hunk)) + } +} diff --git a/libs/service/git/blob.rs b/libs/service/git/blob.rs new file mode 100644 index 0000000..fd724d1 --- /dev/null +++ b/libs/service/git/blob.rs @@ -0,0 +1,485 @@ +use crate::AppService; +use crate::error::AppError; +use crate::git::BlobInfo; +use crate::git_spawn; +use base64::{Engine, engine::general_purpose::STANDARD as BASE64}; +use redis::AsyncCommands; +use serde::{Deserialize, Serialize}; +use session::Session; + +const BLOB_CACHE_SIZE_LIMIT: usize = 512 * 1024; + +const README_SIZE_LIMIT: usize = 1024 * 1024; + +const README_VARIANTS: &[(&str, bool, bool)] = &[ + ("README.md", true, true), + ("README.markdown", true, true), + ("README.mkd", true, true), + ("README.mkdn", true, true), + ("README.mdown", true, true), + ("README.rst", false, true), + ("README.adoc", false, true), + ("README.txt", true, true), + ("README.md.txt", true, true), + ("readme.md", true, true), + ("Readme.md", true, true), + ("README.MD", true, true), + ("readme.markdown", true, true), + ("Readme", false, true), + ("readme", false, true), + ("README", false, true), + ("readme.rst", false, true), + ("readme.txt", false, true), + ("README.md.orig", true, true), + ("README.md.bak", true, true), + ("docs/README.md", true, false), + ("doc/README.md", true, false), + ("docs/README", false, false), + ("doc/README", false, false), + ("docs/README.markdown", true, false), + ("doc/README.markdown", true, false), + ("docs/readme.md", true, false), + ("doc/readme.md", true, false), + (".github/README.md", true, false), + ("wiki/README.md", true, false), + ("site/README.md", true, false), +]; + +#[derive(Debug, Clone, Deserialize, utoipa::ToSchema)] +pub struct BlobGetQuery { + #[serde(default)] + pub oid: String, +} + +#[derive(Debug, Clone, Serialize, utoipa::ToSchema)] +pub struct BlobInfoResponse { + pub oid: String, + pub size: usize, + pub is_binary: bool, +} + +impl From for BlobInfoResponse { + fn from(b: 
BlobInfo) -> Self { + Self { + oid: b.oid.to_string(), + size: b.size, + is_binary: b.is_binary, + } + } +} + +#[derive(Debug, Clone, Serialize, Deserialize, utoipa::ToSchema)] +pub struct BlobContentResponse { + pub oid: String, + pub size: usize, + pub is_binary: bool, + pub content: String, +} + +#[derive(Debug, Clone, Serialize, utoipa::ToSchema)] +pub struct BlobExistsResponse { + pub oid: String, + pub exists: bool, +} + +#[derive(Debug, Clone, Serialize, utoipa::ToSchema)] +pub struct BlobIsBinaryResponse { + pub oid: String, + pub is_binary: bool, +} + +#[derive(Debug, Clone, Serialize, utoipa::ToSchema)] +pub struct BlobSizeResponse { + pub oid: String, + pub size: usize, +} + +#[derive(Debug, Clone, Serialize, utoipa::ToSchema)] +pub struct BlobCreateResponse { + pub oid: String, + pub size: usize, +} + +#[derive(Debug, Clone, Deserialize, utoipa::ToSchema)] +pub struct BlobCreateRequest { + pub data: String, +} + +#[derive(Debug, Clone, Deserialize, utoipa::ToSchema)] +pub struct GitReadmeQuery { + pub r#ref: Option, +} + +#[derive(Debug, Clone, Serialize, utoipa::ToSchema)] +pub struct GitReadmeResponse { + pub path: Option, + pub content: Option, + pub size: Option, + pub encoding: Option, + #[serde(default)] + pub truncated: bool, + #[serde(default)] + pub is_binary: bool, +} + +impl AppService { + pub async fn git_readme( + &self, + namespace: String, + repo_name: String, + query: GitReadmeQuery, + ctx: &Session, + ) -> Result { + let repo = self + .utils_find_repo(namespace.clone(), repo_name.clone(), ctx) + .await?; + let rev = query.r#ref.unwrap_or_else(|| "HEAD".to_string()); + + let tree_oid: git::CommitOid = { + let rev_clone = rev.clone(); + git_spawn!(repo, domain -> { + domain.resolve_rev(&rev_clone) + })? 
+ .into() + }; + + let (root_blobs, subdirs): ( + std::collections::HashMap, + std::collections::HashMap, + ) = { + let oid = tree_oid; + git_spawn!(repo, domain -> { + let entries = domain.tree_list(&oid)?; + let mut blobs: std::collections::HashMap = + Default::default(); + let mut dirs: std::collections::HashMap = + Default::default(); + for entry in entries { + let name_lower = entry.name.to_lowercase(); + if entry.kind == "tree" { + dirs.insert(name_lower, (entry.name.clone(), entry.oid)); + } else if entry.kind == "blob" { + blobs.insert(name_lower, (entry.name.clone(), entry.oid)); + } + } + Ok::<_, AppError>((blobs, dirs)) + })? + }; + + let subdir_blobs: std::collections::HashMap< + String, + std::collections::HashMap, + > = { + let repo_clone = repo.clone(); + let subdirs_clone = subdirs.clone(); + let mut result: std::collections::HashMap< + String, + std::collections::HashMap, + > = Default::default(); + + for (subdir_lower, (subdir_original, subdir_oid)) in subdirs_clone.clone() { + let interested = matches!( + subdir_lower.as_str(), + "docs" | "doc" | ".github" | "wiki" | "site" + ); + if !interested { + continue; + } + let oid = subdir_oid; + let repo_inner = repo_clone.clone(); + let entries: std::collections::HashMap = git_spawn!(repo_inner, domain -> { + let entries = domain.tree_list(&oid)?; + Ok::, AppError>( + entries.into_iter() + .filter(|e| e.kind == "blob") + .map(|e| (e.name.to_lowercase(), (e.name.clone(), e.oid))) + .collect(), + ) + })?; + result.insert(subdir_original.clone(), entries); + } + + result + }; + + #[derive(Clone)] + struct Candidate { + path: String, + oid: git::CommitOid, + score: isize, + } + + let mut best: Option = None; + + for &(variant, is_markdown, is_root) in README_VARIANTS { + let lookup = variant.to_lowercase(); + + let found: Option<(String, git::CommitOid)> = if is_root { + root_blobs.get(&lookup).map(|(n, o)| (n.clone(), o.clone())) + } else { + lookup.split_once('/').and_then(|(subdir, rest)| { + 
subdir_blobs.get(subdir).and_then(|subdir_map| { + subdir_map.get(rest).map(|(n, o)| (n.clone(), o.clone())) + }) + }) + }; + + let Some((_blob_name, oid)) = found else { + continue; + }; + + let score = if is_root { 1000 } else { 0 } + if is_markdown { 100 } else { 0 } + - variant.len() as isize; + + let better = best.as_ref().map(|b| score > b.score).unwrap_or(true); + + if better { + best = Some(Candidate { + path: variant.to_string(), + oid, + score, + }); + } + } + + let Some(candidate) = best else { + return Ok(GitReadmeResponse { + path: None, + content: None, + size: None, + encoding: None, + truncated: false, + is_binary: false, + }); + }; + + let (raw_bytes, is_binary, total_size) = { + let oid = candidate.oid; + git_spawn!(repo, domain -> { + let content = domain.blob_content(&oid)?; + Ok::<_, AppError>((content.content, content.is_binary, content.size)) + })? + }; + + if is_binary { + return Ok(GitReadmeResponse { + path: Some(candidate.path), + content: None, + size: Some(total_size), + encoding: Some("binary".to_string()), + truncated: false, + is_binary: true, + }); + } + + let truncated = raw_bytes.len() > README_SIZE_LIMIT; + + let to_encode: Vec = if truncated { + let mut cut = raw_bytes[..README_SIZE_LIMIT].to_vec(); + while !cut.is_empty() && std::str::from_utf8(&cut).is_err() { + cut.pop(); + } + cut + } else { + raw_bytes + }; + + let (content_b64, is_binary_final, encoding) = match std::str::from_utf8(&to_encode) { + Ok(_) => (BASE64.encode(&to_encode), false, "base64".to_string()), + Err(_) => (BASE64.encode(&to_encode), true, "binary".to_string()), + }; + + Ok(GitReadmeResponse { + path: Some(candidate.path), + content: Some(content_b64), + size: Some(total_size), + encoding: Some(encoding), + truncated, + is_binary: is_binary_final, + }) + } + + pub async fn git_blob_get( + &self, + namespace: String, + repo_name: String, + query: BlobGetQuery, + ctx: &Session, + ) -> Result { + let repo = self.utils_find_repo(namespace, repo_name, 
ctx).await?; + let oid_str = query.oid.clone(); + + let info = tokio::task::spawn_blocking(move || { + let domain = git::GitDomain::from_model(repo)?; + let oid = git::CommitOid::new(&oid_str); + domain.blob_get(&oid) + }) + .await + .map_err(|e| AppError::InternalServerError(format!("Task join error: {}", e)))? + .map_err(AppError::from)?; + + Ok(BlobInfoResponse::from(info)) + } + + pub async fn git_blob_exists( + &self, + namespace: String, + repo_name: String, + query: BlobGetQuery, + ctx: &Session, + ) -> Result { + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + let oid_str = query.oid.clone(); + + let exists = tokio::task::spawn_blocking(move || { + let domain = git::GitDomain::from_model(repo)?; + let oid = git::CommitOid::new(&oid_str); + Ok::<_, git::GitError>(domain.blob_exists(&oid)) + }) + .await + .map_err(|e| AppError::InternalServerError(format!("Task join error: {}", e)))? + .map_err(AppError::from)?; + + Ok(BlobExistsResponse { + oid: query.oid, + exists, + }) + } + + pub async fn git_blob_is_binary( + &self, + namespace: String, + repo_name: String, + query: BlobGetQuery, + ctx: &Session, + ) -> Result { + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + let oid_str = query.oid.clone(); + + let is_binary = tokio::task::spawn_blocking(move || { + let domain = git::GitDomain::from_model(repo)?; + let oid = git::CommitOid::new(&oid_str); + domain.blob_is_binary(&oid) + }) + .await + .map_err(|e| AppError::InternalServerError(format!("Task join error: {}", e)))? 
+ .map_err(AppError::from)?; + + Ok(BlobIsBinaryResponse { + oid: query.oid, + is_binary, + }) + } + + pub async fn git_blob_content( + &self, + namespace: String, + repo_name: String, + query: BlobGetQuery, + ctx: &Session, + ) -> Result { + let repo = self + .utils_find_repo(namespace.clone(), repo_name.clone(), ctx) + .await?; + let cache_key = format!("git:blob:{}:{}:{}", namespace, repo_name, query.oid); + + if let Ok(mut conn) = self.cache.conn().await { + if let Ok(cached) = conn.get::<_, String>(cache_key.clone()).await { + if let Ok(cached) = serde_json::from_str::(&cached) { + return Ok(cached); + } + } + } + + let repo_clone = repo.clone(); + let oid_str = query.oid.clone(); + + let content = tokio::task::spawn_blocking(move || { + let domain = git::GitDomain::from_model(repo_clone)?; + let oid = git::CommitOid::new(&oid_str); + domain.blob_content(&oid) + }) + .await + .map_err(|e| AppError::InternalServerError(format!("Task join error: {}", e)))? + .map_err(AppError::from)?; + + let response = BlobContentResponse { + oid: query.oid.clone(), + size: content.size, + is_binary: content.is_binary, + content: BASE64.encode(&content.content), + }; + + // Only cache blobs smaller than the size limit to prevent memory exhaustion + if response.size < BLOB_CACHE_SIZE_LIMIT { + if let Ok(mut conn) = self.cache.conn().await { + if let Err(e) = conn + .set_ex::( + cache_key, + serde_json::to_string(&response).unwrap_or_default(), + 60 * 60, + ) + .await + { + slog::debug!(self.logs, "cache set failed (non-fatal): {}", e); + } + } + } + + Ok(response) + } + + pub async fn git_blob_size( + &self, + namespace: String, + repo_name: String, + query: BlobGetQuery, + ctx: &Session, + ) -> Result { + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + let oid_str = query.oid.clone(); + + let size = tokio::task::spawn_blocking(move || { + let domain = git::GitDomain::from_model(repo)?; + let oid = git::CommitOid::new(&oid_str); + domain.blob_size(&oid) + }) 
+ .await + .map_err(|e| AppError::InternalServerError(format!("Task join error: {}", e)))? + .map_err(AppError::from)?; + + Ok(BlobSizeResponse { + oid: query.oid, + size, + }) + } + + pub async fn git_blob_create( + &self, + namespace: String, + repo_name: String, + request: BlobCreateRequest, + ctx: &Session, + ) -> Result { + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + let data = BASE64 + .decode(&request.data) + .map_err(|_| AppError::InternalServerError("invalid base64 data".to_string()))?; + + let repo_clone = repo.clone(); + let data_clone = data.clone(); + + let oid = tokio::task::spawn_blocking(move || { + let domain = git::GitDomain::from_model(repo_clone)?; + domain.blob_create(&data_clone) + }) + .await + .map_err(|e| AppError::InternalServerError(format!("Task join error: {}", e)))? + .map_err(AppError::from)?; + + Ok(BlobCreateResponse { + oid: oid.to_string(), + size: data.len(), + }) + } +} diff --git a/libs/service/git/blocking.rs b/libs/service/git/blocking.rs new file mode 100644 index 0000000..f4661a8 --- /dev/null +++ b/libs/service/git/blocking.rs @@ -0,0 +1,15 @@ +#[macro_export] +macro_rules! git_spawn { + ($repo:expr, $domain:ident -> $body:expr) => {{ + let repo_clone = $repo.clone(); + tokio::task::spawn_blocking(move || { + let $domain = git::GitDomain::from_model(repo_clone)?; + $body + }) + .await + .map_err(|e| { + crate::error::AppError::InternalServerError(format!("Task join error: {}", e)) + })? 
+ .map_err(crate::error::AppError::from) + }}; +} diff --git a/libs/service/git/branch.rs b/libs/service/git/branch.rs new file mode 100644 index 0000000..1e98a59 --- /dev/null +++ b/libs/service/git/branch.rs @@ -0,0 +1,915 @@ +use crate::AppService; +use crate::error::AppError; +use crate::git::{BranchDiff, BranchInfo, BranchSummary}; +use models::repos::repo; +use models::repos::repo as repo_model; +use models::repos::repo_branch; +use models::repos::repo_branch_protect; +use sea_orm::prelude::Expr; +use sea_orm::{ColumnTrait, EntityTrait, ExprTrait, PaginatorTrait, QueryFilter}; +use serde::{Deserialize, Serialize}; +use session::Session; +use uuid::Uuid; + +#[derive(Debug, Clone, Deserialize, utoipa::ToSchema)] +pub struct BranchListQuery { + #[serde(default)] + pub remote_only: Option, + #[serde(default)] + pub all: Option, +} + +#[derive(Debug, Clone, Serialize, utoipa::ToSchema)] +pub struct BranchInfoResponse { + pub name: String, + pub oid: String, + pub is_head: bool, + pub is_remote: bool, + pub is_current: bool, + pub upstream: Option, +} + +impl From for BranchInfoResponse { + fn from(b: BranchInfo) -> Self { + Self { + name: b.name, + oid: b.oid.to_string(), + is_head: b.is_head, + is_remote: b.is_remote, + is_current: b.is_current, + upstream: b.upstream, + } + } +} + +impl From for BranchInfoResponse { + fn from(b: repo_branch::Model) -> Self { + // is_remote: full ref path starts with "refs/remotes/" + let is_remote = b.name.starts_with("refs/remotes/"); + // shorthand name for display (strip prefix) + let name = if b.name.starts_with("refs/heads/") { + b.name + .strip_prefix("refs/heads/") + .unwrap_or(&b.name) + .to_string() + } else if b.name.starts_with("refs/remotes/") { + b.name + .strip_prefix("refs/remotes/") + .unwrap_or(&b.name) + .to_string() + } else { + b.name.clone() + }; + Self { + name, + oid: b.oid, + is_head: b.head, + is_remote, + // is_current: not stored in DB, always false when from DB + is_current: false, + // upstream: 
stored as pattern "refs/remotes/{}/{}", return as-is + upstream: b.upstream, + } + } +} + +#[derive(Debug, Clone, Serialize, utoipa::ToSchema)] +pub struct BranchSummaryResponse { + pub local_count: usize, + pub remote_count: usize, + pub all_count: usize, +} + +impl From for BranchSummaryResponse { + fn from(s: BranchSummary) -> Self { + Self { + local_count: s.local_count, + remote_count: s.remote_count, + all_count: s.all_count, + } + } +} + +#[derive(Debug, Clone, Serialize, utoipa::ToSchema)] +pub struct BranchDiffResponse { + pub ahead: usize, + pub behind: usize, + pub diverged: bool, +} + +impl From for BranchDiffResponse { + fn from(d: BranchDiff) -> Self { + Self { + ahead: d.ahead, + behind: d.behind, + diverged: d.diverged, + } + } +} + +#[derive(Debug, Clone, Serialize, utoipa::ToSchema)] +pub struct BranchExistsResponse { + pub name: String, + pub exists: bool, +} + +#[derive(Debug, Clone, Serialize, utoipa::ToSchema)] +pub struct BranchIsHeadResponse { + pub name: String, + pub is_head: bool, +} + +#[derive(Debug, Clone, Serialize, utoipa::ToSchema)] +pub struct BranchIsDetachedResponse { + pub is_detached: bool, +} + +#[derive(Debug, Clone, Serialize, utoipa::ToSchema)] +pub struct BranchIsMergedResponse { + pub branch: String, + pub into: String, + pub is_merged: bool, +} + +#[derive(Debug, Clone, Serialize, utoipa::ToSchema)] +pub struct BranchMergeBaseResponse { + pub branch1: String, + pub branch2: String, + pub base: String, +} + +#[derive(Debug, Clone, Serialize, utoipa::ToSchema)] +pub struct BranchIsAncestorResponse { + pub child: String, + pub ancestor: String, + pub is_ancestor: bool, +} + +#[derive(Debug, Clone, Serialize, utoipa::ToSchema)] +pub struct BranchFastForwardResponse { + pub oid: String, +} + +#[derive(Debug, Clone, Serialize, utoipa::ToSchema)] +pub struct BranchTrackingDiffResponse { + pub name: String, + pub ahead: usize, + pub behind: usize, +} + +#[derive(Debug, Clone, Serialize, utoipa::ToSchema)] +pub struct 
BranchIsConflictedResponse { + pub is_conflicted: bool, +} + +#[derive(Debug, Clone, Deserialize, utoipa::ToSchema)] +pub struct BranchCreateRequest { + pub name: String, + pub oid: Option, + #[serde(default)] + pub force: bool, +} + +#[derive(Debug, Clone, Deserialize, utoipa::ToSchema)] +pub struct BranchRenameRequest { + pub old_name: String, + pub new_name: String, +} + +#[derive(Debug, Clone, Deserialize, utoipa::ToSchema)] +pub struct BranchMoveRequest { + pub name: String, + pub new_name: String, + #[serde(default)] + pub force: bool, +} + +#[derive(Debug, Clone, Deserialize, utoipa::ToSchema)] +pub struct BranchSetUpstreamRequest { + pub name: String, + pub upstream: Option, +} + +#[derive(Debug, Clone, Deserialize, utoipa::ToSchema)] +pub struct BranchDiffQuery { + pub local: String, + pub remote: String, +} + +#[derive(Debug, Clone, Deserialize, utoipa::ToSchema)] +pub struct BranchIsMergedQuery { + pub branch: String, + pub into: String, +} + +#[derive(Debug, Clone, Deserialize, utoipa::ToSchema)] +pub struct BranchIsAncestorQuery { + pub child: String, + pub ancestor: String, +} + +#[derive(Debug, Clone, Deserialize, utoipa::ToSchema)] +pub struct BranchMergeBaseQuery { + pub branch1: String, + pub branch2: String, +} + +macro_rules! git_spawn { + ($repo:expr, $domain:ident -> $body:expr) => {{ + let repo_clone = $repo.clone(); + tokio::task::spawn_blocking(move || { + let $domain = git::GitDomain::from_model(repo_clone)?; + $body + }) + .await + .map_err(|e| AppError::InternalServerError(format!("Task join error: {}", e)))? + .map_err(AppError::from) + }}; +} + +impl AppService { + /// Check and enforce branch protection rules before deleting (or renaming/moving away from) a branch. 
+ async fn check_protection_for_deletion( + &self, + repo_id: Uuid, + branch: &str, + ) -> Result<(), AppError> { + let protection = repo_branch_protect::Entity::find() + .filter(repo_branch_protect::Column::Repo.eq(repo_id)) + .filter(repo_branch_protect::Column::Branch.eq(branch)) + .one(&self.db) + .await + .map_err(AppError::from)?; + + if let Some(rule) = protection { + if rule.forbid_deletion { + return Err(AppError::Forbidden(format!( + "Deletion of protected branch '{}' is forbidden", + branch + ))); + } + } + Ok(()) + } + + /// Check and enforce branch protection rules before creating (or renaming/moving to) a branch. + async fn check_protection_for_push(&self, repo_id: Uuid, branch: &str) -> Result<(), AppError> { + let protection = repo_branch_protect::Entity::find() + .filter(repo_branch_protect::Column::Repo.eq(repo_id)) + .filter(repo_branch_protect::Column::Branch.eq(branch)) + .one(&self.db) + .await + .map_err(AppError::from)?; + + if let Some(rule) = protection { + if rule.forbid_push { + return Err(AppError::Forbidden(format!( + "Push to protected branch '{}' is forbidden", + branch + ))); + } + } + Ok(()) + } + pub async fn git_branch_list( + &self, + namespace: String, + repo_name: String, + query: BranchListQuery, + ctx: &Session, + ) -> Result, AppError> { + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + + let branches = repo_branch::Entity::find() + .filter(repo_branch::Column::Repo.eq(repo.id)) + .filter({ + if query.all.unwrap_or(false) { + // all: no filter + Expr::value(true) + } else if query.remote_only.unwrap_or(false) { + Expr::col(repo_branch::Column::Name).like("refs/remotes/%") + } else { + Expr::col(repo_branch::Column::Name).like("refs/heads/%") + } + }) + .all(&self.db) + .await + .map_err(AppError::from)?; + + Ok(branches.into_iter().map(BranchInfoResponse::from).collect()) + } + + pub async fn git_branch_summary( + &self, + namespace: String, + repo_name: String, + ctx: &Session, + ) -> Result { + let 
repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + + let local_count: usize = repo_branch::Entity::find() + .filter(repo_branch::Column::Repo.eq(repo.id)) + .filter(Expr::col(repo_branch::Column::Name).like("refs/heads/%")) + .count(&self.db) + .await + .map_err(AppError::from)? as usize; + + let remote_count: usize = repo_branch::Entity::find() + .filter(repo_branch::Column::Repo.eq(repo.id)) + .filter(Expr::col(repo_branch::Column::Name).like("refs/remotes/%")) + .count(&self.db) + .await + .map_err(AppError::from)? as usize; + + Ok(BranchSummaryResponse { + local_count, + remote_count, + all_count: local_count + remote_count, + }) + } + + pub async fn git_branch_get( + &self, + namespace: String, + repo_name: String, + name: String, + ctx: &Session, + ) -> Result { + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + + // Normalize: try shorthand forms, then full ref paths + let candidates = if name.starts_with("refs/") { + vec![name.clone()] + } else if name.starts_with("heads/") { + vec![format!("refs/{}", name)] + } else { + vec![ + format!("refs/heads/{}", name), + format!("refs/remotes/{}", name), + name.clone(), + ] + }; + + for candidate in &candidates { + if let Some(b) = repo_branch::Entity::find() + .filter(repo_branch::Column::Repo.eq(repo.id)) + .filter(repo_branch::Column::Name.eq(candidate)) + .one(&self.db) + .await + .map_err(AppError::from)? + { + return Ok(BranchInfoResponse::from(b)); + } + } + + // Fallback to git + let name_clone = name.clone(); + let info = git_spawn!(repo, domain -> { + domain.branch_get(&name_clone) + })?; + Ok(BranchInfoResponse::from(info)) + } + + pub async fn git_branch_current( + &self, + namespace: String, + repo_name: String, + ctx: &Session, + ) -> Result, AppError> { + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + + // Try git first (normal path). Errors (e.g. empty repo) fall through to DB fallback. 
+ if let Ok(Some(b)) = git_spawn!(repo, domain -> { + domain.branch_current() + }) { + return Ok(Some(BranchInfoResponse::from(b))); + } + + // Fallback: repo may be empty or sync not yet run, but default_branch + // is already set in repo metadata. Look it up in DB. + // repo.default_branch is shorthand (e.g. "main"), DB stores full ref (e.g. "refs/heads/main") + if !repo.default_branch.is_empty() { + let full_ref = format!("refs/heads/{}", repo.default_branch); + if let Some(branch) = repo_branch::Entity::find() + .filter(repo_branch::Column::Repo.eq(repo.id)) + .filter(repo_branch::Column::Name.eq(&full_ref)) + .one(&self.db) + .await + .map_err(AppError::from)? + { + return Ok(Some(BranchInfoResponse::from(branch))); + } + } + + Ok(None) + } + + pub async fn git_branch_exists( + &self, + namespace: String, + repo_name: String, + name: String, + ctx: &Session, + ) -> Result { + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + + // Try shorthand → full ref path candidates + let candidates = if name.starts_with("refs/") { + vec![name.clone()] + } else { + vec![ + format!("refs/heads/{}", name), + format!("refs/remotes/{}", name), + name.clone(), + ] + }; + + for candidate in &candidates { + let found = repo_branch::Entity::find() + .filter(repo_branch::Column::Repo.eq(repo.id)) + .filter(repo_branch::Column::Name.eq(candidate)) + .one(&self.db) + .await + .map_err(AppError::from)?; + if found.is_some() { + return Ok(BranchExistsResponse { name, exists: true }); + } + } + + // Fallback to git + let name_clone = name.clone(); + let exists = git_spawn!(repo, domain -> { + Ok::<_, git::GitError>(domain.branch_exists(&name_clone)) + })?; + + Ok(BranchExistsResponse { name, exists }) + } + + pub async fn git_branch_is_head( + &self, + namespace: String, + repo_name: String, + name: String, + ctx: &Session, + ) -> Result { + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + let name_clone = name.clone(); + + let is_head = 
git_spawn!(repo, domain -> { + domain.branch_is_head(&name_clone) + })?; + + Ok(BranchIsHeadResponse { name, is_head }) + } + + pub async fn git_branch_is_detached( + &self, + namespace: String, + repo_name: String, + ctx: &Session, + ) -> Result { + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + + let is_detached = git_spawn!(repo, domain -> { + Ok::<_, git::GitError>(domain.branch_is_detached()) + })?; + + Ok(BranchIsDetachedResponse { is_detached }) + } + + pub async fn git_branch_upstream( + &self, + namespace: String, + repo_name: String, + name: String, + ctx: &Session, + ) -> Result, AppError> { + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + let name_clone = name.clone(); + + let upstream = git_spawn!(repo, domain -> { + domain.branch_upstream(&name_clone) + })?; + + Ok(upstream.map(BranchInfoResponse::from)) + } + + pub async fn git_branch_diff( + &self, + namespace: String, + repo_name: String, + query: BranchDiffQuery, + ctx: &Session, + ) -> Result { + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + let local = query.local.clone(); + let remote = query.remote.clone(); + + let diff = git_spawn!(repo, domain -> { + domain.branch_diff(&local, &remote) + })?; + + Ok(BranchDiffResponse::from(diff)) + } + + pub async fn git_branch_tracking_difference( + &self, + namespace: String, + repo_name: String, + name: String, + ctx: &Session, + ) -> Result { + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + let name_clone = name.clone(); + + let (ahead, behind) = git_spawn!(repo, domain -> { + domain.branch_tracking_difference(&name_clone) + })?; + + Ok(BranchTrackingDiffResponse { + name, + ahead, + behind, + }) + } + + pub async fn git_branch_create( + &self, + namespace: String, + repo_name: String, + request: BranchCreateRequest, + ctx: &Session, + ) -> Result { + let repo: repo::Model = self + .utils_check_repo_admin(namespace.clone(), repo_name.clone(), ctx) + .await?; + 
self.check_protection_for_push(repo.id, &request.name) + .await?; + let name = request.name.clone(); + let force = request.force; + let name_for_spawn = name.clone(); + + let info = if let Some(oid) = request.oid { + let commit_oid = git::CommitOid::new(&oid); + let name_clone = name_for_spawn.clone(); + git_spawn!(repo, domain -> { + domain.branch_create(&name_clone, &commit_oid, force) + })? + } else { + git_spawn!(repo, domain -> { + domain.branch_create_from_head(&name_for_spawn, force) + })? + }; + + let response = BranchInfoResponse::from(info); + + let project_id = match repo_model::Entity::find_by_id(repo.id).one(&self.db).await { + Ok(Some(r)) => r.project, + Ok(None) => Uuid::nil(), + Err(e) => { + slog::warn!( + self.logs, + "failed to look up project_id for activity log: {}", + e + ); + Uuid::nil() + } + }; + let user_uid = ctx.user().unwrap_or(Uuid::nil()); + let _ = self + .project_log_activity( + project_id, + Some(repo.id), + user_uid, + super::super::project::activity::ActivityLogParams { + event_type: "branch_create".to_string(), + title: format!("{} created branch '{}'", user_uid, name), + repo_id: Some(repo.id), + content: None, + event_id: None, + event_sub_id: None, + metadata: Some(serde_json::json!({"branch_name": name})), + is_private: false, + }, + ) + .await; + + Ok(response) + } + + pub async fn git_branch_delete( + &self, + namespace: String, + repo_name: String, + name: String, + ctx: &Session, + ) -> Result<(), AppError> { + let repo: repo::Model = self + .utils_check_repo_admin(namespace.clone(), repo_name.clone(), ctx) + .await?; + self.check_protection_for_deletion(repo.id, &name).await?; + let name_for_spawn = name.clone(); + + git_spawn!(repo, domain -> { + domain.branch_delete(&name_for_spawn) + })?; + + let project_id = match repo_model::Entity::find_by_id(repo.id).one(&self.db).await { + Ok(Some(r)) => r.project, + Ok(None) => Uuid::nil(), + Err(e) => { + slog::warn!( + self.logs, + "failed to look up project_id for activity 
log: {}", + e + ); + Uuid::nil() + } + }; + let user_uid = ctx.user().unwrap_or(Uuid::nil()); + let _ = self + .project_log_activity( + project_id, + Some(repo.id), + user_uid, + super::super::project::activity::ActivityLogParams { + event_type: "branch_delete".to_string(), + title: format!("{} deleted branch '{}'", user_uid, name), + repo_id: Some(repo.id), + content: None, + event_id: None, + event_sub_id: None, + metadata: Some(serde_json::json!({"branch_name": name})), + is_private: false, + }, + ) + .await; + + Ok(()) + } + + pub async fn git_branch_delete_remote( + &self, + namespace: String, + repo_name: String, + name: String, + ctx: &Session, + ) -> Result<(), AppError> { + let repo: repo::Model = self + .utils_check_repo_admin(namespace.clone(), repo_name.clone(), ctx) + .await?; + self.check_protection_for_deletion(repo.id, &name).await?; + let name_clone = name.clone(); + + git_spawn!(repo, domain -> { + domain.branch_delete_remote(&name_clone) + })?; + + Ok(()) + } + + pub async fn git_branch_rename( + &self, + namespace: String, + repo_name: String, + request: BranchRenameRequest, + ctx: &Session, + ) -> Result { + let repo: repo::Model = self + .utils_check_repo_admin(namespace.clone(), repo_name.clone(), ctx) + .await?; + // Check: source branch cannot be deleted if protected + self.check_protection_for_deletion(repo.id, &request.old_name) + .await?; + // Check: target branch cannot be pushed if protected + self.check_protection_for_push(repo.id, &request.new_name) + .await?; + let old_name = request.old_name.clone(); + let new_name = request.new_name.clone(); + let old_name_for_spawn = old_name.clone(); + let new_name_for_spawn = new_name.clone(); + + let info = git_spawn!(repo, domain -> { + domain.branch_rename(&old_name_for_spawn, &new_name_for_spawn) + })?; + + let response = BranchInfoResponse::from(info); + let project_id = match repo_model::Entity::find_by_id(repo.id).one(&self.db).await { + Ok(Some(r)) => r.project, + Ok(None) => 
Uuid::nil(), + Err(e) => { + slog::warn!( + self.logs, + "failed to look up project_id for activity log: {}", + e + ); + Uuid::nil() + } + }; + let user_uid = ctx.user().unwrap_or(Uuid::nil()); + let _ = self + .project_log_activity( + project_id, + Some(repo.id), + user_uid, + super::super::project::activity::ActivityLogParams { + event_type: "branch_rename".to_string(), + title: format!( + "{} renamed branch '{}' to '{}'", + user_uid, old_name, new_name + ), + repo_id: Some(repo.id), + content: None, + event_id: None, + event_sub_id: None, + metadata: Some(serde_json::json!({"old_name": old_name, "new_name": new_name})), + is_private: false, + }, + ) + .await; + + Ok(response) + } + + pub async fn git_branch_move( + &self, + namespace: String, + repo_name: String, + request: BranchMoveRequest, + ctx: &Session, + ) -> Result { + let repo: repo::Model = self + .utils_check_repo_admin(namespace.clone(), repo_name.clone(), ctx) + .await?; + // Check: source branch cannot be deleted if protected + self.check_protection_for_deletion(repo.id, &request.name) + .await?; + // Check: target branch cannot be pushed if protected + self.check_protection_for_push(repo.id, &request.new_name) + .await?; + let name = request.name.clone(); + let new_name = request.new_name.clone(); + let force = request.force; + let name_for_spawn = name.clone(); + let new_name_for_spawn = new_name.clone(); + + let info = git_spawn!(repo, domain -> { + domain.branch_move(&name_for_spawn, &new_name_for_spawn, force) + })?; + + let response = BranchInfoResponse::from(info); + let project_id = match repo_model::Entity::find_by_id(repo.id).one(&self.db).await { + Ok(Some(r)) => r.project, + Ok(None) => Uuid::nil(), + Err(e) => { + slog::warn!( + self.logs, + "failed to look up project_id for activity log: {}", + e + ); + Uuid::nil() + } + }; + let user_uid = ctx.user().unwrap_or(Uuid::nil()); + let _ = self + .project_log_activity( + project_id, + Some(repo.id), + user_uid, + 
super::super::project::activity::ActivityLogParams {
                    // BUG FIX: this is git_branch_move, but the activity was
                    // recorded as "branch_rename" with a "renamed" title
                    // (copy-paste from git_branch_rename). Log the move under
                    // its own event type so activity consumers can tell a
                    // (possibly forced) move apart from a plain rename.
                    event_type: "branch_move".to_string(),
                    title: format!("{} moved branch '{}' to '{}'", user_uid, name, new_name),
                    repo_id: Some(repo.id),
                    content: None,
                    event_id: None,
                    event_sub_id: None,
                    metadata: Some(serde_json::json!({"old_name": name, "new_name": new_name})),
                    is_private: false,
                },
            )
            .await;

        Ok(response)
    }

    /// Set (or clear) the upstream tracking branch for `request.name`.
    /// `request.upstream == None` clears the tracking configuration.
    /// Mutating operation: requires repo admin rights.
    pub async fn git_branch_set_upstream(
        &self,
        namespace: String,
        repo_name: String,
        request: BranchSetUpstreamRequest,
        ctx: &Session,
    ) -> Result<(), AppError> {
        let repo: repo::Model = self
            .utils_check_repo_admin(namespace, repo_name, ctx)
            .await?;
        let name = request.name.clone();
        let upstream = request.upstream.clone();

        git_spawn!(repo, domain -> {
            domain.branch_set_upstream(&name, upstream.as_deref())
        })?;

        Ok(())
    }

    /// Report whether `query.branch` is fully merged into `query.into`.
    pub async fn git_branch_is_merged(
        &self,
        namespace: String,
        repo_name: String,
        query: BranchIsMergedQuery,
        ctx: &Session,
    ) -> Result<BranchIsMergedResponse, AppError> {
        let repo = self.utils_find_repo(namespace, repo_name, ctx).await?;
        let branch = query.branch.clone();
        let into = query.into.clone();

        let is_merged = git_spawn!(repo, domain -> {
            domain.branch_is_merged(&branch, &into)
        })?;

        Ok(BranchIsMergedResponse {
            branch: query.branch,
            into: query.into,
            is_merged,
        })
    }

    /// Compute the merge base (nearest common ancestor) of two branches.
    pub async fn git_branch_merge_base(
        &self,
        namespace: String,
        repo_name: String,
        query: BranchMergeBaseQuery,
        ctx: &Session,
    ) -> Result<BranchMergeBaseResponse, AppError> {
        let repo = self.utils_find_repo(namespace, repo_name, ctx).await?;
        let branch1 = query.branch1.clone();
        let branch2 = query.branch2.clone();

        let base = git_spawn!(repo, domain -> {
            domain.branch_merge_base(&branch1, &branch2)
        })?;

        Ok(BranchMergeBaseResponse {
            branch1: query.branch1,
            branch2: query.branch2,
            base: base.to_string(),
        })
    }

    pub async fn git_branch_is_ancestor(
        &self,
        namespace: String,
        repo_name: String,
        query: BranchIsAncestorQuery,
        ctx:
&Session, + ) -> Result { + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + let child = query.child.clone(); + let ancestor = query.ancestor.clone(); + + let is_ancestor = git_spawn!(repo, domain -> { + domain.branch_is_ancestor(&child, &ancestor) + })?; + + Ok(BranchIsAncestorResponse { + child: query.child, + ancestor: query.ancestor, + is_ancestor, + }) + } + + pub async fn git_branch_fast_forward( + &self, + namespace: String, + repo_name: String, + target: String, + force: Option, + ctx: &Session, + ) -> Result { + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + let target_clone = target.clone(); + + let new_oid = git_spawn!(repo, domain -> { + domain.branch_fast_forward(&target_clone, force.unwrap_or(false)) + })?; + + Ok(BranchFastForwardResponse { + oid: new_oid.to_string(), + }) + } + + pub async fn git_branch_is_conflicted( + &self, + namespace: String, + repo_name: String, + ctx: &Session, + ) -> Result { + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + + let is_conflicted = git_spawn!(repo, domain -> { + Ok::<_, git::GitError>(domain.branch_is_conflicted()) + })?; + + Ok(BranchIsConflictedResponse { is_conflicted }) + } +} diff --git a/libs/service/git/branch_protection.rs b/libs/service/git/branch_protection.rs new file mode 100644 index 0000000..900449e --- /dev/null +++ b/libs/service/git/branch_protection.rs @@ -0,0 +1,349 @@ +use crate::AppService; +use crate::error::AppError; +use models::pull_request::{self as pr_module, ReviewState, pull_request_review}; +use models::repos::repo_branch_protect; +use sea_orm::*; +use serde::{Deserialize, Serialize}; +use session::Session; +use uuid::Uuid; + +#[derive(Debug, Clone, Serialize, Deserialize, utoipa::ToSchema)] +pub struct BranchProtectionResponse { + pub id: i64, + pub repo: Uuid, + pub branch: String, + pub forbid_push: bool, + pub forbid_pull: bool, + pub forbid_merge: bool, + pub forbid_deletion: bool, + pub forbid_force_push: bool, + 
pub forbid_tag_push: bool, + pub required_approvals: i32, + pub dismiss_stale_reviews: bool, + pub require_linear_history: bool, + pub allow_fork_syncing: bool, +} + +impl From for BranchProtectionResponse { + fn from(m: repo_branch_protect::Model) -> Self { + Self { + id: m.id, + repo: m.repo, + branch: m.branch, + forbid_push: m.forbid_push, + forbid_pull: m.forbid_pull, + forbid_merge: m.forbid_merge, + forbid_deletion: m.forbid_deletion, + forbid_force_push: m.forbid_force_push, + forbid_tag_push: m.forbid_tag_push, + required_approvals: m.required_approvals, + dismiss_stale_reviews: m.dismiss_stale_reviews, + require_linear_history: m.require_linear_history, + allow_fork_syncing: m.allow_fork_syncing, + } + } +} + +#[derive(Debug, Clone, Deserialize, utoipa::ToSchema)] +pub struct BranchProtectionCreateRequest { + pub branch: String, + #[serde(default)] + pub forbid_push: bool, + #[serde(default)] + pub forbid_pull: bool, + #[serde(default)] + pub forbid_merge: bool, + #[serde(default)] + pub forbid_deletion: bool, + #[serde(default)] + pub forbid_force_push: bool, + #[serde(default)] + pub forbid_tag_push: bool, + #[serde(default)] + pub required_approvals: i32, + #[serde(default)] + pub dismiss_stale_reviews: bool, + #[serde(default)] + pub require_linear_history: bool, + #[serde(default = "default_allow_fork_syncing")] + pub allow_fork_syncing: bool, +} + +fn default_allow_fork_syncing() -> bool { + true +} + +#[derive(Debug, Clone, Deserialize, utoipa::ToSchema)] +pub struct BranchProtectionUpdateRequest { + pub branch: Option, + pub forbid_push: Option, + pub forbid_pull: Option, + pub forbid_merge: Option, + pub forbid_deletion: Option, + pub forbid_force_push: Option, + pub forbid_tag_push: Option, + pub required_approvals: Option, + pub dismiss_stale_reviews: Option, + pub require_linear_history: Option, + pub allow_fork_syncing: Option, +} + +#[derive(Debug, Clone, Serialize, utoipa::ToSchema)] +pub struct ApprovalCheckResult { + pub enough_approvals: 
bool, + pub approvals: i32, + pub required: i32, + pub reviewers: Vec, +} + +#[derive(Debug, Clone, Serialize, utoipa::ToSchema)] +pub struct ReviewerInfo { + pub reviewer: Uuid, + pub state: String, + pub submitted_at: Option>, +} + +impl AppService { + /// List all branch protection rules for a repository. + pub async fn branch_protection_list( + &self, + namespace: String, + repo_name: String, + ctx: &Session, + ) -> Result, AppError> { + let _ = ctx.user().ok_or(AppError::Unauthorized)?; + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + let rules = repo_branch_protect::Entity::find() + .filter(repo_branch_protect::Column::Repo.eq(repo.id)) + .all(&self.db) + .await?; + Ok(rules + .into_iter() + .map(BranchProtectionResponse::from) + .collect()) + } + + /// Get a single branch protection rule by id. + pub async fn branch_protection_get( + &self, + namespace: String, + repo_name: String, + rule_id: i64, + ctx: &Session, + ) -> Result { + let _ = ctx.user().ok_or(AppError::Unauthorized)?; + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + let rule = repo_branch_protect::Entity::find_by_id(rule_id) + .filter(repo_branch_protect::Column::Repo.eq(repo.id)) + .one(&self.db) + .await? + .ok_or_else(|| AppError::NotFound("Branch protection rule not found".to_string()))?; + Ok(BranchProtectionResponse::from(rule)) + } + + /// Create a branch protection rule. 
+ pub async fn branch_protection_create( + &self, + namespace: String, + repo_name: String, + request: BranchProtectionCreateRequest, + ctx: &Session, + ) -> Result { + let repo = self + .utils_check_repo_admin(namespace.clone(), repo_name.clone(), ctx) + .await?; + let active = repo_branch_protect::ActiveModel { + id: Default::default(), + repo: Set(repo.id), + branch: Set(request.branch.clone()), + forbid_push: Set(request.forbid_push), + forbid_pull: Set(request.forbid_pull), + forbid_merge: Set(request.forbid_merge), + forbid_deletion: Set(request.forbid_deletion), + forbid_force_push: Set(request.forbid_force_push), + forbid_tag_push: Set(request.forbid_tag_push), + required_approvals: Set(request.required_approvals), + dismiss_stale_reviews: Set(request.dismiss_stale_reviews), + require_linear_history: Set(request.require_linear_history), + allow_fork_syncing: Set(request.allow_fork_syncing), + }; + let rule = active.insert(&self.db).await?; + let _ = self + .project_log_activity( + repo.project, + Some(repo.id), + ctx.user().unwrap_or(Uuid::nil()), + super::super::project::activity::ActivityLogParams { + event_type: "branch_protection_create".to_string(), + title: format!( + "Branch protection created for '{}' on branch '{}'", + repo_name, request.branch + ), + repo_id: Some(repo.id), + content: None, + event_id: None, + event_sub_id: None, + metadata: Some(serde_json::json!({ "branch": request.branch })), + is_private: false, + }, + ) + .await; + Ok(BranchProtectionResponse::from(rule)) + } + + /// Update a branch protection rule. + pub async fn branch_protection_update( + &self, + namespace: String, + repo_name: String, + rule_id: i64, + request: BranchProtectionUpdateRequest, + ctx: &Session, + ) -> Result { + let repo = self + .utils_check_repo_admin(namespace.clone(), repo_name.clone(), ctx) + .await?; + let rule = repo_branch_protect::Entity::find_by_id(rule_id) + .filter(repo_branch_protect::Column::Repo.eq(repo.id)) + .one(&self.db) + .await? 
+ .ok_or_else(|| AppError::NotFound("Branch protection rule not found".to_string()))?; + let mut active: repo_branch_protect::ActiveModel = rule.into(); + if let Some(v) = request.branch { + active.branch = Set(v); + } + if let Some(v) = request.forbid_push { + active.forbid_push = Set(v); + } + if let Some(v) = request.forbid_pull { + active.forbid_pull = Set(v); + } + if let Some(v) = request.forbid_merge { + active.forbid_merge = Set(v); + } + if let Some(v) = request.forbid_deletion { + active.forbid_deletion = Set(v); + } + if let Some(v) = request.forbid_force_push { + active.forbid_force_push = Set(v); + } + if let Some(v) = request.forbid_tag_push { + active.forbid_tag_push = Set(v); + } + if let Some(v) = request.required_approvals { + active.required_approvals = Set(v); + } + if let Some(v) = request.dismiss_stale_reviews { + active.dismiss_stale_reviews = Set(v); + } + if let Some(v) = request.require_linear_history { + active.require_linear_history = Set(v); + } + if let Some(v) = request.allow_fork_syncing { + active.allow_fork_syncing = Set(v); + } + let updated = active.update(&self.db).await?; + Ok(BranchProtectionResponse::from(updated)) + } + + /// Delete a branch protection rule. + pub async fn branch_protection_delete( + &self, + namespace: String, + repo_name: String, + rule_id: i64, + ctx: &Session, + ) -> Result<(), AppError> { + let repo = self + .utils_check_repo_admin(namespace, repo_name, ctx) + .await?; + let deleted = repo_branch_protect::Entity::delete_many() + .filter(repo_branch_protect::Column::Id.eq(rule_id)) + .filter(repo_branch_protect::Column::Repo.eq(repo.id)) + .exec(&self.db) + .await?; + if deleted.rows_affected == 0 { + return Err(AppError::NotFound( + "Branch protection rule not found".to_string(), + )); + } + Ok(()) + } + + /// Check approval count for a PR against branch protection required_approvals. 
+ pub async fn branch_protection_check_approvals( + &self, + namespace: String, + repo_name: String, + pr_number: i64, + _ctx: &Session, + ) -> Result { + let repo = self.utils_find_repo(namespace, repo_name, _ctx).await?; + let pr = pr_module::PullRequest::find() + .filter(pr_module::pull_request::Column::Repo.eq(repo.id)) + .filter(pr_module::pull_request::Column::Number.eq(pr_number)) + .one(&self.db) + .await? + .ok_or_else(|| AppError::NotFound("Pull request not found".to_string()))?; + + // Find branch protection for the base branch + let protection = repo_branch_protect::Entity::find() + .filter(repo_branch_protect::Column::Repo.eq(repo.id)) + .filter(repo_branch_protect::Column::Branch.eq(&pr.base)) + .one(&self.db) + .await?; + + let required = protection + .as_ref() + .map(|p| p.required_approvals) + .unwrap_or(0); + + if required <= 0 { + return Ok(ApprovalCheckResult { + enough_approvals: true, + approvals: 0, + required, + reviewers: vec![], + }); + } + + // Count approvals from pull_request_review table + let reviews = pull_request_review::Entity::find() + .filter(pull_request_review::Column::Repo.eq(repo.id)) + .filter(pull_request_review::Column::Number.eq(pr_number)) + .filter(pull_request_review::Column::State.eq(ReviewState::Approved.to_string())) + .all(&self.db) + .await?; + + let approvals = reviews.len() as i32; + let reviewers: Vec = reviews + .into_iter() + .map(|r| ReviewerInfo { + reviewer: r.reviewer, + state: r.state, + submitted_at: r.submitted_at, + }) + .collect(); + + Ok(ApprovalCheckResult { + enough_approvals: approvals >= required, + approvals, + required, + reviewers, + }) + } + + /// Find the branch protection rule for a given repo+branch. 
+ pub async fn branch_protection_find( + &self, + repo_id: Uuid, + branch: &str, + ) -> Result, AppError> { + let rule = repo_branch_protect::Entity::find() + .filter(repo_branch_protect::Column::Repo.eq(repo_id)) + .filter(repo_branch_protect::Column::Branch.eq(branch)) + .one(&self.db) + .await?; + Ok(rule) + } +} diff --git a/libs/service/git/commit.rs b/libs/service/git/commit.rs new file mode 100644 index 0000000..df7aeb0 --- /dev/null +++ b/libs/service/git/commit.rs @@ -0,0 +1,1361 @@ +use crate::AppService; +use crate::error::AppError; +use crate::git::{ + CommitDiffFile, CommitDiffHunk, CommitDiffStats, CommitGraph, CommitMeta, CommitRefInfo, + CommitReflogEntry, CommitSignature, CommitSort, CommitWalkOptions, +}; +use models::repos::repo; +use redis::AsyncCommands; +use serde::{Deserialize, Serialize}; +use session::Session; + +#[derive(Debug, Clone, Deserialize, utoipa::ToSchema)] +pub struct CommitGetQuery { + pub oid: String, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct CommitMetaResponse { + pub oid: String, + pub message: String, + pub summary: String, + pub author: CommitSignatureResponse, + pub committer: CommitSignatureResponse, + pub tree_id: String, + pub parent_ids: Vec, + pub encoding: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct CommitSignatureResponse { + pub name: String, + pub email: String, + pub time_secs: i64, + pub offset_minutes: i32, +} + +impl From for CommitSignatureResponse { + fn from(s: CommitSignature) -> Self { + Self { + name: s.name, + email: s.email, + time_secs: s.time_secs, + offset_minutes: s.offset_minutes, + } + } +} + +impl From for CommitMetaResponse { + fn from(c: CommitMeta) -> Self { + Self { + oid: c.oid.to_string(), + message: c.message, + summary: c.summary, + author: CommitSignatureResponse::from(c.author), + committer: CommitSignatureResponse::from(c.committer), + tree_id: c.tree_id.to_string(), + parent_ids: c.parent_ids.into_iter().map(|p| 
p.to_string()).collect(), + encoding: c.encoding, + } + } +} + +#[derive(Debug, Clone, Serialize)] +pub struct CommitExistsResponse { + pub oid: String, + pub exists: bool, +} + +#[derive(Debug, Clone, Serialize)] +pub struct CommitIsCommitResponse { + pub oid: String, + pub is_commit: bool, +} + +#[derive(Debug, Clone, Serialize)] +pub struct CommitMessageResponse { + pub oid: String, + pub message: String, +} + +#[derive(Debug, Clone, Serialize)] +pub struct CommitSummaryResponse { + pub oid: String, + pub summary: String, +} + +#[derive(Debug, Clone, Serialize)] +pub struct CommitShortIdResponse { + pub oid: String, + pub short_id: String, +} + +#[derive(Debug, Clone, Serialize)] +pub struct CommitAuthorResponse { + pub oid: String, + pub author: CommitSignatureResponse, +} + +#[derive(Debug, Clone, Serialize)] +pub struct CommitTreeIdResponse { + pub oid: String, + pub tree_id: String, +} + +#[derive(Debug, Clone, Serialize)] +pub struct CommitParentCountResponse { + pub oid: String, + pub parent_count: usize, +} + +#[derive(Debug, Clone, Serialize)] +pub struct CommitParentIdsResponse { + pub oid: String, + pub parent_ids: Vec, +} + +#[derive(Debug, Clone, Serialize)] +pub struct CommitIsMergeResponse { + pub oid: String, + pub is_merge: bool, +} + +#[derive(Debug, Clone, Serialize)] +pub struct CommitIsTipResponse { + pub oid: String, + pub is_tip: bool, +} + +#[derive(Debug, Clone, Serialize)] +pub struct CommitRefCountResponse { + pub oid: String, + pub ref_count: usize, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct CommitCountResponse { + pub count: usize, +} + +#[derive(Debug, Clone, Serialize)] +pub struct CommitRefInfoResponse { + pub name: String, + pub target: String, + pub is_remote: bool, + pub is_tag: bool, +} + +impl From for CommitRefInfoResponse { + fn from(r: CommitRefInfo) -> Self { + Self { + name: r.name, + target: r.target.to_string(), + is_remote: r.is_remote, + is_tag: r.is_tag, + } + } +} + +#[derive(Debug, Clone, 
Serialize)] +pub struct CommitBranchesResponse { + pub oid: String, + pub branches: Vec, +} + +#[derive(Debug, Clone, Serialize)] +pub struct CommitTagsResponse { + pub oid: String, + pub tags: Vec, +} + +#[derive(Debug, Clone, Serialize)] +pub struct CommitReflogEntryResponse { + pub oid_new: String, + pub oid_old: String, + pub committer_name: String, + pub committer_email: String, + pub time_secs: i64, + pub message: Option, +} + +impl From for CommitReflogEntryResponse { + fn from(e: CommitReflogEntry) -> Self { + Self { + oid_new: e.oid_new.to_string(), + oid_old: e.oid_old.to_string(), + committer_name: e.committer_name, + committer_email: e.committer_email, + time_secs: e.time_secs, + message: e.message, + } + } +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct CommitGraphResponse { + pub lines: Vec, + pub max_parents: usize, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct CommitGraphLineResponse { + pub oid: String, + pub graph_chars: String, + pub refs: String, + pub short_message: String, +} + +impl From for CommitGraphLineResponse { + fn from(l: git::CommitGraphLine) -> Self { + Self { + oid: l.oid.to_string(), + graph_chars: l.graph_chars, + refs: l.refs, + short_message: l.short_message, + } + } +} + +impl From for CommitGraphResponse { + fn from(g: CommitGraph) -> Self { + Self { + lines: g + .lines + .into_iter() + .map(CommitGraphLineResponse::from) + .collect(), + max_parents: g.max_parents, + } + } +} + +#[derive(Debug, Clone, Serialize)] +pub struct CommitDiffStatsResponse { + pub oid: String, + pub files_changed: usize, + pub insertions: usize, + pub deletions: usize, +} + +impl From for CommitDiffStatsResponse { + fn from(s: CommitDiffStats) -> Self { + Self { + oid: String::new(), + files_changed: s.files_changed, + insertions: s.insertions, + deletions: s.deletions, + } + } +} + +#[derive(Debug, Clone, Serialize)] +pub struct CommitDiffFileResponse { + pub path: Option, + pub status: String, + pub is_binary: 
bool, + pub size: u64, +} + +impl From for CommitDiffFileResponse { + fn from(f: CommitDiffFile) -> Self { + Self { + path: f.path, + status: f.status, + is_binary: f.is_binary, + size: f.size, + } + } +} + +#[derive(Debug, Clone, Serialize)] +pub struct CommitDiffHunkResponse { + pub old_start: u32, + pub old_lines: u32, + pub new_start: u32, + pub new_lines: u32, + pub header: String, +} + +impl From for CommitDiffHunkResponse { + fn from(h: CommitDiffHunk) -> Self { + Self { + old_start: h.old_start, + old_lines: h.old_lines, + new_start: h.new_start, + new_lines: h.new_lines, + header: h.header, + } + } +} + +#[derive(Debug, Clone, Deserialize)] +pub struct CommitLogQuery { + pub rev: Option, + pub limit: Option, +} + +#[derive(Debug, Clone, Deserialize)] +pub struct CommitWalkQuery { + pub rev: Option, + pub limit: Option, + #[serde(default)] + pub first_parent_only: bool, + #[serde(default)] + pub topological: bool, + #[serde(default)] + pub reverse: bool, +} + +#[derive(Debug, Clone, Deserialize)] +pub struct CommitAncestorsQuery { + pub oid: String, + pub limit: Option, +} + +#[derive(Debug, Clone, Deserialize)] +pub struct CommitDescendantsQuery { + pub oid: String, + pub limit: Option, +} + +#[derive(Debug, Clone, Deserialize)] +pub struct CommitResolveQuery { + pub rev: String, +} + +#[derive(Debug, Clone, Deserialize)] +pub struct CommitCherryPickRequest { + pub cherrypick_oid: String, + pub author_name: String, + pub author_email: String, + pub committer_name: String, + pub committer_email: String, + pub message: Option, + pub mainline: Option, + pub update_ref: Option, +} + +#[derive(Debug, Clone, Deserialize)] +pub struct CommitCherryPickAbortRequest { + pub reset_type: Option, +} + +#[derive(Debug, Clone, Deserialize)] +pub struct CommitRevertRequest { + pub revert_oid: String, + pub author_name: String, + pub author_email: String, + pub committer_name: String, + pub committer_email: String, + pub message: Option, + pub mainline: Option, + pub 
update_ref: Option, +} + +#[derive(Debug, Clone, Deserialize)] +pub struct CommitRevertAbortRequest { + pub reset_type: Option, +} + +#[derive(Debug, Clone, Serialize)] +pub struct CommitCreateResponse { + pub oid: String, +} + +#[derive(Debug, Clone, Deserialize)] +pub struct CommitCreateRequest { + pub author_name: String, + pub author_email: String, + pub committer_name: String, + pub committer_email: String, + pub message: String, + pub tree_id: String, + pub parent_ids: Vec, + pub update_ref: Option, +} + +#[derive(Debug, Clone, Deserialize)] +pub struct CommitAmendRequest { + pub oid: String, + pub author_name: Option, + pub author_email: Option, + pub committer_name: Option, + pub committer_email: Option, + pub message: Option, + pub message_encoding: Option, + pub tree_id: Option, + pub update_ref: Option, +} + +#[derive(Debug, Clone, Deserialize)] +pub struct CommitDiffQuery { + pub oid: String, +} + +macro_rules! git_spawn { + ($repo:expr, $domain:ident -> $body:expr) => {{ + let repo_clone = $repo.clone(); + tokio::task::spawn_blocking(move || { + let $domain = git::GitDomain::from_model(repo_clone)?; + $body + }) + .await + .map_err(|e| AppError::InternalServerError(format!("Task join error: {}", e)))? 
+ .map_err(AppError::from) + }}; +} + +impl AppService { + pub async fn git_commit_get( + &self, + namespace: String, + repo_name: String, + query: CommitGetQuery, + ctx: &Session, + ) -> Result { + let cache_key = format!( + "git:commit:get:{}:{}:{}", + namespace, repo_name, query.oid + ); + if let Ok(mut conn) = self.cache.conn().await { + if let Ok(cached) = conn.get::<_, String>(cache_key.clone()).await { + if let Ok(cached) = serde_json::from_str(&cached) { + return Ok(cached); + } + } + } + + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + let oid_str = query.oid.clone(); + + let meta = git_spawn!(repo, domain -> { + let oid = git::CommitOid::new(&oid_str); + domain.commit_get(&oid) + })?; + + let response = CommitMetaResponse::from(meta); + + if let Ok(mut conn) = self.cache.conn().await { + let _: Option<()> = conn + .set_ex::( + cache_key, + serde_json::to_string(&response).unwrap_or_default(), + 3600, + ) + .await + .ok(); + } + + Ok(response) + } + + pub async fn git_commit_exists( + &self, + namespace: String, + repo_name: String, + query: CommitGetQuery, + ctx: &Session, + ) -> Result { + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + let oid_str = query.oid.clone(); + + let exists = tokio::task::spawn_blocking(move || { + let domain = git::GitDomain::from_model(repo)?; + let oid = git::CommitOid::new(&oid_str); + Ok::<_, git::GitError>(domain.commit_exists(&oid)) + }) + .await + .map_err(|e| AppError::InternalServerError(format!("Task join error: {}", e)))? 
+ .map_err(AppError::from)?; + + Ok(CommitExistsResponse { + oid: query.oid, + exists, + }) + } + + pub async fn git_commit_is_commit( + &self, + namespace: String, + repo_name: String, + query: CommitGetQuery, + ctx: &Session, + ) -> Result { + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + let oid_str = query.oid.clone(); + + let is_commit = tokio::task::spawn_blocking(move || { + let domain = git::GitDomain::from_model(repo)?; + let oid = git::CommitOid::new(&oid_str); + Ok::<_, git::GitError>(domain.commit_is_commit(&oid)) + }) + .await + .map_err(|e| AppError::InternalServerError(format!("Task join error: {}", e)))? + .map_err(AppError::from)?; + + Ok(CommitIsCommitResponse { + oid: query.oid, + is_commit, + }) + } + + pub async fn git_commit_message( + &self, + namespace: String, + repo_name: String, + query: CommitGetQuery, + ctx: &Session, + ) -> Result { + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + let oid_str = query.oid.clone(); + + let message = git_spawn!(repo, domain -> { + let oid = git::CommitOid::new(&oid_str); + domain.commit_message(&oid) + })?; + + Ok(CommitMessageResponse { + oid: query.oid, + message, + }) + } + + pub async fn git_commit_summary( + &self, + namespace: String, + repo_name: String, + query: CommitGetQuery, + ctx: &Session, + ) -> Result { + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + let oid_str = query.oid.clone(); + + let summary = git_spawn!(repo, domain -> { + let oid = git::CommitOid::new(&oid_str); + domain.commit_summary(&oid) + })?; + + Ok(CommitSummaryResponse { + oid: query.oid, + summary, + }) + } + + pub async fn git_commit_short_id( + &self, + namespace: String, + repo_name: String, + query: CommitGetQuery, + ctx: &Session, + ) -> Result { + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + let oid_str = query.oid.clone(); + + let short_id = git_spawn!(repo, domain -> { + let oid = git::CommitOid::new(&oid_str); + 
domain.commit_short_id(&oid) + })?; + + Ok(CommitShortIdResponse { + oid: query.oid, + short_id, + }) + } + + pub async fn git_commit_author( + &self, + namespace: String, + repo_name: String, + query: CommitGetQuery, + ctx: &Session, + ) -> Result { + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + let oid_str = query.oid.clone(); + + let author = git_spawn!(repo, domain -> { + let oid = git::CommitOid::new(&oid_str); + domain.commit_author(&oid) + })?; + + Ok(CommitAuthorResponse { + oid: query.oid, + author: CommitSignatureResponse::from(author), + }) + } + + pub async fn git_commit_tree_id( + &self, + namespace: String, + repo_name: String, + query: CommitGetQuery, + ctx: &Session, + ) -> Result { + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + let oid_str = query.oid.clone(); + + let tree_id = git_spawn!(repo, domain -> { + let oid = git::CommitOid::new(&oid_str); + domain.commit_tree_id(&oid) + })?; + + Ok(CommitTreeIdResponse { + oid: query.oid, + tree_id: tree_id.to_string(), + }) + } + + pub async fn git_commit_parent_count( + &self, + namespace: String, + repo_name: String, + query: CommitGetQuery, + ctx: &Session, + ) -> Result { + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + let oid_str = query.oid.clone(); + + let parent_count = git_spawn!(repo, domain -> { + let oid = git::CommitOid::new(&oid_str); + domain.commit_parent_count(&oid) + })?; + + Ok(CommitParentCountResponse { + oid: query.oid, + parent_count, + }) + } + + pub async fn git_commit_parent_ids( + &self, + namespace: String, + repo_name: String, + query: CommitGetQuery, + ctx: &Session, + ) -> Result { + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + let oid_str = query.oid.clone(); + + let parent_ids = git_spawn!(repo, domain -> { + let oid = git::CommitOid::new(&oid_str); + domain.commit_parent_ids(&oid) + })?; + + Ok(CommitParentIdsResponse { + oid: query.oid, + parent_ids: 
parent_ids.into_iter().map(|p| p.to_string()).collect(), + }) + } + + pub async fn git_commit_parent( + &self, + namespace: String, + repo_name: String, + oid: String, + index: usize, + ctx: &Session, + ) -> Result { + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + let oid_str = oid.clone(); + + let parent = git_spawn!(repo, domain -> { + let commit_oid = git::CommitOid::new(&oid_str); + domain.commit_parent(&commit_oid, index) + })?; + + Ok(CommitMetaResponse::from(parent)) + } + + pub async fn git_commit_first_parent( + &self, + namespace: String, + repo_name: String, + query: CommitGetQuery, + ctx: &Session, + ) -> Result, AppError> { + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + let oid_str = query.oid.clone(); + + let parent = git_spawn!(repo, domain -> { + let oid = git::CommitOid::new(&oid_str); + domain.commit_first_parent(&oid) + })?; + + Ok(parent.map(CommitMetaResponse::from)) + } + + pub async fn git_commit_is_merge( + &self, + namespace: String, + repo_name: String, + query: CommitGetQuery, + ctx: &Session, + ) -> Result { + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + let oid_str = query.oid.clone(); + + let is_merge = git_spawn!(repo, domain -> { + let oid = git::CommitOid::new(&oid_str); + domain.commit_is_merge(&oid) + })?; + + Ok(CommitIsMergeResponse { + oid: query.oid, + is_merge, + }) + } + + pub async fn git_commit_log( + &self, + namespace: String, + repo_name: String, + query: CommitLogQuery, + ctx: &Session, + ) -> Result, AppError> { + let cache_key = format!( + "git:commit:log:{}:{}:{:?}:{}", + namespace, + repo_name, + query.rev, + query.limit.unwrap_or(0), + ); + if let Ok(mut conn) = self.cache.conn().await { + if let Ok(cached) = conn.get::<_, String>(cache_key.clone()).await { + if let Ok(cached) = serde_json::from_str(&cached) { + return Ok(cached); + } + } + } + + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + let rev_clone = 
query.rev.clone(); + let limit = query.limit.unwrap_or(0); + + let commits = git_spawn!(repo, domain -> { + domain.commit_log(rev_clone.as_deref(), limit) + })?; + + let response: Vec = + commits.into_iter().map(CommitMetaResponse::from).collect(); + + if let Ok(mut conn) = self.cache.conn().await { + let _: Option<()> = conn + .set_ex::( + cache_key, + serde_json::to_string(&response).unwrap_or_default(), + 300, + ) + .await + .ok(); + } + + Ok(response) + } + + pub async fn git_commit_count( + &self, + namespace: String, + repo_name: String, + from: Option, + to: Option, + ctx: &Session, + ) -> Result { + let cache_key = format!( + "git:commit:count:{}:{}:{:?}:{:?}", + namespace, repo_name, from, to, + ); + if let Ok(mut conn) = self.cache.conn().await { + if let Ok(cached) = conn.get::<_, String>(cache_key.clone()).await { + if let Ok(cached) = serde_json::from_str(&cached) { + return Ok(cached); + } + } + } + + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + let from_clone = from.clone(); + let to_clone = to.clone(); + + let count = git_spawn!(repo, domain -> { + domain.commit_count(from_clone.as_deref(), to_clone.as_deref()) + })?; + + let response = CommitCountResponse { count }; + + if let Ok(mut conn) = self.cache.conn().await { + let _: Option<()> = conn + .set_ex::( + cache_key, + serde_json::to_string(&response).unwrap_or_default(), + 300, + ) + .await + .ok(); + } + + Ok(response) + } + + pub async fn git_commit_refs( + &self, + namespace: String, + repo_name: String, + query: CommitGetQuery, + ctx: &Session, + ) -> Result, AppError> { + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + let oid_str = query.oid.clone(); + + let refs = git_spawn!(repo, domain -> { + let oid = git::CommitOid::new(&oid_str); + domain.commit_refs(&oid) + })?; + + Ok(refs.into_iter().map(CommitRefInfoResponse::from).collect()) + } + + pub async fn git_commit_branches( + &self, + namespace: String, + repo_name: String, + query: 
CommitGetQuery, + ctx: &Session, + ) -> Result { + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + let oid_str = query.oid.clone(); + + let branches = git_spawn!(repo, domain -> { + let oid = git::CommitOid::new(&oid_str); + domain.commit_branches(&oid) + })?; + + Ok(CommitBranchesResponse { + oid: query.oid, + branches, + }) + } + + pub async fn git_commit_tags( + &self, + namespace: String, + repo_name: String, + query: CommitGetQuery, + ctx: &Session, + ) -> Result { + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + let oid_str = query.oid.clone(); + + let tags = git_spawn!(repo, domain -> { + let oid = git::CommitOid::new(&oid_str); + domain.commit_tags(&oid) + })?; + + Ok(CommitTagsResponse { + oid: query.oid, + tags, + }) + } + + pub async fn git_commit_is_tip( + &self, + namespace: String, + repo_name: String, + query: CommitGetQuery, + ctx: &Session, + ) -> Result { + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + let oid_str = query.oid.clone(); + + let is_tip = git_spawn!(repo, domain -> { + let oid = git::CommitOid::new(&oid_str); + domain.commit_is_tip(&oid) + })?; + + Ok(CommitIsTipResponse { + oid: query.oid, + is_tip, + }) + } + + pub async fn git_commit_ref_count( + &self, + namespace: String, + repo_name: String, + query: CommitGetQuery, + ctx: &Session, + ) -> Result { + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + let oid_str = query.oid.clone(); + + let ref_count = git_spawn!(repo, domain -> { + let oid = git::CommitOid::new(&oid_str); + domain.commit_ref_count(&oid) + })?; + + Ok(CommitRefCountResponse { + oid: query.oid, + ref_count, + }) + } + + pub async fn git_commit_reflog( + &self, + namespace: String, + repo_name: String, + query: CommitGetQuery, + refname: Option, + ctx: &Session, + ) -> Result, AppError> { + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + let oid_str = query.oid.clone(); + let refname_clone = refname.clone(); + 
+ let entries = git_spawn!(repo, domain -> { + let oid = git::CommitOid::new(&oid_str); + domain.commit_reflog(&oid, refname_clone.as_deref()) + })?; + + Ok(entries + .into_iter() + .map(CommitReflogEntryResponse::from) + .collect()) + } + + pub async fn git_commit_graph( + &self, + namespace: String, + repo_name: String, + query: CommitWalkQuery, + ctx: &Session, + ) -> Result { + let cache_key = format!( + "git:commit:graph:{}:{}:{:?}:{}", + namespace, + repo_name, + query.rev, + query.limit.unwrap_or(0), + ); + if let Ok(mut conn) = self.cache.conn().await { + if let Ok(cached) = conn.get::<_, String>(cache_key.clone()).await { + if let Ok(cached) = serde_json::from_str(&cached) { + return Ok(cached); + } + } + } + + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + let rev_clone = query.rev.clone(); + let limit = query.limit.unwrap_or(0); + + let graph = git_spawn!(repo, domain -> { + domain.commit_graph_simple(rev_clone.as_deref(), limit) + })?; + + let response = CommitGraphResponse::from(graph); + + if let Ok(mut conn) = self.cache.conn().await { + let _: Option<()> = conn + .set_ex::( + cache_key, + serde_json::to_string(&response).unwrap_or_default(), + 300, + ) + .await + .ok(); + } + + Ok(response) + } + + pub async fn git_commit_walk( + &self, + namespace: String, + repo_name: String, + query: CommitWalkQuery, + ctx: &Session, + ) -> Result, AppError> { + let cache_key = format!( + "git:commit:walk:{}:{}:{:?}:{}:{}:{}:{}", + namespace, + repo_name, + query.rev, + query.limit.unwrap_or(0), + query.first_parent_only, + query.topological, + query.reverse, + ); + if let Ok(mut conn) = self.cache.conn().await { + if let Ok(cached) = conn.get::<_, String>(cache_key.clone()).await { + if let Ok(cached) = serde_json::from_str(&cached) { + return Ok(cached); + } + } + } + + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + let rev_clone = query.rev.clone(); + let limit = query.limit.unwrap_or(0); + let first_parent_only = 
query.first_parent_only; + let topological = query.topological; + let reverse = query.reverse; + + let sort = if topological && reverse { + CommitSort(CommitSort::TOPOLOGICAL.0 | CommitSort::TIME.0 | CommitSort::REVERSE.0) + } else if topological { + CommitSort(CommitSort::TOPOLOGICAL.0 | CommitSort::TIME.0) + } else if reverse { + CommitSort(CommitSort::TIME.0 | CommitSort::REVERSE.0) + } else { + CommitSort(CommitSort::TOPOLOGICAL.0 | CommitSort::TIME.0) + }; + + let commits = git_spawn!(repo, domain -> { + domain.commit_walk(CommitWalkOptions { + rev: rev_clone, + sort, + limit, + first_parent_only, + }) + })?; + + let response: Vec = + commits.into_iter().map(CommitMetaResponse::from).collect(); + + if let Ok(mut conn) = self.cache.conn().await { + let _: Option<()> = conn + .set_ex::( + cache_key, + serde_json::to_string(&response).unwrap_or_default(), + 300, + ) + .await + .ok(); + } + + Ok(response) + } + + pub async fn git_commit_ancestors( + &self, + namespace: String, + repo_name: String, + query: CommitAncestorsQuery, + ctx: &Session, + ) -> Result, AppError> { + let cache_key = format!( + "git:commit:ancestors:{}:{}:{}:{}", + namespace, + repo_name, + query.oid, + query.limit.unwrap_or(0), + ); + if let Ok(mut conn) = self.cache.conn().await { + if let Ok(cached) = conn.get::<_, String>(cache_key.clone()).await { + if let Ok(cached) = serde_json::from_str(&cached) { + return Ok(cached); + } + } + } + + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + let oid_str = query.oid.clone(); + let limit = query.limit.unwrap_or(0); + + let commits = git_spawn!(repo, domain -> { + let oid = git::CommitOid::new(&oid_str); + domain.commit_ancestors(&oid, limit) + })?; + + let response: Vec = + commits.into_iter().map(CommitMetaResponse::from).collect(); + + if let Ok(mut conn) = self.cache.conn().await { + let _: Option<()> = conn + .set_ex::( + cache_key, + serde_json::to_string(&response).unwrap_or_default(), + 300, + ) + .await + .ok(); + } + 
+ Ok(response) + } + + pub async fn git_commit_descendants( + &self, + namespace: String, + repo_name: String, + query: CommitDescendantsQuery, + ctx: &Session, + ) -> Result, AppError> { + let cache_key = format!( + "git:commit:descendants:{}:{}:{}:{}", + namespace, + repo_name, + query.oid, + query.limit.unwrap_or(0), + ); + if let Ok(mut conn) = self.cache.conn().await { + if let Ok(cached) = conn.get::<_, String>(cache_key.clone()).await { + if let Ok(cached) = serde_json::from_str(&cached) { + return Ok(cached); + } + } + } + + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + let oid_str = query.oid.clone(); + let limit = query.limit.unwrap_or(0); + + let commits = git_spawn!(repo, domain -> { + let oid = git::CommitOid::new(&oid_str); + domain.commit_descendants(&oid, limit) + })?; + + let response: Vec = + commits.into_iter().map(CommitMetaResponse::from).collect(); + + if let Ok(mut conn) = self.cache.conn().await { + let _: Option<()> = conn + .set_ex::( + cache_key, + serde_json::to_string(&response).unwrap_or_default(), + 300, + ) + .await + .ok(); + } + + Ok(response) + } + + pub async fn git_commit_resolve_rev( + &self, + namespace: String, + repo_name: String, + query: CommitResolveQuery, + ctx: &Session, + ) -> Result { + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + let rev_str = query.rev.clone(); + + let oid = git_spawn!(repo, domain -> { + domain.resolve_rev(&rev_str) + })?; + + Ok(oid.to_string()) + } + + pub async fn git_commit_create( + &self, + namespace: String, + repo_name: String, + request: CommitCreateRequest, + ctx: &Session, + ) -> Result { + let repo: repo::Model = self.utils_check_repo_admin(namespace, repo_name, ctx).await?; + let parent_ids: Vec<_> = request + .parent_ids + .iter() + .map(|p| git::CommitOid::new(p)) + .collect(); + let author = git::CommitSignature { + name: request.author_name, + email: request.author_email, + time_secs: chrono::Utc::now().timestamp(), + offset_minutes: 
0, + }; + let committer = git::CommitSignature { + name: request.committer_name, + email: request.committer_email, + time_secs: chrono::Utc::now().timestamp(), + offset_minutes: 0, + }; + let tree_id = git::CommitOid::new(&request.tree_id); + let update_ref = request.update_ref.clone(); + + let oid = git_spawn!(repo, domain -> { + domain.commit_create( + update_ref.as_deref(), + &author, + &committer, + &request.message, + &tree_id, + &parent_ids, + ) + })?; + + Ok(CommitCreateResponse { + oid: oid.to_string(), + }) + } + + pub async fn git_commit_amend( + &self, + namespace: String, + repo_name: String, + request: CommitAmendRequest, + ctx: &Session, + ) -> Result { + let repo: repo::Model = self.utils_check_repo_admin(namespace, repo_name, ctx).await?; + let oid = git::CommitOid::new(&request.oid); + let author = if request.author_name.is_some() && request.author_email.is_some() { + Some(git::CommitSignature { + name: request.author_name.unwrap(), + email: request.author_email.unwrap(), + time_secs: 0, + offset_minutes: 0, + }) + } else { + None + }; + let committer = + if request.committer_name.is_some() && request.committer_email.is_some() { + Some(git::CommitSignature { + name: request.committer_name.unwrap(), + email: request.committer_email.unwrap(), + time_secs: 0, + offset_minutes: 0, + }) + } else { + None + }; + let tree_id = request.tree_id.as_ref().map(|t| git::CommitOid::new(t)); + let update_ref = request.update_ref.clone(); + let message_encoding = request.message_encoding.clone(); + let message = request.message.clone(); + + let new_oid = git_spawn!(repo, domain -> { + domain.commit_amend( + &oid, + update_ref.as_deref(), + author.as_ref(), + committer.as_ref(), + message_encoding.as_deref(), + message.as_deref(), + tree_id.as_ref(), + ) + })?; + + Ok(CommitCreateResponse { + oid: new_oid.to_string(), + }) + } + + pub async fn git_commit_cherry_pick( + &self, + namespace: String, + repo_name: String, + request: CommitCherryPickRequest, + ctx: 
&Session, + ) -> Result { + let repo: repo::Model = self.utils_check_repo_admin(namespace, repo_name, ctx).await?; + let cherrypick_oid = git::CommitOid::new(&request.cherrypick_oid); + let author = git::CommitSignature { + name: request.author_name, + email: request.author_email, + time_secs: chrono::Utc::now().timestamp(), + offset_minutes: 0, + }; + let committer = git::CommitSignature { + name: request.committer_name, + email: request.committer_email, + time_secs: chrono::Utc::now().timestamp(), + offset_minutes: 0, + }; + let message = request.message.clone(); + let mainline = request.mainline.unwrap_or(0); + let update_ref = request.update_ref.clone(); + + let oid = git_spawn!(repo, domain -> { + domain.commit_cherry_pick( + &cherrypick_oid, + &author, + &committer, + message.as_deref(), + mainline, + update_ref.as_deref(), + ) + })?; + + Ok(CommitCreateResponse { + oid: oid.to_string(), + }) + } + + pub async fn git_commit_cherry_pick_abort( + &self, + namespace: String, + repo_name: String, + request: CommitCherryPickAbortRequest, + ctx: &Session, + ) -> Result<(), AppError> { + let repo: repo::Model = self.utils_check_repo_admin(namespace, repo_name, ctx).await?; + let reset_type = request.reset_type.clone(); + + git_spawn!(repo, domain -> { + domain.commit_cherry_pick_abort(reset_type.as_deref().unwrap_or("hard")) + })?; + + Ok(()) + } + + pub async fn git_commit_revert( + &self, + namespace: String, + repo_name: String, + request: CommitRevertRequest, + ctx: &Session, + ) -> Result { + let repo: repo::Model = self.utils_check_repo_admin(namespace, repo_name, ctx).await?; + let revert_oid = git::CommitOid::new(&request.revert_oid); + let author = git::CommitSignature { + name: request.author_name, + email: request.author_email, + time_secs: chrono::Utc::now().timestamp(), + offset_minutes: 0, + }; + let committer = git::CommitSignature { + name: request.committer_name, + email: request.committer_email, + time_secs: chrono::Utc::now().timestamp(), + 
offset_minutes: 0, + }; + let message = request.message.clone(); + let mainline = request.mainline.unwrap_or(0); + let update_ref = request.update_ref.clone(); + + let oid = git_spawn!(repo, domain -> { + domain.commit_revert( + &revert_oid, + &author, + &committer, + message.as_deref(), + mainline, + update_ref.as_deref(), + ) + })?; + + Ok(CommitCreateResponse { + oid: oid.to_string(), + }) + } + + pub async fn git_commit_revert_abort( + &self, + namespace: String, + repo_name: String, + request: CommitRevertAbortRequest, + ctx: &Session, + ) -> Result<(), AppError> { + let repo: repo::Model = self.utils_check_repo_admin(namespace, repo_name, ctx).await?; + let reset_type = request.reset_type.clone(); + + git_spawn!(repo, domain -> { + domain.commit_revert_abort(reset_type.as_deref().unwrap_or("hard")) + })?; + + Ok(()) + } +} diff --git a/libs/service/git/contributors.rs b/libs/service/git/contributors.rs new file mode 100644 index 0000000..ad06ed1 --- /dev/null +++ b/libs/service/git/contributors.rs @@ -0,0 +1,132 @@ +use crate::AppService; +use crate::error::AppError; +use redis::AsyncCommands; +use serde::{Deserialize, Serialize}; +use session::Session; +use std::collections::HashMap; + +#[derive(Debug, Clone, Deserialize)] +pub struct ContributorsQuery { + #[serde(default = "default_limit")] + pub limit: usize, + #[serde(default)] + pub ref_name: Option, +} + +fn default_limit() -> usize { + 100 +} + +#[derive(Debug, Clone, Serialize, Deserialize, utoipa::ToSchema)] +pub struct ContributorStats { + pub name: String, + pub email: String, + pub commits: usize, + pub first_commit_at: Option, + pub last_commit_at: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize, utoipa::ToSchema)] +pub struct ContributorsResponse { + pub total: usize, + pub contributors: Vec, +} + +struct ContributorEntry { + name: String, + email: String, + commits: usize, + first_commit_at: Option, + last_commit_at: Option, +} + +impl AppService { + pub async fn git_contributors( + 
&self, + namespace: String, + repo_name: String, + query: ContributorsQuery, + ctx: &Session, + ) -> Result { + let repo = self + .utils_find_repo(namespace.clone(), repo_name.clone(), ctx) + .await?; + let cache_key = format!( + "git:contributors:{}:{}:{:?}:{}", + namespace, repo_name, query.ref_name, query.limit, + ); + + if let Ok(mut conn) = self.cache.conn().await { + if let Ok(cached) = conn.get::<_, String>(cache_key.clone()).await { + if let Ok(cached) = serde_json::from_str(&cached) { + return Ok(cached); + } + } + } + + let repo_clone = repo.clone(); + let ref_name_clone = query.ref_name.clone(); + + let commits = tokio::task::spawn_blocking(move || { + let domain = git::GitDomain::from_model(repo_clone)?; + domain.commit_log(ref_name_clone.as_deref(), 0, query.limit) + }) + .await + .map_err(|e| AppError::InternalServerError(format!("Task join error: {}", e)))? + .map_err(AppError::from)?; + + let mut author_map: HashMap = HashMap::new(); + for commit in commits { + let author = commit.author; + let time = author.time_secs; + // Use email as primary key (case-insensitive) for deduplication. + // If the same person uses multiple emails, they appear as separate contributors — + // this is the best we can do without an external identity service. 
+ let key = author.email.to_lowercase(); + let entry = author_map.entry(key).or_insert_with(|| ContributorEntry { + name: author.name.clone(), + email: author.email.clone(), + commits: 0, + first_commit_at: None, + last_commit_at: None, + }); + entry.commits += 1; + entry.first_commit_at = + Some(entry.first_commit_at.map(|f| f.min(time)).unwrap_or(time)); + entry.last_commit_at = Some(entry.last_commit_at.map(|l| l.max(time)).unwrap_or(time)); + } + + let mut contributors: Vec = author_map + .into_values() + .map(|e| ContributorStats { + name: e.name, + email: e.email, + commits: e.commits, + first_commit_at: e.first_commit_at, + last_commit_at: e.last_commit_at, + }) + .collect(); + contributors.sort_by(|a, b| b.commits.cmp(&a.commits)); + + let total = contributors.len(); + let response = ContributorsResponse { + total, + contributors, + }; + + if let Ok(mut conn) = self.cache.conn().await { + if let Err(e) = conn + .set_ex::( + cache_key, + serde_json::to_string(&response).unwrap_or_default(), + 60 * 60, + ) + .await + { + slog::debug!(self.logs, "cache set failed (non-fatal): {}", e); + } + } + + Ok(response) + } +} diff --git a/libs/service/git/diff.rs b/libs/service/git/diff.rs new file mode 100644 index 0000000..8ffb644 --- /dev/null +++ b/libs/service/git/diff.rs @@ -0,0 +1,632 @@ +use crate::AppService; +use crate::error::AppError; +use crate::git::{DiffDelta, DiffOptions, DiffResult, DiffStats, GitError, SideBySideDiffResult}; +use redis::AsyncCommands; +use serde::{Deserialize, Serialize}; +use session::Session; + +#[derive(Debug, Clone, Deserialize, utoipa::ToSchema, utoipa::IntoParams)] +pub struct DiffQuery { + #[serde(default)] + pub old_tree: Option, + #[serde(default)] + pub new_tree: Option, + #[serde(default)] + pub context_lines: Option, + #[serde(default)] + pub pathspec: Option>, + #[serde(default)] + pub include_untracked: Option, + #[serde(default)] + pub include_ignored: Option, + #[serde(default)] + pub ignore_whitespace: Option, + 
#[serde(default)] + pub force_text: Option, + #[serde(default)] + pub reverse: Option, +} + +impl DiffQuery { + fn to_diff_options(&self) -> DiffOptions { + let mut opts = DiffOptions::new(); + if let Some(n) = self.context_lines { + opts = opts.context_lines(n); + } + if let Some(ref paths) = self.pathspec { + for p in paths { + opts = opts.pathspec(p); + } + } + if self.include_untracked.unwrap_or(false) { + opts = opts.include_untracked(); + } + if self.include_ignored.unwrap_or(false) { + opts = opts.include_ignored(); + } + if self.ignore_whitespace.unwrap_or(false) { + opts = opts.ignore_whitespace(); + } + if self.force_text.unwrap_or(false) { + opts = opts.force_text(); + } + if self.reverse.unwrap_or(false) { + opts = opts.reverse(); + } + opts + } +} + +#[derive(Debug, Clone, Deserialize, utoipa::ToSchema)] +pub struct DiffCommitQuery { + #[serde(default)] + pub commit: String, + #[serde(flatten)] + #[serde(default)] + pub diff_opts: DiffQuery, +} + +#[derive(Debug, Clone, Serialize, Deserialize, utoipa::ToSchema)] +pub struct DiffStatsResponse { + pub files_changed: usize, + pub insertions: usize, + pub deletions: usize, +} + +impl From for DiffStatsResponse { + fn from(s: DiffStats) -> Self { + Self { + files_changed: s.files_changed, + insertions: s.insertions, + deletions: s.deletions, + } + } +} + +#[derive(Debug, Clone, Serialize, Deserialize, utoipa::ToSchema)] +pub struct DiffDeltaResponse { + pub status: String, + pub old_file: DiffFileResponse, + pub new_file: DiffFileResponse, + pub nfiles: u16, + pub hunks: Vec, + pub lines: Vec, +} + +#[derive(Debug, Clone, Serialize, Deserialize, utoipa::ToSchema)] +pub struct DiffFileResponse { + pub oid: Option, + pub path: Option, + pub size: u64, + pub is_binary: bool, +} + +#[derive(Debug, Clone, Serialize, Deserialize, utoipa::ToSchema)] +pub struct DiffHunkResponse { + pub old_start: u32, + pub old_lines: u32, + pub new_start: u32, + pub new_lines: u32, + pub header: String, +} + +#[derive(Debug, 
Clone, Serialize, Deserialize, utoipa::ToSchema)] +pub struct DiffLineResponse { + pub content: String, + pub origin: String, + pub old_lineno: Option, + pub new_lineno: Option, + pub num_lines: u32, + pub content_offset: i64, +} + +#[derive(Debug, Clone, Serialize, Deserialize, utoipa::ToSchema)] +pub struct DiffResultResponse { + pub stats: DiffStatsResponse, + pub deltas: Vec, +} + +impl From for DiffResultResponse { + fn from(r: DiffResult) -> Self { + Self { + stats: DiffStatsResponse::from(r.stats), + deltas: r.deltas.into_iter().map(DiffDeltaResponse::from).collect(), + } + } +} + +impl From for DiffDeltaResponse { + fn from(d: DiffDelta) -> Self { + Self { + status: format!("{:?}", d.status).to_lowercase(), + old_file: DiffFileResponse { + oid: d.old_file.oid.map(|o| o.to_string()), + path: d.old_file.path, + size: d.old_file.size, + is_binary: d.old_file.is_binary, + }, + new_file: DiffFileResponse { + oid: d.new_file.oid.map(|o| o.to_string()), + path: d.new_file.path, + size: d.new_file.size, + is_binary: d.new_file.is_binary, + }, + nfiles: d.nfiles, + hunks: d + .hunks + .into_iter() + .map(|h| DiffHunkResponse { + old_start: h.old_start, + old_lines: h.old_lines, + new_start: h.new_start, + new_lines: h.new_lines, + header: h.header, + }) + .collect(), + lines: d + .lines + .into_iter() + .map(|l| DiffLineResponse { + content: l.content, + origin: l.origin.to_string(), + old_lineno: l.old_lineno, + new_lineno: l.new_lineno, + num_lines: l.num_lines, + content_offset: l.content_offset, + }) + .collect(), + } + } +} + +#[derive(Debug, Clone, Serialize, utoipa::ToSchema)] +pub struct DiffPatchIdResponse { + pub old_tree: String, + pub new_tree: String, + pub patch_id: String, +} + +// --------------------------------------------------------------------------- +// Side-by-side diff response types +// --------------------------------------------------------------------------- + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize, 
utoipa::ToSchema)] +#[serde(rename_all = "lowercase")] +pub enum SideBySideChangeTypeResponse { + Unchanged, + Added, + Removed, + Modified, + Empty, +} + +impl From for SideBySideChangeTypeResponse { + fn from(v: crate::git::SideBySideChangeType) -> Self { + match v { + crate::git::SideBySideChangeType::Unchanged => Self::Unchanged, + crate::git::SideBySideChangeType::Added => Self::Added, + crate::git::SideBySideChangeType::Removed => Self::Removed, + crate::git::SideBySideChangeType::Modified => Self::Modified, + crate::git::SideBySideChangeType::Empty => Self::Empty, + } + } +} + +#[derive(Debug, Clone, Serialize, Deserialize, utoipa::ToSchema)] +pub struct SideBySideLineResponse { + pub left_line_no: Option, + pub right_line_no: Option, + pub left_content: String, + pub right_content: String, + pub change_type: SideBySideChangeTypeResponse, +} + +#[derive(Debug, Clone, Serialize, Deserialize, utoipa::ToSchema)] +pub struct SideBySideFileResponse { + pub path: String, + pub additions: usize, + pub deletions: usize, + pub is_binary: bool, + pub is_rename: bool, + pub lines: Vec, +} + +#[derive(Debug, Clone, Serialize, Deserialize, utoipa::ToSchema)] +pub struct SideBySideDiffResponse { + pub files: Vec, + pub total_additions: usize, + pub total_deletions: usize, +} + +impl From for SideBySideDiffResponse { + fn from(r: SideBySideDiffResult) -> Self { + Self { + files: r + .files + .into_iter() + .map(|f| SideBySideFileResponse { + path: f.path, + additions: f.additions, + deletions: f.deletions, + is_binary: f.is_binary, + is_rename: f.is_rename, + lines: f + .lines + .into_iter() + .map(|l| SideBySideLineResponse { + left_line_no: l.left_line_no, + right_line_no: l.right_line_no, + left_content: l.left_content, + right_content: l.right_content, + change_type: l.change_type.into(), + }) + .collect(), + }) + .collect(), + total_additions: r.total_additions, + total_deletions: r.total_deletions, + } + } +} + +/// Query parameters for side-by-side diff. 
+#[derive(Debug, Clone, Deserialize, utoipa::IntoParams, utoipa::ToSchema)] +pub struct SideBySideDiffQuery { + /// OID (SHA) of the base / old commit or tree. + pub base: String, + /// OID (SHA) of the head / new commit or tree. + pub head: String, + /// Optional path filter — only include files matching this prefix. + #[serde(default)] + pub pathspec: Option>, + /// Number of context lines around changes (default 3). + #[serde(default)] + pub context_lines: Option, +} + +impl AppService { + pub async fn git_diff_tree_to_tree( + &self, + namespace: String, + repo_name: String, + query: DiffQuery, + ctx: &Session, + ) -> Result { + let repo = self + .utils_find_repo(namespace.clone(), repo_name.clone(), ctx) + .await?; + let cache_key = format!( + "git:diff:{}:{}:{}:{}", + namespace, + repo_name, + query.old_tree.as_deref().unwrap_or(""), + query.new_tree.as_deref().unwrap_or(""), + ); + if let Ok(mut conn) = self.cache.conn().await { + if let Ok(cached) = conn.get::<_, String>(cache_key.clone()).await { + if let Ok(cached) = serde_json::from_str(&cached) { + return Ok(cached); + } + } + } + + let old_tree = query + .old_tree + .as_ref() + .map(|s| git::CommitOid::new(s.as_str())); + let new_tree = git::CommitOid::new( + query + .new_tree + .as_deref() + .ok_or_else(|| AppError::BadRequest("new_tree is required".into()))?, + ); + let opts = query.to_diff_options(); + + let result = tokio::task::spawn_blocking(move || { + let domain = git::GitDomain::from_model(repo)?; + domain.diff_tree_to_tree(old_tree.as_ref(), Some(&new_tree), Some(opts)) + }) + .await + .map_err(|e| AppError::InternalServerError(format!("Task join error: {}", e)))? 
+ .map_err(AppError::from)?; + + let response = DiffResultResponse::from(result); + + if let Ok(mut conn) = self.cache.conn().await { + if let Err(e) = conn + .set_ex::( + cache_key, + serde_json::to_string(&response).unwrap_or_default(), + 60 * 60, + ) + .await + { + slog::debug!(self.logs, "cache set failed (non-fatal): {}", e); + } + } + + Ok(response) + } + + pub async fn git_diff_commit_to_workdir( + &self, + namespace: String, + repo_name: String, + query: DiffCommitQuery, + ctx: &Session, + ) -> Result { + let repo = self + .utils_find_repo(namespace.clone(), repo_name.clone(), ctx) + .await?; + let cache_key = format!("git:diff:c2wd:{}:{}:{}", namespace, repo_name, query.commit,); + + if let Ok(mut conn) = self.cache.conn().await { + if let Ok(cached) = conn.get::<_, String>(cache_key.clone()).await { + if let Ok(cached) = serde_json::from_str(&cached) { + return Ok(cached); + } + } + } + + let commit = git::CommitOid::new(&query.commit); + let opts = query.diff_opts.to_diff_options(); + + let result = tokio::task::spawn_blocking(move || { + let domain = git::GitDomain::from_model(repo)?; + domain.diff_commit_to_workdir(&commit, Some(opts)) + }) + .await + .map_err(|e| AppError::InternalServerError(format!("Task join error: {}", e)))? 
+ .map_err(AppError::from)?; + + let response = DiffResultResponse::from(result); + + if let Ok(mut conn) = self.cache.conn().await { + if let Err(e) = conn + .set_ex::( + cache_key, + serde_json::to_string(&response).unwrap_or_default(), + 60 * 60, + ) + .await + { + slog::debug!(self.logs, "cache set failed (non-fatal): {}", e); + } + } + + Ok(response) + } + + pub async fn git_diff_commit_to_index( + &self, + namespace: String, + repo_name: String, + query: DiffCommitQuery, + ctx: &Session, + ) -> Result { + let repo = self + .utils_find_repo(namespace.clone(), repo_name.clone(), ctx) + .await?; + let cache_key = format!( + "git:diff:c2idx:{}:{}:{}", + namespace, repo_name, query.commit, + ); + + if let Ok(mut conn) = self.cache.conn().await { + if let Ok(cached) = conn.get::<_, String>(cache_key.clone()).await { + if let Ok(cached) = serde_json::from_str(&cached) { + return Ok(cached); + } + } + } + + let commit = git::CommitOid::new(&query.commit); + let opts = query.diff_opts.to_diff_options(); + + let result = tokio::task::spawn_blocking(move || { + let domain = git::GitDomain::from_model(repo)?; + domain.diff_commit_to_index(&commit, Some(opts)) + }) + .await + .map_err(|e| AppError::InternalServerError(format!("Task join error: {}", e)))? 
+ .map_err(AppError::from)?; + + let response = DiffResultResponse::from(result); + + if let Ok(mut conn) = self.cache.conn().await { + if let Err(e) = conn + .set_ex::( + cache_key, + serde_json::to_string(&response).unwrap_or_default(), + 60 * 60, + ) + .await + { + slog::debug!(self.logs, "cache set failed (non-fatal): {}", e); + } + } + + Ok(response) + } + + pub async fn git_diff_workdir_to_index( + &self, + namespace: String, + repo_name: String, + query: DiffQuery, + ctx: &Session, + ) -> Result { + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + let opts = query.to_diff_options(); + + let result = tokio::task::spawn_blocking(move || { + let domain = git::GitDomain::from_model(repo)?; + domain.diff_workdir_to_index(Some(opts)) + }) + .await + .map_err(|e| AppError::InternalServerError(format!("Task join error: {}", e)))? + .map_err(AppError::from)?; + + Ok(DiffResultResponse::from(result)) + } + + pub async fn git_diff_index_to_tree( + &self, + namespace: String, + repo_name: String, + query: DiffQuery, + ctx: &Session, + ) -> Result { + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + let tree = git::CommitOid::new( + query + .new_tree + .as_deref() + .ok_or_else(|| AppError::BadRequest("new_tree is required".into()))?, + ); + let opts = query.to_diff_options(); + + let result = tokio::task::spawn_blocking(move || { + let domain = git::GitDomain::from_model(repo)?; + domain.diff_index_to_tree(&tree, Some(opts)) + }) + .await + .map_err(|e| AppError::InternalServerError(format!("Task join error: {}", e)))? 
+ .map_err(AppError::from)?; + + Ok(DiffResultResponse::from(result)) + } + + pub async fn git_diff_stats( + &self, + namespace: String, + repo_name: String, + query: DiffQuery, + ctx: &Session, + ) -> Result { + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + let old_tree = git::CommitOid::new( + query + .old_tree + .as_deref() + .ok_or_else(|| AppError::BadRequest("old_tree is required".into()))?, + ); + let new_tree = git::CommitOid::new( + query + .new_tree + .as_deref() + .ok_or_else(|| AppError::BadRequest("new_tree is required".into()))?, + ); + + let stats = tokio::task::spawn_blocking(move || { + let domain = git::GitDomain::from_model(repo)?; + domain.diff_stats(&old_tree, &new_tree) + }) + .await + .map_err(|e| AppError::InternalServerError(format!("Task join error: {}", e)))? + .map_err(AppError::from)?; + + Ok(DiffStatsResponse::from(stats)) + } + + pub async fn git_diff_patch_id( + &self, + namespace: String, + repo_name: String, + query: DiffQuery, + ctx: &Session, + ) -> Result { + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + let old_tree = git::CommitOid::new( + query + .old_tree + .as_deref() + .ok_or_else(|| AppError::BadRequest("old_tree is required".into()))?, + ); + let new_tree = git::CommitOid::new( + query + .new_tree + .as_deref() + .ok_or_else(|| AppError::BadRequest("new_tree is required".into()))?, + ); + + let patch_id = tokio::task::spawn_blocking(move || { + let domain = git::GitDomain::from_model(repo)?; + domain.diff_patch_id(&old_tree, &new_tree) + }) + .await + .map_err(|e| AppError::InternalServerError(format!("Task join error: {}", e)))? + .map_err(AppError::from)?; + + Ok(DiffPatchIdResponse { + old_tree: query.old_tree.unwrap_or_default(), + new_tree: query.new_tree.unwrap_or_default(), + patch_id, + }) + } + + /// Generate a side-by-side diff between two commits or trees. 
+ pub async fn git_diff_side_by_side( + &self, + namespace: String, + repo_name: String, + query: SideBySideDiffQuery, + ctx: &Session, + ) -> Result { + let repo = self + .utils_find_repo(namespace.clone(), repo_name.clone(), ctx) + .await?; + + let cache_key = format!( + "git:diff:sbs:{}:{}:{}:{}", + namespace, repo_name, query.base, query.head, + ); + + if let Ok(mut conn) = self.cache.conn().await { + if let Ok(cached) = conn.get::<_, String>(cache_key.clone()).await { + if let Ok(cached) = serde_json::from_str::(&cached) { + return Ok(cached); + } + } + } + + let base = git::CommitOid::new(&query.base); + let head = git::CommitOid::new(&query.head); + + let mut opts = DiffOptions::new(); + if let Some(n) = query.context_lines { + opts = opts.context_lines(n); + } + if let Some(ref paths) = query.pathspec { + for p in paths { + opts = opts.pathspec(p); + } + } + + let result = tokio::task::spawn_blocking(move || { + let domain = git::GitDomain::from_model(repo)?; + let diff_result = domain.diff_tree_to_tree(Some(&base), Some(&head), Some(opts))?; + Ok::<_, GitError>(git::diff_to_side_by_side(&diff_result)) + }) + .await + .map_err(|e| AppError::InternalServerError(format!("Task join error: {}", e)))? 
+ .map_err(AppError::from)?; + + let response = SideBySideDiffResponse::from(result); + + if let Ok(mut conn) = self.cache.conn().await { + if let Err(e) = conn + .set_ex::( + cache_key, + serde_json::to_string(&response).unwrap_or_default(), + 60 * 60, + ) + .await + { + slog::debug!(self.logs, "cache set failed (non-fatal): {}", e); + } + } + + Ok(response) + } +} diff --git a/libs/service/git/init.rs b/libs/service/git/init.rs new file mode 100644 index 0000000..650b774 --- /dev/null +++ b/libs/service/git/init.rs @@ -0,0 +1,66 @@ +use crate::AppService; +use crate::error::AppError; +use serde::{Deserialize, Serialize}; +use session::Session; +#[derive(Debug, Clone, Serialize, Deserialize, utoipa::ToSchema)] +pub struct GitInitResponse { + pub path: String, + pub is_bare: bool, +} +#[derive(Debug, Clone, Deserialize, utoipa::ToSchema)] +pub struct GitInitRequest { + pub path: String, + #[serde(default)] + pub bare: bool, + #[serde(default)] + pub initial_branch: Option, +} +impl AppService { + pub async fn git_init_bare( + &self, + request: GitInitRequest, + ) -> Result { + let domain = git::GitDomain::init_bare(&request.path).map_err(AppError::from)?; + Ok(GitInitResponse { + path: domain.repo().path().to_string_lossy().to_string(), + is_bare: true, + }) + } + + pub async fn git_open(&self, path: String) -> Result { + let domain = git::GitDomain::open(&path).map_err(AppError::from)?; + Ok(GitInitResponse { + path: domain.repo().path().to_string_lossy().to_string(), + is_bare: domain.repo().is_bare(), + }) + } + + pub async fn git_open_workdir(&self, path: String) -> Result { + let domain = git::GitDomain::open_workdir(&path).map_err(AppError::from)?; + Ok(GitInitResponse { + path: domain.repo().path().to_string_lossy().to_string(), + is_bare: true, + }) + } + + pub async fn git_is_repo(&self, path: String) -> Result { + match git::GitDomain::open(&path) { + Ok(_) => Ok(true), + Err(git::GitError::NotFound(_)) => Ok(false), + Err(git::GitError::IoError(_)) => 
Ok(false), + // Other errors (permission denied, corruption, etc.) indicate an abnormal + // state that the caller should be aware of. + Err(e) => Err(AppError::from(e)), + } + } + + pub async fn git_repo_path( + &self, + namespace: String, + repo_name: String, + ctx: &Session, + ) -> Result { + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + Ok(repo.storage_path) + } +} diff --git a/libs/service/git/mod.rs b/libs/service/git/mod.rs new file mode 100644 index 0000000..e7b0193 --- /dev/null +++ b/libs/service/git/mod.rs @@ -0,0 +1,20 @@ +pub use git::*; + +mod blocking; + +pub mod archive; +pub mod blame; +pub mod blob; +pub mod branch; +pub mod branch_protection; +pub mod commit; +pub mod contributors; +pub mod diff; +pub mod init; +pub mod refs; +pub mod repo; +pub mod star; +pub mod tag; +pub mod tree; +pub mod watch; +pub mod webhook; diff --git a/libs/service/git/refs.rs b/libs/service/git/refs.rs new file mode 100644 index 0000000..e34bd4f --- /dev/null +++ b/libs/service/git/refs.rs @@ -0,0 +1,356 @@ +use crate::AppService; +use crate::error::AppError; +use crate::git::RefInfo; +use models::repos::repo; +use redis::AsyncCommands; +use serde::{Deserialize, Serialize}; +use session::Session; + +#[derive(Debug, Clone, Deserialize, utoipa::IntoParams)] +pub struct RefListQuery { + pub pattern: Option, +} + +#[derive(Debug, Clone, Deserialize)] +pub struct RefGetQuery { + pub name: String, +} + +#[derive(Debug, Clone, Deserialize, utoipa::ToSchema)] +pub struct RefCreateRequest { + pub name: String, + pub oid: String, + #[serde(default)] + pub force: bool, + pub message: Option, +} + +#[derive(Debug, Clone, Deserialize)] +pub struct RefDeleteQuery { + pub name: String, +} + +#[derive(Debug, Clone, Deserialize, utoipa::IntoParams)] +pub struct RefRenameQuery { + pub old_name: String, + pub new_name: String, + #[serde(default)] + pub force: bool, +} + +#[derive(Debug, Clone, Deserialize, utoipa::ToSchema)] +pub struct RefUpdateRequest { + pub 
name: String, + pub new_oid: String, + pub expected_oid: Option, + pub message: Option, +} + +#[derive(Debug, Clone, Deserialize)] +pub struct RefTargetQuery { + pub name: String, +} + +#[derive(Debug, Clone, Serialize, Deserialize, utoipa::ToSchema)] +pub struct RefInfoResponse { + pub name: String, + pub oid: Option, + pub target: Option, + pub is_symbolic: bool, + pub is_branch: bool, + pub is_remote: bool, + pub is_tag: bool, + pub is_note: bool, +} + +impl From for RefInfoResponse { + fn from(r: RefInfo) -> Self { + Self { + name: r.name, + oid: r.oid.map(|o| o.to_string()), + target: r.target.map(|t| t.to_string()), + is_symbolic: r.is_symbolic, + is_branch: r.is_branch, + is_remote: r.is_remote, + is_tag: r.is_tag, + is_note: r.is_note, + } + } +} + +#[derive(Debug, Clone, Serialize, utoipa::ToSchema)] +pub struct RefExistsResponse { + pub name: String, + pub exists: bool, +} + +#[derive(Debug, Clone, Serialize, utoipa::ToSchema)] +pub struct RefTargetResponse { + pub name: String, + pub oid: Option, +} + +#[derive(Debug, Clone, Serialize, utoipa::ToSchema)] +pub struct RefDeleteResponse { + pub name: String, + pub oid: String, +} + +#[derive(Debug, Clone, Serialize, utoipa::ToSchema)] +pub struct RefUpdateResponse { + pub name: String, + pub old_oid: Option, + pub new_oid: Option, +} + +impl AppService { + pub async fn git_ref_list( + &self, + namespace: String, + repo_name: String, + query: RefListQuery, + ctx: &Session, + ) -> Result, AppError> { + let repo = self + .utils_find_repo(namespace.clone(), repo_name.clone(), ctx) + .await?; + let cache_key = format!( + "git:ref:list:{}:{}:{}", + namespace, + repo_name, + query.pattern.as_deref().unwrap_or("*") + ); + + if let Ok(mut conn) = self.cache.conn().await { + if let Ok(cached) = conn.get::<_, String>(cache_key.clone()).await { + if let Ok(cached) = serde_json::from_str(&cached) { + return Ok(cached); + } + } + } + + let repo_clone = repo.clone(); + let pattern_clone = query.pattern.clone(); + + let 
refs = tokio::task::spawn_blocking(move || { + let domain = git::GitDomain::from_model(repo_clone)?; + domain.ref_list(pattern_clone.as_deref()) + }) + .await + .map_err(|e| AppError::InternalServerError(format!("Task join error: {}", e)))? + .map_err(AppError::from)?; + + let response: Vec = refs.into_iter().map(RefInfoResponse::from).collect(); + + if let Ok(mut conn) = self.cache.conn().await { + if let Err(e) = conn + .set_ex::( + cache_key, + serde_json::to_string(&response).unwrap_or_default(), + 60 * 60, + ) + .await + { + slog::debug!(self.logs, "cache set failed (non-fatal): {}", e); + } + } + + Ok(response) + } + + pub async fn git_ref_get( + &self, + namespace: String, + repo_name: String, + name: String, + ctx: &Session, + ) -> Result { + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + + let info = tokio::task::spawn_blocking(move || { + let domain = git::GitDomain::from_model(repo)?; + domain.ref_get(&name) + }) + .await + .map_err(|e| AppError::InternalServerError(format!("Task join error: {}", e)))? + .map_err(AppError::from)?; + + Ok(RefInfoResponse::from(info)) + } + + pub async fn git_ref_create( + &self, + namespace: String, + repo_name: String, + request: RefCreateRequest, + ctx: &Session, + ) -> Result { + let repo: repo::Model = self + .utils_check_repo_admin(namespace.clone(), repo_name.clone(), ctx) + .await?; + let name = request.name.clone(); + let oid = git::CommitOid::new(&request.oid); + let force = request.force; + let message = request.message.clone(); + + let result = tokio::task::spawn_blocking(move || { + let domain = git::GitDomain::from_model(repo)?; + domain.ref_create(&name, oid, force, message.as_deref()) + }) + .await + .map_err(|e| AppError::InternalServerError(format!("Task join error: {}", e)))? 
+ .map_err(AppError::from)?; + + if let Ok(mut conn) = self.cache.conn().await { + let key = format!("git:ref:list:{}:{}:*", namespace, repo_name); + if let Err(e) = conn.del::(key).await { + slog::debug!(self.logs, "cache del failed (non-fatal): {}", e); + } + } + + Ok(RefUpdateResponse { + name: result.name, + old_oid: result.old_oid.map(|o| o.to_string()), + new_oid: result.new_oid.map(|o| o.to_string()), + }) + } + + pub async fn git_ref_delete( + &self, + namespace: String, + repo_name: String, + name: String, + ctx: &Session, + ) -> Result { + let repo: repo::Model = self + .utils_check_repo_admin(namespace.clone(), repo_name.clone(), ctx) + .await?; + let name_clone = name.clone(); + let oid = tokio::task::spawn_blocking(move || { + let domain = git::GitDomain::from_model(repo)?; + domain.ref_delete(&name_clone) + }) + .await + .map_err(|e| AppError::InternalServerError(format!("Task join error: {}", e)))? + .map_err(AppError::from)?; + + if let Ok(mut conn) = self.cache.conn().await { + let key = format!("git:ref:list:{}:{}:*", namespace, repo_name); + if let Err(e) = conn.del::(key).await { + slog::debug!(self.logs, "cache del failed (non-fatal): {}", e); + } + } + + Ok(RefDeleteResponse { + name, + oid: oid.to_string(), + }) + } + + pub async fn git_ref_rename( + &self, + namespace: String, + repo_name: String, + query: RefRenameQuery, + ctx: &Session, + ) -> Result { + let repo: repo::Model = self + .utils_check_repo_admin(namespace.clone(), repo_name.clone(), ctx) + .await?; + let old_name = query.old_name.clone(); + let new_name = query.new_name.clone(); + let force = query.force; + + let info = tokio::task::spawn_blocking(move || { + let domain = git::GitDomain::from_model(repo)?; + domain.ref_rename(&old_name, &new_name, force) + }) + .await + .map_err(|e| AppError::InternalServerError(format!("Task join error: {}", e)))? 
+ .map_err(AppError::from)?; + + if let Ok(mut conn) = self.cache.conn().await { + let key = format!("git:ref:list:{}:{}:*", namespace, repo_name); + if let Err(e) = conn.del::(key).await { + slog::debug!(self.logs, "cache del failed (non-fatal): {}", e); + } + } + + Ok(RefInfoResponse::from(info)) + } + + pub async fn git_ref_update( + &self, + namespace: String, + repo_name: String, + request: RefUpdateRequest, + ctx: &Session, + ) -> Result { + let repo: repo::Model = self + .utils_check_repo_admin(namespace, repo_name, ctx) + .await?; + let name = request.name.clone(); + let new_oid = git::CommitOid::new(&request.new_oid); + let expected_oid = request.expected_oid.map(|o| git::CommitOid::new(&o)); + let message = request.message.clone(); + + let result = tokio::task::spawn_blocking(move || { + let domain = git::GitDomain::from_model(repo)?; + domain.ref_update(&name, new_oid, expected_oid, message.as_deref()) + }) + .await + .map_err(|e| AppError::InternalServerError(format!("Task join error: {}", e)))? + .map_err(AppError::from)?; + + Ok(RefUpdateResponse { + name: result.name, + old_oid: result.old_oid.map(|o| o.to_string()), + new_oid: result.new_oid.map(|o| o.to_string()), + }) + } + + pub async fn git_ref_exists( + &self, + namespace: String, + repo_name: String, + name: String, + ctx: &Session, + ) -> Result { + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + let name_clone = name.clone(); + + let exists = tokio::task::spawn_blocking(move || { + let domain = git::GitDomain::from_model(repo)?; + Ok::<_, git::GitError>(domain.ref_exists(&name_clone)) + }) + .await + .map_err(|e| AppError::InternalServerError(format!("Task join error: {}", e)))? 
+ .map_err(AppError::from)?; + + Ok(RefExistsResponse { name, exists }) + } + + pub async fn git_ref_target( + &self, + namespace: String, + repo_name: String, + name: String, + ctx: &Session, + ) -> Result { + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + let name_clone = name.clone(); + + let oid = tokio::task::spawn_blocking(move || { + let domain = git::GitDomain::from_model(repo)?; + domain.ref_target(&name_clone) + }) + .await + .map_err(|e| AppError::InternalServerError(format!("Task join error: {}", e)))? + .map_err(AppError::from)?; + + Ok(RefTargetResponse { + name, + oid: oid.map(|o| o.to_string()), + }) + } +} diff --git a/libs/service/git/repo.rs b/libs/service/git/repo.rs new file mode 100644 index 0000000..f65650e --- /dev/null +++ b/libs/service/git/repo.rs @@ -0,0 +1,463 @@ +use crate::AppService; +use crate::error::AppError; +use crate::git::MergeOptions; +use models::repos::repo; +use sea_orm::*; +use serde::{Deserialize, Serialize}; +use session::Session; + +#[derive(Debug, Clone, Deserialize, utoipa::ToSchema)] +pub struct DescriptionQuery { + pub description: String, +} +#[derive(Debug, Clone, Serialize, Deserialize, utoipa::ToSchema)] +pub struct DescriptionResponse { + pub description: String, +} +#[derive(Debug, Clone, Deserialize, utoipa::ToSchema)] +pub struct ConfigGetQuery { + pub key: String, +} +#[derive(Debug, Clone, Deserialize, utoipa::ToSchema)] +pub struct ConfigSetRequest { + pub key: String, + pub value: String, +} +#[derive(Debug, Clone, Deserialize, utoipa::ToSchema)] +pub struct ConfigDeleteQuery { + pub key: String, +} +#[derive(Debug, Clone, Deserialize, utoipa::ToSchema)] +pub struct ConfigEntriesQuery { + pub prefix: Option, +} +#[derive(Debug, Clone, Serialize, Deserialize, utoipa::ToSchema)] +pub struct ConfigEntryResponse { + pub name: String, + pub value: String, +} +#[derive(Debug, Clone, Serialize, Deserialize, utoipa::ToSchema)] +pub struct ConfigSnapshotResponse { + pub entries: Vec, +} 
+impl From for ConfigEntryResponse { + fn from(e: git::ConfigEntry) -> Self { + Self { + name: e.name, + value: e.value, + } + } +} +impl From for ConfigSnapshotResponse { + fn from(s: git::ConfigSnapshot) -> Self { + Self { + entries: s + .entries + .into_iter() + .map(ConfigEntryResponse::from) + .collect(), + } + } +} +#[derive(Debug, Clone, Deserialize, utoipa::ToSchema)] +pub struct GitUpdateRepoRequest { + pub default_branch: Option, +} +#[derive(Debug, Clone, Serialize, utoipa::ToSchema)] +pub struct ConfigBoolResponse { + pub key: String, + pub value: bool, +} +#[derive(Debug, Clone, Deserialize)] +pub struct MergeAnalysisQuery { + pub their_oid: String, +} +#[derive(Debug, Clone, Deserialize, utoipa::ToSchema)] +pub struct MergeRefAnalysisQuery { + pub ref_name: String, + pub their_oid: String, +} +#[derive(Debug, Clone, Deserialize, utoipa::ToSchema)] +pub struct MergeCommitsRequest { + pub local_oid: String, + pub remote_oid: String, + #[serde(default)] + pub find_renames: bool, + #[serde(default)] + pub fail_on_conflict: bool, + #[serde(default)] + pub skip_reuc: bool, + #[serde(default)] + pub no_recursive: bool, + #[serde(default = "default_rename_threshold")] + pub rename_threshold: u32, + #[serde(default)] + pub target_limit: u32, + #[serde(default)] + pub recursion_limit: u32, +} +fn default_rename_threshold() -> u32 { + 50 +} +#[derive(Debug, Clone, Deserialize, utoipa::ToSchema)] +pub struct MergeTreesRequest { + pub ancestor_oid: String, + pub our_oid: String, + pub their_oid: String, + #[serde(default)] + pub find_renames: bool, + #[serde(default)] + pub fail_on_conflict: bool, + #[serde(default)] + pub skip_reuc: bool, + #[serde(default)] + pub no_recursive: bool, + #[serde(default = "default_rename_threshold")] + pub rename_threshold: u32, + #[serde(default)] + pub target_limit: u32, + #[serde(default)] + pub recursion_limit: u32, +} +#[derive(Debug, Clone, Serialize, Deserialize, utoipa::ToSchema)] +pub struct MergeAnalysisResponse { + pub 
analysis: MergeAnalysisResultInner, + pub preference: MergePreferenceResultInner, +} +#[derive(Debug, Clone, Serialize, Deserialize, utoipa::ToSchema)] +pub struct MergeAnalysisResultInner { + pub is_none: bool, + pub is_normal: bool, + pub is_up_to_date: bool, + pub is_fast_forward: bool, + pub is_unborn: bool, +} +#[derive(Debug, Clone, Serialize, Deserialize, utoipa::ToSchema)] +pub struct MergePreferenceResultInner { + pub is_none: bool, + pub is_no_fast_forward: bool, + pub is_fastforward_only: bool, +} +impl From for MergeAnalysisResultInner { + fn from(r: git::MergeAnalysisResult) -> Self { + Self { + is_none: r.is_none, + is_normal: r.is_normal, + is_up_to_date: r.is_up_to_date, + is_fast_forward: r.is_fast_forward, + is_unborn: r.is_unborn, + } + } +} +impl From for MergePreferenceResultInner { + fn from(r: git::MergePreferenceResult) -> Self { + Self { + is_none: r.is_none, + is_no_fast_forward: r.is_no_fast_forward, + is_fastforward_only: r.is_fastforward_only, + } + } +} +#[derive(Debug, Clone, Serialize, utoipa::ToSchema)] +pub struct MergeheadInfoResponse { + pub oid: String, +} +impl From for MergeheadInfoResponse { + fn from(h: git::MergeheadInfo) -> Self { + Self { + oid: h.oid.to_string(), + } + } +} +impl AppService { + pub async fn git_description_get( + &self, + namespace: String, + repo_name: String, + ctx: &Session, + ) -> Result { + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + let domain = git::GitDomain::from_model(repo)?; + let description = domain.description_get()?; + Ok(DescriptionResponse { description }) + } + + pub async fn git_description_set( + &self, + namespace: String, + repo_name: String, + query: DescriptionQuery, + ctx: &Session, + ) -> Result { + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + let domain = git::GitDomain::from_model(repo)?; + domain.description_set(&query.description)?; + Ok(DescriptionResponse { + description: query.description, + }) + } + + pub async fn 
git_description_reset( + &self, + namespace: String, + repo_name: String, + ctx: &Session, + ) -> Result { + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + let domain = git::GitDomain::from_model(repo)?; + domain.description_reset()?; + Ok(DescriptionResponse { + description: "Unnamed repository".to_string(), + }) + } + + pub async fn git_description_exists( + &self, + namespace: String, + repo_name: String, + ctx: &Session, + ) -> Result { + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + let domain = git::GitDomain::from_model(repo)?; + Ok(domain.description_exists()) + } + + pub async fn git_config_entries( + &self, + namespace: String, + repo_name: String, + query: ConfigEntriesQuery, + ctx: &Session, + ) -> Result { + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + let domain = git::GitDomain::from_model(repo)?; + let snapshot = domain + .config_entries(query.prefix.as_deref()) + .map_err(AppError::from)?; + Ok(ConfigSnapshotResponse::from(snapshot)) + } + + pub async fn git_config_get( + &self, + namespace: String, + repo_name: String, + query: ConfigGetQuery, + ctx: &Session, + ) -> Result, AppError> { + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + let domain = git::GitDomain::from_model(repo)?; + domain.config_get(&query.key).map_err(AppError::from) + } + + pub async fn git_config_set( + &self, + namespace: String, + repo_name: String, + request: ConfigSetRequest, + ctx: &Session, + ) -> Result<(), AppError> { + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + let domain = git::GitDomain::from_model(repo)?; + domain + .config_set(&request.key, &request.value) + .map_err(AppError::from) + } + + pub async fn git_config_delete( + &self, + namespace: String, + repo_name: String, + query: ConfigDeleteQuery, + ctx: &Session, + ) -> Result<(), AppError> { + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + let domain = 
git::GitDomain::from_model(repo)?; + domain.config_delete(&query.key).map_err(AppError::from) + } + + pub async fn git_config_has( + &self, + namespace: String, + repo_name: String, + query: ConfigGetQuery, + ctx: &Session, + ) -> Result { + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + let domain = git::GitDomain::from_model(repo)?; + let exists = domain.config_has(&query.key).map_err(AppError::from)?; + Ok(ConfigBoolResponse { + key: query.key, + value: exists, + }) + } + + pub async fn git_merge_analysis( + &self, + namespace: String, + repo_name: String, + query: MergeAnalysisQuery, + ctx: &Session, + ) -> Result { + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + let domain = git::GitDomain::from_model(repo)?; + let their_oid = git::CommitOid::new(&query.their_oid); + let (analysis, preference) = domain.merge_analysis(&their_oid).map_err(AppError::from)?; + Ok(MergeAnalysisResponse { + analysis: MergeAnalysisResultInner::from(analysis), + preference: MergePreferenceResultInner::from(preference), + }) + } + + pub async fn git_merge_analysis_for_ref( + &self, + namespace: String, + repo_name: String, + query: MergeRefAnalysisQuery, + ctx: &Session, + ) -> Result { + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + let domain = git::GitDomain::from_model(repo)?; + let their_oid = git::CommitOid::new(&query.their_oid); + let (analysis, preference) = domain + .merge_analysis_for_ref(&query.ref_name, &their_oid) + .map_err(AppError::from)?; + Ok(MergeAnalysisResponse { + analysis: MergeAnalysisResultInner::from(analysis), + preference: MergePreferenceResultInner::from(preference), + }) + } + + pub async fn git_merge_base( + &self, + namespace: String, + repo_name: String, + oid1: String, + oid2: String, + ctx: &Session, + ) -> Result { + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + let domain = git::GitDomain::from_model(repo)?; + let base = domain + 
.merge_base(&git::CommitOid::new(&oid1), &git::CommitOid::new(&oid2)) + .map_err(AppError::from)?; + Ok(base.to_string()) + } + + pub async fn git_merge_commits( + &self, + namespace: String, + repo_name: String, + request: MergeCommitsRequest, + ctx: &Session, + ) -> Result<(), AppError> { + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + let mut opts = MergeOptions::new(); + opts = opts.find_renames(request.find_renames); + opts = opts.fail_on_conflict(request.fail_on_conflict); + opts = opts.skip_reuc(request.skip_reuc); + opts = opts.no_recursive(request.no_recursive); + opts = opts.rename_threshold(request.rename_threshold); + opts = opts.target_limit(request.target_limit); + opts = opts.recursion_limit(request.recursion_limit); + let domain = git::GitDomain::from_model(repo)?; + domain + .merge_commits( + &git::CommitOid::new(&request.local_oid), + &git::CommitOid::new(&request.remote_oid), + Some(opts), + ) + .map_err(AppError::from) + } + + pub async fn git_merge_trees( + &self, + namespace: String, + repo_name: String, + request: MergeTreesRequest, + ctx: &Session, + ) -> Result<(), AppError> { + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + let mut opts = MergeOptions::new(); + opts = opts.find_renames(request.find_renames); + opts = opts.fail_on_conflict(request.fail_on_conflict); + opts = opts.skip_reuc(request.skip_reuc); + opts = opts.no_recursive(request.no_recursive); + opts = opts.rename_threshold(request.rename_threshold); + opts = opts.target_limit(request.target_limit); + opts = opts.recursion_limit(request.recursion_limit); + let domain = git::GitDomain::from_model(repo)?; + domain + .merge_trees( + &git::CommitOid::new(&request.ancestor_oid), + &git::CommitOid::new(&request.our_oid), + &git::CommitOid::new(&request.their_oid), + Some(opts), + ) + .map_err(AppError::from) + } + + pub async fn git_merge_abort( + &self, + namespace: String, + repo_name: String, + ctx: &Session, + ) -> Result<(), 
AppError> { + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + let domain = git::GitDomain::from_model(repo)?; + domain.merge_abort().map_err(AppError::from) + } + + pub async fn git_merge_is_in_progress( + &self, + namespace: String, + repo_name: String, + ctx: &Session, + ) -> Result { + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + let domain = git::GitDomain::from_model(repo)?; + Ok(domain.merge_is_in_progress()) + } + + pub async fn git_mergehead_list( + &self, + namespace: String, + repo_name: String, + ctx: &Session, + ) -> Result, AppError> { + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + let mut domain = git::GitDomain::from_model(repo)?; + let heads = domain.mergehead_list().map_err(AppError::from)?; + Ok(heads.into_iter().map(MergeheadInfoResponse::from).collect()) + } + + pub async fn git_merge_is_conflicted( + &self, + namespace: String, + repo_name: String, + ctx: &Session, + ) -> Result { + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + let domain = git::GitDomain::from_model(repo)?; + Ok(domain.merge_is_conflicted()) + } + + pub async fn git_update_repo( + &self, + namespace: String, + repo_name: String, + params: GitUpdateRepoRequest, + ctx: &Session, + ) -> Result<(), AppError> { + let repo = self + .utils_find_repo(namespace.clone(), repo_name.clone(), ctx) + .await?; + let txn = self.db.begin().await?; + let mut active: repo::ActiveModel = repo.clone().into_active_model(); + if let Some(default_branch) = params.default_branch { + active.default_branch = Set(default_branch); + } + active.update(&txn).await?; + txn.commit().await?; + Ok(()) + } +} diff --git a/libs/service/git/star.rs b/libs/service/git/star.rs new file mode 100644 index 0000000..43e71f3 --- /dev/null +++ b/libs/service/git/star.rs @@ -0,0 +1,212 @@ +use crate::AppService; +use crate::error::AppError; +use chrono::Utc; +use models::repos::repo as repo_model; +use models::repos::{RepoStar, 
repo_star}; +use sea_orm::*; +use serde::{Deserialize, Serialize}; +use session::Session; +use uuid::Uuid; + +#[derive(Debug, Clone, Serialize, Deserialize, utoipa::ToSchema)] +pub struct StarCountResponse { + pub count: i64, +} +#[derive(Debug, Clone, Serialize, Deserialize, utoipa::ToSchema)] +pub struct StarUserInfo { + pub uid: String, + pub username: String, + pub avatar_url: String, +} +#[derive(Debug, Clone, Serialize, Deserialize, utoipa::ToSchema)] +pub struct StarUserListResponse { + pub users: Vec, +} +impl AppService { + pub async fn git_star( + &self, + namespace: String, + repo_name: String, + ctx: &Session, + ) -> Result<(), AppError> { + let user_uid = ctx.user().ok_or(AppError::Unauthorized)?; + let repo = self + .utils_find_repo(namespace, repo_name.clone(), ctx) + .await?; + let existing = RepoStar::find() + .filter(repo_star::Column::User.eq(user_uid)) + .filter(repo_star::Column::Repo.eq(repo.id)) + .one(&self.db) + .await?; + if existing.is_some() { + return Err(AppError::InternalServerError("already starred".to_string())); + } + RepoStar::insert(repo_star::ActiveModel { + id: Default::default(), + repo: Set(repo.id), + user: Set(user_uid), + created_at: Set(Utc::now()), + }) + .exec(&self.db) + .await?; + + // Log activity: need to look up project_id from repo + let project_id = match repo_model::Entity::find_by_id(repo.id).one(&self.db).await { + Ok(Some(r)) => r.project, + Ok(None) => Uuid::nil(), + Err(e) => { + slog::warn!( + self.logs, + "failed to look up project_id for activity log: {}", + e + ); + Uuid::nil() + } + }; + let _ = self + .project_log_activity( + project_id, + Some(repo.id), + user_uid, + super::super::project::activity::ActivityLogParams { + event_type: "repo_star".to_string(), + title: format!("{} starred repository '{}'", user_uid, repo_name), + repo_id: Some(repo.id), + content: None, + event_id: None, + event_sub_id: None, + metadata: Some(serde_json::json!({ + "repo_name": repo_name, + })), + is_private: false, + }, 
+ ) + .await; + Ok(()) + } + + pub async fn git_unstar( + &self, + namespace: String, + repo_name: String, + ctx: &Session, + ) -> Result<(), AppError> { + let user_uid = ctx.user().ok_or(AppError::Unauthorized)?; + let repo = self + .utils_find_repo(namespace, repo_name.clone(), ctx) + .await?; + let deleted = RepoStar::delete_many() + .filter(repo_star::Column::User.eq(user_uid)) + .filter(repo_star::Column::Repo.eq(repo.id)) + .exec(&self.db) + .await?; + if deleted.rows_affected == 0 { + return Err(AppError::InternalServerError("not starred".to_string())); + } + let project_id = match repo_model::Entity::find_by_id(repo.id).one(&self.db).await { + Ok(Some(r)) => r.project, + Ok(None) => Uuid::nil(), + Err(e) => { + slog::warn!( + self.logs, + "failed to look up project_id for activity log: {}", + e + ); + Uuid::nil() + } + }; + let _ = self + .project_log_activity( + project_id, + Some(repo.id), + user_uid, + super::super::project::activity::ActivityLogParams { + event_type: "repo_unstar".to_string(), + title: format!("{} unstarred repository '{}'", user_uid, repo_name), + repo_id: Some(repo.id), + content: None, + event_id: None, + event_sub_id: None, + metadata: Some(serde_json::json!({"repo_name": repo_name})), + is_private: false, + }, + ) + .await; + Ok(()) + } + + pub async fn git_is_starred( + &self, + namespace: String, + repo_name: String, + ctx: &Session, + ) -> Result { + let user_uid = ctx.user().ok_or(AppError::Unauthorized)?; + let repo = self + .utils_find_repo(namespace, repo_name.clone(), ctx) + .await?; + let existing = RepoStar::find() + .filter(repo_star::Column::User.eq(user_uid)) + .filter(repo_star::Column::Repo.eq(repo.id)) + .one(&self.db) + .await?; + Ok(existing.is_some()) + } + + pub async fn git_star_count( + &self, + namespace: String, + repo_name: String, + ctx: &Session, + ) -> Result { + let repo = self + .utils_find_repo(namespace, repo_name.clone(), ctx) + .await?; + let count = RepoStar::find() + 
.filter(repo_star::Column::Repo.eq(repo.id)) + .count(&self.db) + .await?; + Ok(StarCountResponse { + count: count as i64, + }) + } + + pub async fn git_star_user_list( + &self, + namespace: String, + repo_name: String, + pager: crate::Pager, + ctx: &Session, + ) -> Result { + let repo = self + .utils_find_repo(namespace, repo_name.clone(), ctx) + .await?; + let page = std::cmp::Ord::max(pager.page, 1); + let par_page = std::cmp::Ord::min(std::cmp::Ord::max(pager.par_page, 1), 1000); + let offset_val = (page - 1).saturating_mul(par_page); + let offset = offset_val as u64; + let stars = RepoStar::find() + .filter(repo_star::Column::Repo.eq(repo.id)) + .order_by_desc(repo_star::Column::CreatedAt) + .limit(par_page as u64) + .offset(offset) + .all(&self.db) + .await?; + let user_uids: Vec = stars.into_iter().map(|s| s.user).collect(); + if user_uids.is_empty() { + return Ok(StarUserListResponse { users: vec![] }); + } + let users = models::users::user::Entity::find() + .filter(models::users::user::Column::Uid.is_in(user_uids)) + .all(&self.db) + .await? 
+ .into_iter() + .map(|u| StarUserInfo { + uid: u.uid.to_string(), + username: u.username, + avatar_url: u.avatar_url.unwrap_or_default(), + }) + .collect(); + Ok(StarUserListResponse { users }) + } +} diff --git a/libs/service/git/tag.rs b/libs/service/git/tag.rs new file mode 100644 index 0000000..ee14d8f --- /dev/null +++ b/libs/service/git/tag.rs @@ -0,0 +1,648 @@ +use crate::AppService; +use crate::error::AppError; +use crate::git::{TagInfo, TagSummary}; +use models::repos::repo; +use models::repos::repo as repo_model; +use models::repos::repo_tag; +use redis::AsyncCommands; +use sea_orm::{ColumnTrait, EntityTrait, PaginatorTrait, QueryFilter}; +use serde::{Deserialize, Serialize}; +use session::Session; +use uuid::Uuid; +#[derive(Debug, Clone, Deserialize)] +pub struct TagGetQuery { + #[serde(default)] + pub name: String, +} +#[derive(Debug, Clone, Deserialize)] +pub struct TagTargetQuery { + #[serde(default)] + pub name: String, +} +#[derive(Debug, Clone, Deserialize, utoipa::ToSchema)] +pub struct TagRenameQuery { + pub old_name: String, + pub new_name: String, +} +#[derive(Debug, Clone, Serialize, Deserialize, utoipa::ToSchema)] +pub struct TagInfoResponse { + pub name: String, + pub oid: String, + pub target: String, + pub is_annotated: bool, + pub message: Option, + pub tagger: Option, + pub tagger_email: Option, +} +impl From for TagInfoResponse { + fn from(t: TagInfo) -> Self { + Self { + name: t.name, + oid: t.oid.to_string(), + target: t.target.to_string(), + is_annotated: t.is_annotated, + message: t.message, + tagger: t.tagger, + tagger_email: t.tagger_email, + } + } +} + +impl From for TagInfoResponse { + fn from(t: repo_tag::Model) -> Self { + // is_annotated: description field is set for annotated tags + let is_annotated = t.description.is_some(); + Self { + name: t.name, + oid: t.oid.clone(), + // For tags, oid IS the target commit OID (repo_tag.oid is the peeled target) + target: t.oid, + is_annotated, + message: t.description, + tagger: 
Some(t.tagger_name), + tagger_email: Some(t.tagger_email), + } + } +} +#[derive(Debug, Clone, Serialize, utoipa::ToSchema)] +pub struct TagSummaryResponse { + pub total_count: usize, +} +impl From for TagSummaryResponse { + fn from(s: TagSummary) -> Self { + Self { + total_count: s.total_count, + } + } +} +#[derive(Debug, Clone, Serialize, utoipa::ToSchema)] +pub struct TagExistsResponse { + pub name: String, + pub exists: bool, +} +#[derive(Debug, Clone, Serialize, utoipa::ToSchema)] +pub struct TagTargetResponse { + pub name: String, + pub target: Option, +} +#[derive(Debug, Clone, Serialize, utoipa::ToSchema)] +pub struct TagIsAnnotatedResponse { + pub name: String, + pub is_annotated: bool, +} +#[derive(Debug, Clone, Serialize, utoipa::ToSchema)] +pub struct TagMessageResponse { + pub name: String, + pub message: Option, +} +#[derive(Debug, Clone, Serialize, utoipa::ToSchema)] +pub struct TagTaggerResponse { + pub name: String, + pub tagger: Option, + pub tagger_email: Option, +} +#[derive(Debug, Clone, Serialize, utoipa::ToSchema)] +pub struct TagCountResponse { + pub count: usize, +} +#[derive(Debug, Clone, Deserialize, utoipa::ToSchema)] +pub struct TagCreateRequest { + pub name: String, + pub target: String, + pub message: Option, + pub tagger_name: Option, + pub tagger_email: Option, + #[serde(default)] + pub force: bool, +} +#[derive(Debug, Clone, Deserialize, utoipa::ToSchema)] +pub struct TagCreateLightweightRequest { + pub name: String, + pub target: String, + #[serde(default)] + pub force: bool, +} +#[derive(Debug, Clone, Deserialize, utoipa::ToSchema)] +pub struct TagUpdateMessageRequest { + pub name: String, + pub message: String, + pub tagger_name: String, + pub tagger_email: String, +} +impl AppService { + pub async fn git_tag_list( + &self, + namespace: String, + repo_name: String, + ctx: &Session, + ) -> Result, AppError> { + let repo = self + .utils_find_repo(namespace.clone(), repo_name.clone(), ctx) + .await?; + + // Try DB first (fastest 
path after sync) + let tags: Vec = repo_tag::Entity::find() + .filter(repo_tag::Column::Repo.eq(repo.id)) + .all(&self.db) + .await + .map_err(AppError::from)? + .into_iter() + .map(TagInfoResponse::from) + .collect(); + + // Write to Redis cache for subsequent requests + let cache_key = format!("git:tag:list:{}:{}", namespace, repo_name); + if let Ok(mut conn) = self.cache.conn().await { + if let Err(e) = conn + .set_ex::( + cache_key, + serde_json::to_string(&tags).unwrap_or_default(), + 60 * 60, + ) + .await + { + slog::debug!(self.logs, "cache set failed (non-fatal): {}", e); + } + } + Ok(tags) + } + + pub async fn git_tag_list_names( + &self, + namespace: String, + repo_name: String, + ctx: &Session, + ) -> Result, AppError> { + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + let names: Vec = repo_tag::Entity::find() + .filter(repo_tag::Column::Repo.eq(repo.id)) + .all(&self.db) + .await + .map_err(AppError::from)? + .into_iter() + .map(|t| t.name) + .collect(); + Ok(names) + } + + pub async fn git_tag_summary( + &self, + namespace: String, + repo_name: String, + ctx: &Session, + ) -> Result { + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + let count: usize = repo_tag::Entity::find() + .filter(repo_tag::Column::Repo.eq(repo.id)) + .count(&self.db) + .await + .map_err(AppError::from)? as usize; + Ok(TagSummaryResponse { total_count: count }) + } + + pub async fn git_tag_count( + &self, + namespace: String, + repo_name: String, + ctx: &Session, + ) -> Result { + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + let count: usize = repo_tag::Entity::find() + .filter(repo_tag::Column::Repo.eq(repo.id)) + .count(&self.db) + .await + .map_err(AppError::from)? 
as usize; + Ok(TagCountResponse { count }) + } + + pub async fn git_tag_get( + &self, + namespace: String, + repo_name: String, + query: TagGetQuery, + ctx: &Session, + ) -> Result { + let repo = self + .utils_find_repo(namespace.clone(), repo_name.clone(), ctx) + .await?; + + // Try DB first + if let Some(tag) = repo_tag::Entity::find() + .filter(repo_tag::Column::Repo.eq(repo.id)) + .filter(repo_tag::Column::Name.eq(&query.name)) + .one(&self.db) + .await + .map_err(AppError::from)? + { + let response = TagInfoResponse::from(tag); + // Populate cache for next time + let cache_key = format!("git:tag:get:{}:{}:{}", namespace, repo_name, query.name); + if let Ok(mut conn) = self.cache.conn().await { + let _ = conn + .set_ex::( + cache_key, + serde_json::to_string(&response).unwrap_or_default(), + 60 * 60, + ) + .await; + } + return Ok(response); + } + + // Fallback to git + let cache_key = format!("git:tag:get:{}:{}:{}", namespace, repo_name, query.name); + let domain = git::GitDomain::from_model(repo)?; + let info = domain.tag_get(&query.name)?; + let response = TagInfoResponse::from(info); + if let Ok(mut conn) = self.cache.conn().await { + if let Err(e) = conn + .set_ex::( + cache_key, + serde_json::to_string(&response).unwrap_or_default(), + 60 * 60, + ) + .await + { + slog::debug!(self.logs, "cache set failed (non-fatal): {}", e); + } + } + Ok(response) + } + + pub async fn git_tag_exists( + &self, + namespace: String, + repo_name: String, + query: TagGetQuery, + ctx: &Session, + ) -> Result { + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + let exists = repo_tag::Entity::find() + .filter(repo_tag::Column::Repo.eq(repo.id)) + .filter(repo_tag::Column::Name.eq(&query.name)) + .one(&self.db) + .await + .map_err(AppError::from)? 
+ .is_some(); + Ok(TagExistsResponse { + name: query.name, + exists, + }) + } + + pub async fn git_tag_target( + &self, + namespace: String, + repo_name: String, + query: TagTargetQuery, + ctx: &Session, + ) -> Result { + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + let target = repo_tag::Entity::find() + .filter(repo_tag::Column::Repo.eq(repo.id)) + .filter(repo_tag::Column::Name.eq(&query.name)) + .one(&self.db) + .await + .map_err(AppError::from)? + .map(|t| t.oid); + Ok(TagTargetResponse { + name: query.name, + target, + }) + } + + pub async fn git_tag_is_annotated( + &self, + namespace: String, + repo_name: String, + query: TagGetQuery, + ctx: &Session, + ) -> Result { + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + let is_annotated = repo_tag::Entity::find() + .filter(repo_tag::Column::Repo.eq(repo.id)) + .filter(repo_tag::Column::Name.eq(&query.name)) + .one(&self.db) + .await + .map_err(AppError::from)? + .map(|t| t.description.is_some()) + .unwrap_or(false); + Ok(TagIsAnnotatedResponse { + name: query.name, + is_annotated, + }) + } + + pub async fn git_tag_message( + &self, + namespace: String, + repo_name: String, + query: TagGetQuery, + ctx: &Session, + ) -> Result { + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + let message = repo_tag::Entity::find() + .filter(repo_tag::Column::Repo.eq(repo.id)) + .filter(repo_tag::Column::Name.eq(&query.name)) + .one(&self.db) + .await + .map_err(AppError::from)? 
+ .and_then(|t| t.description); + Ok(TagMessageResponse { + name: query.name, + message, + }) + } + + pub async fn git_tag_tagger( + &self, + namespace: String, + repo_name: String, + query: TagGetQuery, + ctx: &Session, + ) -> Result { + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + let tag = repo_tag::Entity::find() + .filter(repo_tag::Column::Repo.eq(repo.id)) + .filter(repo_tag::Column::Name.eq(&query.name)) + .one(&self.db) + .await + .map_err(AppError::from)?; + Ok(TagTaggerResponse { + name: query.name, + tagger: tag.as_ref().map(|t| t.tagger_name.clone()), + tagger_email: tag.map(|t| t.tagger_email), + }) + } + pub async fn git_tag_create( + &self, + namespace: String, + repo_name: String, + request: TagCreateRequest, + ctx: &Session, + ) -> Result { + let repo: repo::Model = self + .utils_check_repo_admin(namespace.clone(), repo_name.clone(), ctx) + .await?; + let repo_id = repo.id; + let domain = git::GitDomain::from_model(repo)?; + let tagger = git::CommitSignature { + name: request + .tagger_name + .unwrap_or_else(|| "Anonymous".to_string()), + email: request + .tagger_email + .unwrap_or_else(|| "anonymous@example.com".to_string()), + time_secs: chrono::Utc::now().timestamp(), + offset_minutes: 0, + }; + let target = git::CommitOid::new(&request.target); + let info = domain.tag_create( + &request.name, + &target, + request.message.as_deref().unwrap_or(""), + &tagger, + request.force, + )?; + if let Ok(mut conn) = self.cache.conn().await { + let key = format!("git:tag:list:{}:{}", namespace, repo_name); + if let Err(e) = conn.del::(key).await { + slog::debug!(self.logs, "cache del failed (non-fatal): {}", e); + } + } + let project_id = match repo_model::Entity::find_by_id(repo_id).one(&self.db).await { + Ok(Some(r)) => r.project, + Ok(None) => Uuid::nil(), + Err(e) => { + slog::warn!( + self.logs, + "failed to look up project_id for activity log: {}", + e + ); + Uuid::nil() + } + }; + let user_uid = 
ctx.user().unwrap_or(Uuid::nil()); + let _ = self + .project_log_activity( + project_id, + Some(repo_id), + user_uid, + super::super::project::activity::ActivityLogParams { + event_type: "tag_create".to_string(), + title: format!("{} created tag '{}'", user_uid, request.name), + repo_id: Some(repo_id), + content: None, + event_id: None, + event_sub_id: None, + metadata: Some(serde_json::json!({"tag_name": request.name})), + is_private: false, + }, + ) + .await; + Ok(TagInfoResponse::from(info)) + } + pub async fn git_tag_create_lightweight( + &self, + namespace: String, + repo_name: String, + request: TagCreateLightweightRequest, + ctx: &Session, + ) -> Result { + let repo: repo::Model = self + .utils_check_repo_admin(namespace.clone(), repo_name.clone(), ctx) + .await?; + let repo_id = repo.id; + let domain = git::GitDomain::from_model(repo)?; + let target = git::CommitOid::new(&request.target); + let info = domain.tag_create_lightweight(&request.name, &target, request.force)?; + if let Ok(mut conn) = self.cache.conn().await { + let key = format!("git:tag:list:{}:{}", namespace, repo_name); + if let Err(e) = conn.del::(key).await { + slog::debug!(self.logs, "cache del failed (non-fatal): {}", e); + } + } + let project_id = match repo_model::Entity::find_by_id(repo_id).one(&self.db).await { + Ok(Some(r)) => r.project, + Ok(None) => Uuid::nil(), + Err(e) => { + slog::warn!( + self.logs, + "failed to look up project_id for activity log: {}", + e + ); + Uuid::nil() + } + }; + let user_uid = ctx.user().unwrap_or(Uuid::nil()); + let _ = self + .project_log_activity( + project_id, + Some(repo_id), + user_uid, + super::super::project::activity::ActivityLogParams { + event_type: "tag_create".to_string(), + title: format!("{} created tag '{}'", user_uid, request.name), + repo_id: Some(repo_id), + content: None, + event_id: None, + event_sub_id: None, + metadata: Some(serde_json::json!({"tag_name": request.name})), + is_private: false, + }, + ) + .await; + 
Ok(TagInfoResponse::from(info)) + } + pub async fn git_tag_delete( + &self, + namespace: String, + repo_name: String, + query: TagGetQuery, + ctx: &Session, + ) -> Result<(), AppError> { + let repo: repo::Model = self + .utils_check_repo_admin(namespace.clone(), repo_name.clone(), ctx) + .await?; + let repo_id = repo.id; + let domain = git::GitDomain::from_model(repo)?; + domain.tag_delete(&query.name)?; + if let Ok(mut conn) = self.cache.conn().await { + let list_key = format!("git:tag:list:{}:{}", namespace, repo_name); + let get_key = format!("git:tag:get:{}:{}:{}", namespace, repo_name, query.name); + if let Err(e) = conn.del::(list_key).await { + slog::debug!(self.logs, "cache del failed (non-fatal): {}", e); + } + if let Err(e) = conn.del::(get_key).await { + slog::debug!(self.logs, "cache del failed (non-fatal): {}", e); + } + } + let project_id = match repo_model::Entity::find_by_id(repo_id).one(&self.db).await { + Ok(Some(r)) => r.project, + Ok(None) => Uuid::nil(), + Err(e) => { + slog::warn!( + self.logs, + "failed to look up project_id for activity log: {}", + e + ); + Uuid::nil() + } + }; + let user_uid = ctx.user().unwrap_or(Uuid::nil()); + let _ = self + .project_log_activity( + project_id, + Some(repo_id), + user_uid, + super::super::project::activity::ActivityLogParams { + event_type: "tag_delete".to_string(), + title: format!("{} deleted tag '{}'", user_uid, query.name), + repo_id: Some(repo_id), + content: None, + event_id: None, + event_sub_id: None, + metadata: Some(serde_json::json!({"tag_name": query.name})), + is_private: false, + }, + ) + .await; + Ok(()) + } + pub async fn git_tag_rename( + &self, + namespace: String, + repo_name: String, + query: TagRenameQuery, + ctx: &Session, + ) -> Result { + let repo: repo::Model = self + .utils_check_repo_admin(namespace.clone(), repo_name.clone(), ctx) + .await?; + let repo_id = repo.id; + let domain = git::GitDomain::from_model(repo)?; + let info = domain.tag_rename(&query.old_name, 
&query.new_name)?; + if let Ok(mut conn) = self.cache.conn().await { + let list_key = format!("git:tag:list:{}:{}", namespace, repo_name); + let old_key = format!("git:tag:get:{}:{}:{}", namespace, repo_name, query.old_name); + let new_key = format!("git:tag:get:{}:{}:{}", namespace, repo_name, query.new_name); + if let Err(e) = conn.del::(list_key).await { + slog::debug!(self.logs, "cache del failed (non-fatal): {}", e); + } + if let Err(e) = conn.del::(old_key).await { + slog::debug!(self.logs, "cache del failed (non-fatal): {}", e); + } + if let Err(e) = conn.del::(new_key).await { + slog::debug!(self.logs, "cache del failed (non-fatal): {}", e); + } + } + let response = TagInfoResponse::from(info); + let project_id = match repo_model::Entity::find_by_id(repo_id).one(&self.db).await { + Ok(Some(r)) => r.project, + Ok(None) => Uuid::nil(), + Err(e) => { + slog::warn!( + self.logs, + "failed to look up project_id for activity log: {}", + e + ); + Uuid::nil() + } + }; + let user_uid = ctx.user().unwrap_or(Uuid::nil()); + let _ = self + .project_log_activity( + project_id, + Some(repo_id), + user_uid, + super::super::project::activity::ActivityLogParams { + event_type: "tag_rename".to_string(), + title: format!( + "{} renamed tag '{}' to '{}'", + user_uid, query.old_name, query.new_name + ), + repo_id: Some(repo_id), + content: None, + event_id: None, + event_sub_id: None, + metadata: Some( + serde_json::json!({"old_name": query.old_name, "new_name": query.new_name}), + ), + is_private: false, + }, + ) + .await; + Ok(response) + } + pub async fn git_tag_update_message( + &self, + namespace: String, + repo_name: String, + request: TagUpdateMessageRequest, + ctx: &Session, + ) -> Result { + let repo: repo::Model = self + .utils_check_repo_admin(namespace.clone(), repo_name.clone(), ctx) + .await?; + let _repo_id = repo.id; + let domain = git::GitDomain::from_model(repo)?; + let tagger = git::CommitSignature { + name: request.tagger_name, + email: request.tagger_email, 
+ time_secs: chrono::Utc::now().timestamp(), + offset_minutes: 0, + }; + let info = domain.tag_update_message(&request.name, &request.message, &tagger)?; + if let Ok(mut conn) = self.cache.conn().await { + let list_key = format!("git:tag:list:{}:{}", namespace, repo_name); + let get_key = format!("git:tag:get:{}:{}:{}", namespace, repo_name, request.name); + if let Err(e) = conn.del::(list_key).await { + slog::debug!(self.logs, "cache del failed (non-fatal): {}", e); + } + if let Err(e) = conn.del::(get_key).await { + slog::debug!(self.logs, "cache del failed (non-fatal): {}", e); + } + } + Ok(TagInfoResponse::from(info)) + } +} diff --git a/libs/service/git/tree.rs b/libs/service/git/tree.rs new file mode 100644 index 0000000..6099488 --- /dev/null +++ b/libs/service/git/tree.rs @@ -0,0 +1,351 @@ +use crate::AppService; +use crate::error::AppError; +use crate::git::{TreeEntry, TreeInfo}; +use redis::AsyncCommands; +use serde::{Deserialize, Serialize}; +use session::Session; + +#[derive(Debug, Clone, Deserialize)] +pub struct TreeGetQuery { + #[serde(default)] + pub oid: String, +} + +#[derive(Debug, Clone, Deserialize)] +pub struct TreeEntryQuery { + #[serde(default)] + pub oid: String, + pub index: usize, +} + +#[derive(Debug, Clone, Deserialize)] +pub struct TreeEntryByPathQuery { + #[serde(default)] + pub oid: String, + pub path: String, +} + +#[derive(Debug, Clone, Deserialize)] +pub struct TreeEntryByCommitPathQuery { + pub commit: String, + pub path: String, +} + +#[derive(Debug, Clone, Deserialize)] +pub struct TreeDiffQuery { + pub old_tree: String, + pub new_tree: String, +} + +#[derive(Debug, Clone, Serialize, utoipa::ToSchema)] +pub struct TreeInfoResponse { + pub oid: String, + pub entry_count: usize, + pub is_empty: bool, +} + +impl From for TreeInfoResponse { + fn from(t: TreeInfo) -> Self { + Self { + oid: t.oid.to_string(), + entry_count: t.entry_count, + is_empty: t.is_empty, + } + } +} + +#[derive(Debug, Clone, Serialize, Deserialize, 
utoipa::ToSchema)] +pub struct TreeEntryResponse { + pub name: String, + pub oid: String, + pub kind: String, + pub filemode: u32, + pub is_binary: bool, +} + +impl From for TreeEntryResponse { + fn from(e: TreeEntry) -> Self { + Self { + name: e.name, + oid: e.oid.to_string(), + kind: e.kind, + filemode: e.filemode, + is_binary: e.is_binary, + } + } +} + +#[derive(Debug, Clone, Serialize, utoipa::ToSchema)] +pub struct TreeExistsResponse { + pub oid: String, + pub exists: bool, +} + +#[derive(Debug, Clone, Serialize, utoipa::ToSchema)] +pub struct TreeEntryCountResponse { + pub oid: String, + pub count: usize, +} + +#[derive(Debug, Clone, Serialize, utoipa::ToSchema)] +pub struct TreeIsEmptyResponse { + pub oid: String, + pub is_empty: bool, +} + +#[derive(Debug, Clone, Serialize, utoipa::ToSchema)] +pub struct TreeDiffStatsResponse { + pub old_tree: String, + pub new_tree: String, + pub files_changed: usize, + pub insertions: usize, + pub deletions: usize, +} + +impl AppService { + pub async fn git_tree_get( + &self, + namespace: String, + repo_name: String, + query: TreeGetQuery, + ctx: &Session, + ) -> Result { + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + let oid_str = query.oid.clone(); + + let info = tokio::task::spawn_blocking(move || { + let domain = git::GitDomain::from_model(repo)?; + let oid = git::CommitOid::new(&oid_str); + domain.tree_get(&oid) + }) + .await + .map_err(|e| AppError::InternalServerError(format!("Task join error: {}", e)))? 
+ .map_err(AppError::from)?; + + Ok(TreeInfoResponse::from(info)) + } + + pub async fn git_tree_exists( + &self, + namespace: String, + repo_name: String, + query: TreeGetQuery, + ctx: &Session, + ) -> Result { + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + let oid_str = query.oid.clone(); + + let exists = tokio::task::spawn_blocking(move || { + let domain = git::GitDomain::from_model(repo)?; + let oid = git::CommitOid::new(&oid_str); + Ok::<_, git::GitError>(domain.tree_exists(&oid)) + }) + .await + .map_err(|e| AppError::InternalServerError(format!("Task join error: {}", e)))? + .map_err(AppError::from)?; + + Ok(TreeExistsResponse { + oid: query.oid, + exists, + }) + } + + pub async fn git_tree_list( + &self, + namespace: String, + repo_name: String, + query: TreeGetQuery, + ctx: &Session, + ) -> Result, AppError> { + let repo = self + .utils_find_repo(namespace.clone(), repo_name.clone(), ctx) + .await?; + let cache_key = format!("git:tree:list:{}:{}:{}", namespace, repo_name, query.oid); + + if let Ok(mut conn) = self.cache.conn().await { + if let Ok(cached) = conn.get::<_, String>(cache_key.clone()).await { + if let Ok(cached) = serde_json::from_str(&cached) { + return Ok(cached); + } + } + } + + let oid_str = query.oid.clone(); + + let entries = tokio::task::spawn_blocking(move || { + let domain = git::GitDomain::from_model(repo)?; + let oid = git::CommitOid::new(&oid_str); + domain.tree_list(&oid) + }) + .await + .map_err(|e| AppError::InternalServerError(format!("Task join error: {}", e)))? 
+ .map_err(AppError::from)?; + + let response: Vec = + entries.into_iter().map(TreeEntryResponse::from).collect(); + + if let Ok(mut conn) = self.cache.conn().await { + if let Err(e) = conn + .set_ex::( + cache_key, + serde_json::to_string(&response).unwrap_or_default(), + 60 * 60, + ) + .await + { + slog::debug!(self.logs, "cache set failed (non-fatal): {}", e); + } + } + + Ok(response) + } + + pub async fn git_tree_entry( + &self, + namespace: String, + repo_name: String, + query: TreeEntryQuery, + ctx: &Session, + ) -> Result { + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + let oid_str = query.oid.clone(); + let index = query.index; + + let entry = tokio::task::spawn_blocking(move || { + let domain = git::GitDomain::from_model(repo)?; + let oid = git::CommitOid::new(&oid_str); + domain.tree_entry(&oid, index) + }) + .await + .map_err(|e| AppError::InternalServerError(format!("Task join error: {}", e)))? + .map_err(AppError::from)?; + + Ok(TreeEntryResponse::from(entry)) + } + + pub async fn git_tree_entry_by_path( + &self, + namespace: String, + repo_name: String, + query: TreeEntryByPathQuery, + ctx: &Session, + ) -> Result { + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + let oid_str = query.oid.clone(); + let path = query.path.clone(); + + let entry = tokio::task::spawn_blocking(move || { + let domain = git::GitDomain::from_model(repo)?; + let oid = git::CommitOid::new(&oid_str); + domain.tree_entry_by_path(&oid, &path) + }) + .await + .map_err(|e| AppError::InternalServerError(format!("Task join error: {}", e)))? 
+ .map_err(AppError::from)?; + + Ok(TreeEntryResponse::from(entry)) + } + + pub async fn git_tree_entry_by_commit_path( + &self, + namespace: String, + repo_name: String, + query: TreeEntryByCommitPathQuery, + ctx: &Session, + ) -> Result { + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + let commit_str = query.commit.clone(); + let path = query.path.clone(); + + let entry = tokio::task::spawn_blocking(move || { + let domain = git::GitDomain::from_model(repo)?; + let commit = git::CommitOid::new(&commit_str); + domain.tree_entry_by_path_from_commit(&commit, &path) + }) + .await + .map_err(|e| AppError::InternalServerError(format!("Task join error: {}", e)))? + .map_err(AppError::from)?; + + Ok(TreeEntryResponse::from(entry)) + } + + pub async fn git_tree_entry_count( + &self, + namespace: String, + repo_name: String, + query: TreeGetQuery, + ctx: &Session, + ) -> Result { + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + let oid_str = query.oid.clone(); + + let count = tokio::task::spawn_blocking(move || { + let domain = git::GitDomain::from_model(repo)?; + let oid = git::CommitOid::new(&oid_str); + domain.tree_entry_count(&oid) + }) + .await + .map_err(|e| AppError::InternalServerError(format!("Task join error: {}", e)))? + .map_err(AppError::from)?; + + Ok(TreeEntryCountResponse { + oid: query.oid, + count, + }) + } + + pub async fn git_tree_is_empty( + &self, + namespace: String, + repo_name: String, + query: TreeGetQuery, + ctx: &Session, + ) -> Result { + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + let oid_str = query.oid.clone(); + + let is_empty = tokio::task::spawn_blocking(move || { + let domain = git::GitDomain::from_model(repo)?; + let oid = git::CommitOid::new(&oid_str); + domain.tree_is_empty(&oid) + }) + .await + .map_err(|e| AppError::InternalServerError(format!("Task join error: {}", e)))? 
+ .map_err(AppError::from)?; + + Ok(TreeIsEmptyResponse { + oid: query.oid, + is_empty, + }) + } + + pub async fn git_tree_diffstats( + &self, + namespace: String, + repo_name: String, + query: TreeDiffQuery, + ctx: &Session, + ) -> Result { + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + let old_tree_str = query.old_tree.clone(); + let new_tree_str = query.new_tree.clone(); + + let stats = tokio::task::spawn_blocking(move || { + let domain = git::GitDomain::from_model(repo)?; + let old_tree = git::CommitOid::new(&old_tree_str); + let new_tree = git::CommitOid::new(&new_tree_str); + domain.tree_diffstats(&old_tree, &new_tree) + }) + .await + .map_err(|e| AppError::InternalServerError(format!("Task join error: {}", e)))? + .map_err(AppError::from)?; + + Ok(TreeDiffStatsResponse { + old_tree: query.old_tree, + new_tree: query.new_tree, + files_changed: stats.files_changed, + insertions: stats.insertions, + deletions: stats.deletions, + }) + } +} diff --git a/libs/service/git/watch.rs b/libs/service/git/watch.rs new file mode 100644 index 0000000..91430c6 --- /dev/null +++ b/libs/service/git/watch.rs @@ -0,0 +1,219 @@ +use crate::AppService; +use crate::error::AppError; +use chrono::Utc; +use models::repos::repo_watch; +use sea_orm::*; +use serde::{Deserialize, Serialize}; +use session::Session; + +#[derive(Debug, Clone, Serialize, Deserialize, utoipa::ToSchema)] +pub struct WatchCountResponse { + pub count: i64, +} +#[derive(Debug, Clone, Serialize, Deserialize, utoipa::ToSchema)] +pub struct WatchUserInfo { + pub uid: String, + pub username: String, + pub avatar_url: String, +} +#[derive(Debug, Clone, Serialize, Deserialize, utoipa::ToSchema)] +pub struct WatchUserListResponse { + pub users: Vec, +} +#[derive(Debug, Clone, Deserialize, utoipa::ToSchema)] +pub struct GitWatchRequest { + #[serde(default = "default_show_dashboard")] + pub show_dashboard: bool, + #[serde(default)] + pub notify_email: bool, +} +fn default_show_dashboard() -> bool { 
+ true +} +use models::repos::RepoWatch; +use models::repos::repo as repo_model; +use uuid::Uuid; +impl AppService { + pub async fn git_watch( + &self, + namespace: String, + repo_name: String, + request: GitWatchRequest, + ctx: &Session, + ) -> Result<(), AppError> { + let user_uid = ctx.user().ok_or(AppError::Unauthorized)?; + let repo = self + .utils_find_repo(namespace, repo_name.clone(), ctx) + .await?; + let existing: Option = RepoWatch::find() + .filter(repo_watch::Column::User.eq(user_uid)) + .filter(repo_watch::Column::Repo.eq(repo.id)) + .one(&self.db) + .await?; + if existing.is_some() { + return Err(AppError::InternalServerError( + "already watching".to_string(), + )); + } + RepoWatch::insert(repo_watch::ActiveModel { + id: Default::default(), + user: Set(user_uid), + repo: Set(repo.id), + show_dashboard: Set(request.show_dashboard), + notify_email: Set(request.notify_email), + created_at: Set(Utc::now()), + updated_at: Set(Utc::now()), + }) + .exec(&self.db) + .await?; + let project_id = match repo_model::Entity::find_by_id(repo.id).one(&self.db).await { + Ok(Some(r)) => r.project, + Ok(None) => Uuid::nil(), + Err(e) => { + slog::warn!( + self.logs, + "failed to look up project_id for activity log: {}", + e + ); + Uuid::nil() + } + }; + let _ = self + .project_log_activity( + project_id, + Some(repo.id), + user_uid, + super::super::project::activity::ActivityLogParams { + event_type: "repo_watch".to_string(), + title: format!("{} started watching repository '{}'", user_uid, repo_name), + repo_id: Some(repo.id), + content: None, + event_id: None, + event_sub_id: None, + metadata: Some(serde_json::json!({"repo_name": repo_name})), + is_private: false, + }, + ) + .await; + Ok(()) + } + + pub async fn git_unwatch( + &self, + namespace: String, + repo_name: String, + ctx: &Session, + ) -> Result<(), AppError> { + let user_uid = ctx.user().ok_or(AppError::Unauthorized)?; + let repo = self + .utils_find_repo(namespace, repo_name.clone(), ctx) + .await?; + let 
deleted = RepoWatch::delete_many() + .filter(repo_watch::Column::User.eq(user_uid)) + .filter(repo_watch::Column::Repo.eq(repo.id)) + .exec(&self.db) + .await?; + if deleted.rows_affected == 0 { + return Err(AppError::InternalServerError("not watching".to_string())); + } + let project_id = match repo_model::Entity::find_by_id(repo.id).one(&self.db).await { + Ok(Some(r)) => r.project, + Ok(None) => Uuid::nil(), + Err(e) => { + slog::warn!( + self.logs, + "failed to look up project_id for activity log: {}", + e + ); + Uuid::nil() + } + }; + let _ = self + .project_log_activity( + project_id, + Some(repo.id), + user_uid, + super::super::project::activity::ActivityLogParams { + event_type: "repo_unwatch".to_string(), + title: format!("{} stopped watching repository '{}'", user_uid, repo_name), + repo_id: Some(repo.id), + content: None, + event_id: None, + event_sub_id: None, + metadata: Some(serde_json::json!({"repo_name": repo_name})), + is_private: false, + }, + ) + .await; + Ok(()) + } + + pub async fn git_is_watched( + &self, + namespace: String, + repo_name: String, + ctx: &Session, + ) -> Result { + let user_uid = ctx.user().ok_or(AppError::Unauthorized)?; + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + let existing: Option = RepoWatch::find() + .filter(repo_watch::Column::User.eq(user_uid)) + .filter(repo_watch::Column::Repo.eq(repo.id)) + .one(&self.db) + .await?; + Ok(existing.is_some()) + } + + pub async fn git_watch_count( + &self, + namespace: String, + repo_name: String, + ctx: &Session, + ) -> Result { + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + let count = RepoWatch::find() + .filter(repo_watch::Column::Repo.eq(repo.id)) + .count(&self.db) + .await?; + Ok(WatchCountResponse { + count: count as i64, + }) + } + + pub async fn git_watch_user_list( + &self, + namespace: String, + repo_name: String, + pager: crate::Pager, + ctx: &Session, + ) -> Result { + let repo = self.utils_find_repo(namespace, repo_name, 
ctx).await?; + let page = std::cmp::Ord::max(pager.page, 1); + let par_page = std::cmp::Ord::min(std::cmp::Ord::max(pager.par_page, 1), 1000); + let offset_val = (page - 1).saturating_mul(par_page); + let offset = offset_val as u64; + let watches: Vec = RepoWatch::find() + .filter(repo_watch::Column::Repo.eq(repo.id)) + .order_by_desc(repo_watch::Column::CreatedAt) + .limit(par_page as u64) + .offset(offset) + .all(&self.db) + .await?; + let user_uids: Vec = watches.into_iter().map(|w| w.user).collect(); + if user_uids.is_empty() { + return Ok(WatchUserListResponse { users: vec![] }); + } + let users = models::users::user::Entity::find() + .filter(models::users::user::Column::Uid.is_in(user_uids)) + .all(&self.db) + .await? + .into_iter() + .map(|u| WatchUserInfo { + uid: u.uid.to_string(), + username: u.username, + avatar_url: u.avatar_url.unwrap_or_default(), + }) + .collect(); + Ok(WatchUserListResponse { users }) + } +} diff --git a/libs/service/git/webhook.rs b/libs/service/git/webhook.rs new file mode 100644 index 0000000..6fcce30 --- /dev/null +++ b/libs/service/git/webhook.rs @@ -0,0 +1,298 @@ +use crate::AppService; +use crate::error::AppError; +use chrono::Utc; +use models::repos::repo_webhook; +use sea_orm::*; +use serde::{Deserialize, Serialize}; +use session::Session; + +#[derive(Debug, Clone, Deserialize, Serialize, utoipa::ToSchema)] +pub struct WebhookEvent { + #[serde(default)] + pub push: bool, + #[serde(default)] + pub tag_push: bool, + #[serde(default)] + pub pull_request: bool, + #[serde(default)] + pub issue_comment: bool, + #[serde(default)] + pub release: bool, +} + +impl Default for WebhookEvent { + fn default() -> Self { + Self { + push: true, + tag_push: false, + pull_request: false, + issue_comment: false, + release: false, + } + } +} + +#[derive(Debug, Clone, Deserialize, utoipa::ToSchema)] +pub struct CreateWebhookParams { + pub url: String, + #[serde(default)] + pub content_type: Option, + #[serde(default)] + pub secret: Option, + 
#[serde(default)] + pub insecure_ssl: Option, + #[serde(default)] + pub events: WebhookEvent, + #[serde(default = "default_active")] + pub active: bool, +} + +fn default_active() -> bool { + true +} + +#[derive(Debug, Clone, Deserialize, utoipa::ToSchema)] +pub struct UpdateWebhookParams { + pub url: Option, + pub content_type: Option, + pub secret: Option, + pub insecure_ssl: Option, + pub events: Option, + pub active: Option, +} + +#[derive(Debug, Clone, Serialize, utoipa::ToSchema)] +pub struct WebhookResponse { + pub id: i64, + pub repo_uuid: String, + pub url: String, + pub content_type: String, + pub secret: Option, + pub events: WebhookEvent, + pub active: bool, + pub created_at: chrono::DateTime, + pub last_delivered_at: Option>, + pub touch_count: i64, +} + +impl From for WebhookResponse { + fn from(m: repo_webhook::Model) -> Self { + let events: WebhookEvent = serde_json::from_value(m.event.clone()).unwrap_or_default(); + let url = m.url.unwrap_or_default(); + let (content_type, secret, active) = + serde_json::from_value::(m.event.clone()) + .ok() + .map(|v| { + ( + v.get("content_type") + .and_then(|v| v.as_str()) + .unwrap_or("json") + .to_string(), + v.get("secret").and_then(|v| v.as_str()).map(String::from), + v.get("active").and_then(|v| v.as_bool()).unwrap_or(true), + ) + }) + .unwrap_or(("json".to_string(), None, true)); + + WebhookResponse { + id: m.id, + repo_uuid: m.repo.to_string(), + url, + content_type, + secret, + events, + active, + created_at: m.created_at, + last_delivered_at: m.last_delivered_at, + touch_count: m.touch_count, + } + } +} + +#[derive(Debug, Clone, Serialize, utoipa::ToSchema)] +pub struct WebhookListResponse { + pub webhooks: Vec, + pub total: usize, +} + +impl AppService { + pub async fn git_webhook_list( + &self, + namespace: String, + repo_name: String, + ctx: &Session, + ) -> Result { + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + let webhooks = repo_webhook::Entity::find() + 
.filter(repo_webhook::Column::Repo.eq(repo.id)) + .order_by_asc(repo_webhook::Column::Id) + .all(&self.db) + .await?; + let total = webhooks.len(); + let webhooks = webhooks.into_iter().map(WebhookResponse::from).collect(); + Ok(WebhookListResponse { webhooks, total }) + } + + pub async fn git_webhook_create( + &self, + namespace: String, + repo_name: String, + params: CreateWebhookParams, + ctx: &Session, + ) -> Result { + let repo = self + .utils_find_repo(namespace.clone(), repo_name.clone(), ctx) + .await?; + let _ = self + .utils_check_repo_admin(namespace.clone(), repo_name.clone(), ctx) + .await?; + + let event_json = serde_json::json!({ + "push": params.events.push, + "tag_push": params.events.tag_push, + "pull_request": params.events.pull_request, + "issue_comment": params.events.issue_comment, + "release": params.events.release, + "content_type": params.content_type.unwrap_or_else(|| "json".to_string()), + "secret": params.secret, + "active": params.active, + }); + + let model = repo_webhook::ActiveModel { + repo: Set(repo.id), + event: Set(event_json), + url: Set(Some(params.url)), + access_key: Set(None), + secret_key: Set(params.secret), + created_at: Set(Utc::now()), + last_delivered_at: Set(None), + touch_count: Set(0), + ..Default::default() + } + .insert(&self.db) + .await?; + + Ok(WebhookResponse::from(model)) + } + + pub async fn git_webhook_get( + &self, + namespace: String, + repo_name: String, + webhook_id: i64, + ctx: &Session, + ) -> Result { + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + let webhook = repo_webhook::Entity::find_by_id(webhook_id) + .one(&self.db) + .await? 
+ .ok_or(AppError::NotFound("Webhook not found".to_string()))?; + + if webhook.repo != repo.id { + return Err(AppError::NotFound("Webhook not found".to_string())); + } + + Ok(WebhookResponse::from(webhook)) + } + + pub async fn git_webhook_update( + &self, + namespace: String, + repo_name: String, + webhook_id: i64, + params: UpdateWebhookParams, + ctx: &Session, + ) -> Result { + let repo = self + .utils_find_repo(namespace.clone(), repo_name.clone(), ctx) + .await?; + let _ = self + .utils_check_repo_admin(namespace.clone(), repo_name.clone(), ctx) + .await?; + + let webhook = repo_webhook::Entity::find_by_id(webhook_id) + .one(&self.db) + .await? + .ok_or(AppError::NotFound("Webhook not found".to_string()))?; + + if webhook.repo != repo.id { + return Err(AppError::NotFound("Webhook not found".to_string())); + } + + let mut active: repo_webhook::ActiveModel = webhook.clone().into(); + if let Some(url) = params.url { + active.url = Set(Some(url)); + } + let secret_val = params.secret.clone(); + if let Some(secret) = params.secret { + active.secret_key = Set(Some(secret)); + } + if params.events.is_some() || params.content_type.is_some() || params.active.is_some() { + let existing: serde_json::Value = webhook.event.clone(); + let events = params + .events + .unwrap_or_else(|| serde_json::from_value(existing.clone()).unwrap_or_default()); + let content_type = params + .content_type + .or_else(|| { + existing + .get("content_type") + .and_then(|v| v.as_str()) + .map(String::from) + }) + .unwrap_or_else(|| "json".to_string()); + let active_val = params + .active + .or_else(|| existing.get("active").and_then(|v| v.as_bool())) + .unwrap_or(true); + let secret_in_event = existing + .get("secret") + .and_then(|v| v.as_str()) + .map(String::from); + let final_secret = secret_val.clone().or(secret_in_event); + + active.event = Set(serde_json::json!({ + "push": events.push, + "tag_push": events.tag_push, + "pull_request": events.pull_request, + "issue_comment": 
events.issue_comment, + "release": events.release, + "content_type": content_type, + "secret": final_secret, + "active": active_val, + })); + } + + let updated = active.update(&self.db).await?; + Ok(WebhookResponse::from(updated)) + } + + pub async fn git_webhook_delete( + &self, + namespace: String, + repo_name: String, + webhook_id: i64, + ctx: &Session, + ) -> Result<(), AppError> { + let repo = self + .utils_find_repo(namespace.clone(), repo_name.clone(), ctx) + .await?; + let _ = self + .utils_check_repo_admin(namespace.clone(), repo_name.clone(), ctx) + .await?; + + let webhook = repo_webhook::Entity::find_by_id(webhook_id) + .one(&self.db) + .await? + .ok_or(AppError::NotFound("Webhook not found".to_string()))?; + + if webhook.repo != repo.id { + return Err(AppError::NotFound("Webhook not found".to_string())); + } + + repo_webhook::Entity::delete_by_id(webhook_id) + .exec(&self.db) + .await?; + Ok(()) + } +} diff --git a/libs/service/issue/assignee.rs b/libs/service/issue/assignee.rs new file mode 100644 index 0000000..5736c7e --- /dev/null +++ b/libs/service/issue/assignee.rs @@ -0,0 +1,247 @@ +use crate::AppService; +use crate::error::AppError; +use crate::project::activity::ActivityLogParams; +use chrono::Utc; +use models::issues::{issue, issue_assignee}; +use models::projects::project_members; +use models::users::user; +use sea_orm::*; +use serde::{Deserialize, Serialize}; +use session::Session; +use utoipa::ToSchema; +use uuid::Uuid; + +#[derive(Debug, Clone, Deserialize, ToSchema)] +pub struct IssueAssignUserRequest { + pub user_id: Uuid, +} + +#[derive(Debug, Clone, Serialize, ToSchema)] +pub struct IssueAssigneeResponse { + pub issue: Uuid, + pub user_id: Uuid, + pub username: String, + pub assigned_at: chrono::DateTime, +} + +impl AppService { + /// List assignees for an issue. 
+ pub async fn issue_assignee_list( + &self, + project_name: String, + issue_number: i64, + ctx: &Session, + ) -> Result, AppError> { + let project = self.utils_find_project_by_name(project_name).await?; + if let Some(uid) = ctx.user() { + self.check_project_access(project.id, uid).await?; + } + + let issue = issue::Entity::find() + .filter(issue::Column::Project.eq(project.id)) + .filter(issue::Column::Number.eq(issue_number)) + .one(&self.db) + .await? + .ok_or(AppError::NotFound("Issue not found".to_string()))?; + + let assignees = issue_assignee::Entity::find() + .filter(issue_assignee::Column::Issue.eq(issue.id)) + .all(&self.db) + .await?; + + let user_ids: Vec = assignees.iter().map(|a| a.user).collect(); + let users = if user_ids.is_empty() { + vec![] + } else { + user::Entity::find() + .filter(user::Column::Uid.is_in(user_ids)) + .all(&self.db) + .await? + }; + + let responses: Vec = assignees + .into_iter() + .filter_map(|a| { + let username = users.iter().find(|u| u.uid == a.user)?.username.clone(); + Some(IssueAssigneeResponse { + issue: a.issue, + user_id: a.user, + username, + assigned_at: a.assigned_at, + }) + }) + .collect(); + + Ok(responses) + } + + /// Assign a user to an issue. + pub async fn issue_assignee_add( + &self, + project_name: String, + issue_number: i64, + request: IssueAssignUserRequest, + ctx: &Session, + ) -> Result { + let user_uid = ctx.user().ok_or(AppError::Unauthorized)?; + let project = self.utils_find_project_by_name(project_name).await?; + + // Must be project member + let _member = project_members::Entity::find() + .filter(project_members::Column::Project.eq(project.id)) + .filter(project_members::Column::User.eq(user_uid)) + .one(&self.db) + .await? + .ok_or(AppError::NoPower)?; + + let issue = issue::Entity::find() + .filter(issue::Column::Project.eq(project.id)) + .filter(issue::Column::Number.eq(issue_number)) + .one(&self.db) + .await? 
+ .ok_or(AppError::NotFound("Issue not found".to_string()))?; + + // Check if assignee is also a member + let assignee_member = project_members::Entity::find() + .filter(project_members::Column::Project.eq(project.id)) + .filter(project_members::Column::User.eq(request.user_id)) + .one(&self.db) + .await?; + if assignee_member.is_none() { + return Err(AppError::NotFound( + "User is not a project member".to_string(), + )); + } + + let now = Utc::now(); + let active = issue_assignee::ActiveModel { + issue: Set(issue.id), + user: Set(request.user_id), + assigned_at: Set(now), + ..Default::default() + }; + let model = active.insert(&self.db).await?; + + self.invalidate_issue_cache(project.id, issue_number).await; + + let username = user::Entity::find_by_id(request.user_id) + .one(&self.db) + .await + .ok() + .flatten() + .map(|u| u.username) + .unwrap_or_default(); + + let response = Ok(IssueAssigneeResponse { + issue: model.issue, + user_id: model.user, + username: username.clone(), + assigned_at: model.assigned_at, + }); + + let _ = self + .project_log_activity( + project.id, + None, + user_uid, + ActivityLogParams { + event_type: "issue_assignee_add".to_string(), + title: format!( + "{} assigned {} to issue #{}", + user_uid, + username.clone(), + issue_number + ), + repo_id: None, + content: None, + event_id: Some(model.issue), + event_sub_id: Some(issue_number), + metadata: Some(serde_json::json!({ + "assignee_uid": request.user_id, + "assignee_username": username.clone(), + })), + is_private: false, + }, + ) + .await; + + response + } + + /// Remove an assignee from an issue. 
+ pub async fn issue_assignee_remove( + &self, + project_name: String, + issue_number: i64, + assignee_id: Uuid, + ctx: &Session, + ) -> Result<(), AppError> { + let user_uid = ctx.user().ok_or(AppError::Unauthorized)?; + let project = self.utils_find_project_by_name(project_name).await?; + + let member = project_members::Entity::find() + .filter(project_members::Column::Project.eq(project.id)) + .filter(project_members::Column::User.eq(user_uid)) + .one(&self.db) + .await? + .ok_or(AppError::NoPower)?; + + let issue = issue::Entity::find() + .filter(issue::Column::Project.eq(project.id)) + .filter(issue::Column::Number.eq(issue_number)) + .one(&self.db) + .await? + .ok_or(AppError::NotFound("Issue not found".to_string()))?; + + // Allow: the assignee themselves, or any admin/owner, or the issue author + let role = member.scope_role().map_err(|_| AppError::RoleParseError)?; + let is_self = assignee_id == user_uid; + let is_author = issue.author == user_uid; + let is_admin = role == models::projects::MemberRole::Admin + || role == models::projects::MemberRole::Owner; + + if !is_self && !is_author && !is_admin { + return Err(AppError::NoPower); + } + + issue_assignee::Entity::delete_many() + .filter(issue_assignee::Column::Issue.eq(issue.id)) + .filter(issue_assignee::Column::User.eq(assignee_id)) + .exec(&self.db) + .await?; + + self.invalidate_issue_cache(project.id, issue_number).await; + + let assignee_username = user::Entity::find_by_id(assignee_id) + .one(&self.db) + .await + .ok() + .flatten() + .map(|u| u.username) + .unwrap_or_else(|| assignee_id.to_string()); + let _ = self + .project_log_activity( + project.id, + None, + user_uid, + ActivityLogParams { + event_type: "issue_assignee_remove".to_string(), + title: format!( + "{} removed {} from issue #{}", + user_uid, assignee_username, issue_number + ), + repo_id: None, + content: None, + event_id: Some(issue.id), + event_sub_id: Some(issue_number), + metadata: Some(serde_json::json!({ + 
"removed_assignee_uid": assignee_id, + "removed_assignee_username": assignee_username, + })), + is_private: false, + }, + ) + .await; + Ok(()) + } +} diff --git a/libs/service/issue/comment.rs b/libs/service/issue/comment.rs new file mode 100644 index 0000000..d29e2e6 --- /dev/null +++ b/libs/service/issue/comment.rs @@ -0,0 +1,373 @@ +use crate::AppService; +use crate::error::AppError; +use crate::project::activity::ActivityLogParams; +use chrono::Utc; +use models::issues::{issue, issue_comment}; +use models::projects::project_members; +use models::users::user; +use sea_orm::*; +use serde::{Deserialize, Serialize}; +use session::Session; +use utoipa::ToSchema; +use uuid::Uuid; + +#[derive(Debug, Clone, Deserialize, ToSchema)] +pub struct IssueCommentCreateRequest { + pub body: String, +} + +#[derive(Debug, Clone, Deserialize, ToSchema)] +pub struct IssueCommentUpdateRequest { + pub body: String, +} + +#[derive(Debug, Clone, Serialize, ToSchema)] +pub struct IssueCommentResponse { + pub id: i64, + pub issue: Uuid, + pub author: Uuid, + pub author_username: String, + pub body: String, + pub created_at: chrono::DateTime, + pub updated_at: chrono::DateTime, +} + +impl From for IssueCommentResponse { + fn from(c: issue_comment::Model) -> Self { + Self { + id: c.id, + issue: c.issue, + author: c.author, + author_username: String::new(), + body: c.body, + created_at: c.created_at, + updated_at: c.updated_at, + } + } +} + +#[derive(Debug, Clone, Serialize, ToSchema)] +pub struct IssueCommentListResponse { + pub comments: Vec, + pub total: i64, + pub page: i64, + pub per_page: i64, +} + +impl AppService { + /// List comments on an issue. 
+ pub async fn issue_comment_list( + &self, + project_name: String, + issue_number: i64, + page: Option, + per_page: Option, + ctx: &Session, + ) -> Result { + let project = self.utils_find_project_by_name(project_name).await?; + if let Some(uid) = ctx.user() { + self.check_project_access(project.id, uid).await?; + } + + let issue = issue::Entity::find() + .filter(issue::Column::Project.eq(project.id)) + .filter(issue::Column::Number.eq(issue_number)) + .one(&self.db) + .await? + .ok_or(AppError::NotFound("Issue not found".to_string()))?; + + let page = page.unwrap_or(1); + let per_page = per_page.unwrap_or(20); + let offset = (page - 1) * per_page; + + let total: u64 = issue_comment::Entity::find() + .filter(issue_comment::Column::Issue.eq(issue.id)) + .count(&self.db) + .await?; + + let comments = issue_comment::Entity::find() + .filter(issue_comment::Column::Issue.eq(issue.id)) + .order_by_asc(issue_comment::Column::CreatedAt) + .offset(offset as u64) + .limit(per_page as u64) + .all(&self.db) + .await?; + + let author_ids: Vec = comments.iter().map(|c| c.author).collect(); + let authors = if author_ids.is_empty() { + vec![] + } else { + user::Entity::find() + .filter(user::Column::Uid.is_in(author_ids)) + .all(&self.db) + .await? + }; + + let responses: Vec = comments + .into_iter() + .map(|c| { + let username = authors + .iter() + .find(|u| u.uid == c.author) + .map(|u| u.username.clone()) + .unwrap_or_default(); + IssueCommentResponse { + author_username: username, + ..IssueCommentResponse::from(c) + } + }) + .collect(); + + Ok(IssueCommentListResponse { + comments: responses, + total: total as i64, + page, + per_page, + }) + } + + /// Get a single comment by ID. 
+ pub async fn issue_comment_get( + &self, + project_name: String, + issue_number: i64, + comment_id: i64, + ctx: &Session, + ) -> Result { + let project = self.utils_find_project_by_name(project_name).await?; + if let Some(uid) = ctx.user() { + self.check_project_access(project.id, uid).await?; + } + + let issue = issue::Entity::find() + .filter(issue::Column::Project.eq(project.id)) + .filter(issue::Column::Number.eq(issue_number)) + .one(&self.db) + .await? + .ok_or(AppError::NotFound("Issue not found".to_string()))?; + + let comment = issue_comment::Entity::find_by_id(comment_id) + .filter(issue_comment::Column::Issue.eq(issue.id)) + .one(&self.db) + .await? + .ok_or(AppError::NotFound("Comment not found".to_string()))?; + + let author = user::Entity::find_by_id(comment.author) + .one(&self.db) + .await + .ok() + .flatten(); + let username = author.map(|u| u.username).unwrap_or_default(); + + Ok(IssueCommentResponse { + author_username: username, + ..IssueCommentResponse::from(comment) + }) + } + + /// Create a comment on an issue. + pub async fn issue_comment_create( + &self, + project_name: String, + issue_number: i64, + request: IssueCommentCreateRequest, + ctx: &Session, + ) -> Result { + let user_uid = ctx.user().ok_or(AppError::Unauthorized)?; + let project = self.utils_find_project_by_name(project_name).await?; + + let member = project_members::Entity::find() + .filter(project_members::Column::Project.eq(project.id)) + .filter(project_members::Column::User.eq(user_uid)) + .one(&self.db) + .await?; + if member.is_none() { + return Err(AppError::NoPower); + } + + let issue = issue::Entity::find() + .filter(issue::Column::Project.eq(project.id)) + .filter(issue::Column::Number.eq(issue_number)) + .one(&self.db) + .await? 
+ .ok_or(AppError::NotFound("Issue not found".to_string()))?; + + let now = Utc::now(); + let active = issue_comment::ActiveModel { + issue: Set(issue.id), + author: Set(user_uid), + body: Set(request.body), + created_at: Set(now), + updated_at: Set(now), + ..Default::default() + }; + let model = active.insert(&self.db).await?; + + self.invalidate_issue_cache(project.id, issue_number).await; + + let actor_username = user::Entity::find_by_id(user_uid) + .one(&self.db) + .await + .ok() + .flatten() + .map(|u| u.username) + .unwrap_or_default(); + + let _ = self + .project_log_activity( + project.id, + None, + user_uid, + crate::project::activity::ActivityLogParams { + event_type: "issue_comment".to_string(), + title: format!("{} commented on issue #{}", actor_username, issue_number), + repo_id: None, + content: Some(model.body.clone()), + event_id: None, + event_sub_id: Some(issue_number), + metadata: None, + is_private: false, + }, + ) + .await; + + Ok(IssueCommentResponse { + author_username: actor_username, + ..IssueCommentResponse::from(model) + }) + } + + /// Update a comment body. + pub async fn issue_comment_update( + &self, + project_name: String, + issue_number: i64, + comment_id: i64, + request: IssueCommentUpdateRequest, + ctx: &Session, + ) -> Result { + let user_uid = ctx.user().ok_or(AppError::Unauthorized)?; + let project = self.utils_find_project_by_name(project_name).await?; + + let issue = issue::Entity::find() + .filter(issue::Column::Project.eq(project.id)) + .filter(issue::Column::Number.eq(issue_number)) + .one(&self.db) + .await? + .ok_or(AppError::NotFound("Issue not found".to_string()))?; + + let comment = issue_comment::Entity::find_by_id(comment_id) + .filter(issue_comment::Column::Issue.eq(issue.id)) + .one(&self.db) + .await? 
+ .ok_or(AppError::NotFound("Comment not found".to_string()))?; + + // Only author can edit their comment + if comment.author != user_uid { + return Err(AppError::NoPower); + } + + let mut active: issue_comment::ActiveModel = comment.clone().into(); + active.body = Set(request.body); + active.updated_at = Set(Utc::now()); + let model = active.update(&self.db).await?; + + let username = user::Entity::find_by_id(model.author) + .one(&self.db) + .await + .ok() + .flatten() + .map(|u| u.username) + .unwrap_or_default(); + + let _ = self + .project_log_activity( + project.id, + None, + user_uid, + ActivityLogParams { + event_type: "issue_comment_update".to_string(), + title: format!("{} updated a comment on issue #{}", user_uid, issue_number), + repo_id: None, + content: None, + event_id: None, + event_sub_id: Some(issue_number), + metadata: None, + is_private: false, + }, + ) + .await; + Ok(IssueCommentResponse { + author_username: username, + ..IssueCommentResponse::from(model) + }) + } + + /// Delete a comment. + pub async fn issue_comment_delete( + &self, + project_name: String, + issue_number: i64, + comment_id: i64, + ctx: &Session, + ) -> Result<(), AppError> { + let user_uid = ctx.user().ok_or(AppError::Unauthorized)?; + let project = self.utils_find_project_by_name(project_name).await?; + + let issue = issue::Entity::find() + .filter(issue::Column::Project.eq(project.id)) + .filter(issue::Column::Number.eq(issue_number)) + .one(&self.db) + .await? + .ok_or(AppError::NotFound("Issue not found".to_string()))?; + + let comment = issue_comment::Entity::find_by_id(comment_id) + .filter(issue_comment::Column::Issue.eq(issue.id)) + .one(&self.db) + .await? + .ok_or(AppError::NotFound("Comment not found".to_string()))?; + + // Allow: author of comment, or issue author, or admin/owner + let member = project_members::Entity::find() + .filter(project_members::Column::Project.eq(project.id)) + .filter(project_members::Column::User.eq(user_uid)) + .one(&self.db) + .await? 
+ .ok_or(AppError::NoPower)?; + + let role = member.scope_role().map_err(|_| AppError::RoleParseError)?; + let is_comment_author = comment.author == user_uid; + let is_issue_author = issue.author == user_uid; + let is_admin = role == models::projects::MemberRole::Admin + || role == models::projects::MemberRole::Owner; + + if !is_comment_author && !is_issue_author && !is_admin { + return Err(AppError::NoPower); + } + + issue_comment::Entity::delete_by_id(comment_id) + .exec(&self.db) + .await?; + + self.invalidate_issue_cache(project.id, issue_number).await; + + let _ = self + .project_log_activity( + project.id, + None, + user_uid, + ActivityLogParams { + event_type: "issue_comment_delete".to_string(), + title: format!("{} deleted a comment on issue #{}", user_uid, issue_number), + repo_id: None, + content: None, + event_id: None, + event_sub_id: Some(issue_number), + metadata: None, + is_private: false, + }, + ) + .await; + Ok(()) + } +} diff --git a/libs/service/issue/issue.rs b/libs/service/issue/issue.rs new file mode 100644 index 0000000..ee8a520 --- /dev/null +++ b/libs/service/issue/issue.rs @@ -0,0 +1,587 @@ +use crate::AppService; +use crate::error::AppError; +use chrono::Utc; +use models::issues::{ + IssueState, issue, issue_assignee, issue_comment, issue_label, issue_repo, issue_subscriber, +}; +use models::projects::project_members; +use models::users::user; +use redis::AsyncCommands; +use sea_orm::*; +use serde::{Deserialize, Serialize}; +use session::Session; +use utoipa::ToSchema; +use uuid::Uuid; + +#[derive(Debug, Clone, Deserialize, ToSchema)] +pub struct IssueCreateRequest { + pub title: String, + pub body: Option, + pub milestone: Option, +} + +#[derive(Debug, Clone, Deserialize, ToSchema)] +pub struct IssueUpdateRequest { + pub title: Option, + pub body: Option, + pub milestone: Option, + pub state: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize, ToSchema)] +pub struct IssueResponse { + pub id: Uuid, + pub project: Uuid, + pub 
number: i64, + pub title: String, + pub body: Option, + pub state: String, + pub author: Uuid, + pub author_username: Option, + pub milestone: Option, + pub created_at: chrono::DateTime, + pub updated_at: chrono::DateTime, + pub closed_at: Option>, + pub created_by_ai: bool, +} + +impl From for IssueResponse { + fn from(i: issue::Model) -> Self { + Self { + id: i.id, + project: i.project, + number: i.number, + title: i.title, + body: i.body, + state: i.state, + author: i.author, + author_username: None, + milestone: i.milestone, + created_at: i.created_at, + updated_at: i.updated_at, + closed_at: i.closed_at, + created_by_ai: i.created_by_ai, + } + } +} + +#[derive(Debug, Clone, Serialize, ToSchema)] +pub struct IssueListResponse { + pub issues: Vec, + pub total: i64, + pub page: i64, + pub per_page: i64, +} + +#[derive(Debug, Clone, Serialize, ToSchema)] +pub struct IssueSummaryResponse { + pub total: i64, + pub open: i64, + pub closed: i64, +} + +impl AppService { + /// List issues for a project with optional state filter. 
+ pub async fn issue_list( + &self, + project_name: String, + state: Option, + page: Option, + per_page: Option, + ctx: &Session, + ) -> Result { + let project = self.utils_find_project_by_name(project_name).await?; + + // Check membership for private projects + if let Some(uid) = ctx.user() { + self.check_project_access(project.id, uid).await?; + } + + let page = page.unwrap_or(1); + let per_page = per_page.unwrap_or(20); + let offset = (page - 1) * per_page; + + let mut query = issue::Entity::find() + .filter(issue::Column::Project.eq(project.id)) + .order_by_desc(issue::Column::CreatedAt); + + if let Some(ref s) = state { + query = query.filter(issue::Column::State.eq(s)); + } + + let total = query.clone().count(&self.db).await?; + let issues = query + .offset(offset as u64) + .limit(per_page as u64) + .all(&self.db) + .await?; + + let author_ids: Vec = issues.iter().map(|i| i.author).collect(); + let authors = if author_ids.is_empty() { + vec![] + } else { + user::Entity::find() + .filter(user::Column::Uid.is_in(author_ids)) + .all(&self.db) + .await? + }; + + let responses: Vec = issues + .into_iter() + .map(|i| { + let username = authors + .iter() + .find(|u| u.uid == i.author) + .map(|u| u.username.clone()); + IssueResponse { + author_username: username, + ..IssueResponse::from(i) + } + }) + .collect(); + + Ok(IssueListResponse { + issues: responses, + total: total as i64, + page, + per_page, + }) + } + + /// Get a single issue by project + number. 
+ pub async fn issue_get( + &self, + project_name: String, + number: i64, + ctx: &Session, + ) -> Result { + let project = self.utils_find_project_by_name(project_name).await?; + if let Some(uid) = ctx.user() { + self.check_project_access(project.id, uid).await?; + } + + let cache_key = format!("issue:get:{}:{}", project.id, number); + if let Ok(mut conn) = self.cache.conn().await { + if let Ok(cached) = conn.get::<_, String>(cache_key.clone()).await { + if let Ok(cached) = serde_json::from_str::(&cached) { + return Ok(cached); + } + } + } + + let issue = issue::Entity::find() + .filter(issue::Column::Project.eq(project.id)) + .filter(issue::Column::Number.eq(number)) + .one(&self.db) + .await? + .ok_or(AppError::NotFound("Issue not found".to_string()))?; + + let author = user::Entity::find_by_id(issue.author) + .one(&self.db) + .await + .ok() + .flatten(); + let username = author.map(|u| u.username); + + let response = IssueResponse { + author_username: username, + ..IssueResponse::from(issue) + }; + + if let Ok(mut conn) = self.cache.conn().await { + let _: Option<()> = conn + .set_ex::( + cache_key, + serde_json::to_string(&response).unwrap_or_default(), + 300, + ) + .await + .ok(); + } + + Ok(response) + } + + /// Get the next sequential issue number for a project. + async fn next_issue_number(&self, project_id: Uuid) -> Result { + let max_num: Option> = issue::Entity::find() + .filter(issue::Column::Project.eq(project_id)) + .select_only() + .column_as(issue::Column::Number.max(), "max_num") + .into_tuple::>() + .one(&self.db) + .await?; + Ok(max_num.flatten().unwrap_or(0) + 1) + } + + /// Create a new issue. 
+ pub async fn issue_create( + &self, + project_name: String, + request: IssueCreateRequest, + ctx: &Session, + ) -> Result { + let user_uid = ctx.user().ok_or(AppError::Unauthorized)?; + let project = self.utils_find_project_by_name(project_name).await?; + + // Any project member can create issues + let member = project_members::Entity::find() + .filter(project_members::Column::Project.eq(project.id)) + .filter(project_members::Column::User.eq(user_uid)) + .one(&self.db) + .await?; + if member.is_none() { + return Err(AppError::NoPower); + } + + let number = self.next_issue_number(project.id).await?; + let now = Utc::now(); + + let active = issue::ActiveModel { + id: Set(Uuid::now_v7()), + project: Set(project.id), + number: Set(number), + title: Set(request.title), + body: Set(request.body), + state: Set(IssueState::Open.to_string()), + author: Set(user_uid), + milestone: Set(request.milestone), + created_at: Set(now), + updated_at: Set(now), + closed_at: Set(None), + created_by_ai: Set(false), + ..Default::default() + }; + let model = active.insert(&self.db).await?; + + // Log activity + let actor_username = user::Entity::find_by_id(user_uid) + .one(&self.db) + .await + .ok() + .flatten() + .map(|u| u.username) + .unwrap_or_default(); + let _ = self + .project_log_activity( + project.id, + None, + user_uid, + super::super::project::activity::ActivityLogParams { + event_type: "issue_open".to_string(), + title: format!("{} opened issue #{}", actor_username, number), + repo_id: None, + content: Some(model.title.clone()), + event_id: Some(model.id), + event_sub_id: Some(model.number), + metadata: None, + is_private: false, + }, + ) + .await; + + Ok(IssueResponse::from(model)) + } + + /// Update an issue (title, body, milestone). 
+ pub async fn issue_update( + &self, + project_name: String, + number: i64, + request: IssueUpdateRequest, + ctx: &Session, + ) -> Result { + let user_uid = ctx.user().ok_or(AppError::Unauthorized)?; + let project = self.utils_find_project_by_name(project_name).await?; + + let member = project_members::Entity::find() + .filter(project_members::Column::Project.eq(project.id)) + .filter(project_members::Column::User.eq(user_uid)) + .one(&self.db) + .await?; + if member.is_none() { + return Err(AppError::NoPower); + } + + let issue = issue::Entity::find() + .filter(issue::Column::Project.eq(project.id)) + .filter(issue::Column::Number.eq(number)) + .one(&self.db) + .await? + .ok_or(AppError::NotFound("Issue not found".to_string()))?; + + let mut active: issue::ActiveModel = issue.clone().into(); + if let Some(title) = request.title { + active.title = Set(title); + } + if let Some(body) = request.body { + active.body = Set(Some(body)); + } + if let Some(milestone) = request.milestone { + active.milestone = Set(Some(milestone)); + } + active.updated_at = Set(Utc::now()); + + let model = active.update(&self.db).await?; + + self.invalidate_issue_cache(project.id, number).await; + + let actor_username = user::Entity::find_by_id(user_uid) + .one(&self.db) + .await + .ok() + .flatten() + .map(|u| u.username) + .unwrap_or_default(); + let _ = self + .project_log_activity( + project.id, + None, + user_uid, + super::super::project::activity::ActivityLogParams { + event_type: "issue_update".to_string(), + title: format!("{} updated issue #{}", actor_username, number), + repo_id: None, + content: Some(model.title.clone()), + event_id: Some(model.id), + event_sub_id: Some(model.number), + metadata: None, + is_private: false, + }, + ) + .await; + + Ok(IssueResponse::from(model)) + } + + /// Close an issue. 
+ pub async fn issue_close( + &self, + project_name: String, + number: i64, + ctx: &Session, + ) -> Result { + self.issue_set_state(project_name, number, IssueState::Closed, ctx) + .await + } + + /// Reopen a closed issue. + pub async fn issue_reopen( + &self, + project_name: String, + number: i64, + ctx: &Session, + ) -> Result { + self.issue_set_state(project_name, number, IssueState::Open, ctx) + .await + } + + async fn issue_set_state( + &self, + project_name: String, + number: i64, + state: IssueState, + ctx: &Session, + ) -> Result { + let user_uid = ctx.user().ok_or(AppError::Unauthorized)?; + let project = self.utils_find_project_by_name(project_name).await?; + + let member = project_members::Entity::find() + .filter(project_members::Column::Project.eq(project.id)) + .filter(project_members::Column::User.eq(user_uid)) + .one(&self.db) + .await?; + if member.is_none() { + return Err(AppError::NoPower); + } + + let issue = issue::Entity::find() + .filter(issue::Column::Project.eq(project.id)) + .filter(issue::Column::Number.eq(number)) + .one(&self.db) + .await? 
+ .ok_or(AppError::NotFound("Issue not found".to_string()))?; + + let now = Utc::now(); + let closed_at = if state == IssueState::Closed { + Some(now) + } else { + None + }; + + let mut active: issue::ActiveModel = issue.clone().into(); + active.state = Set(state.to_string()); + active.updated_at = Set(now); + active.closed_at = Set(closed_at); + let model = active.update(&self.db).await?; + + self.invalidate_issue_cache(project.id, number).await; + + let actor_username = user::Entity::find_by_id(user_uid) + .one(&self.db) + .await + .ok() + .flatten() + .map(|u| u.username) + .unwrap_or_default(); + let event_type = if state == IssueState::Closed { + "issue_close" + } else { + "issue_reopen" + }; + let _ = self + .project_log_activity( + project.id, + None, + user_uid, + super::super::project::activity::ActivityLogParams { + event_type: event_type.to_string(), + title: format!( + "{} {} issue #{}", + actor_username, + if state == IssueState::Closed { + "closed" + } else { + "reopened" + }, + number + ), + repo_id: None, + content: Some(model.title.clone()), + event_id: Some(model.id), + event_sub_id: Some(model.number), + metadata: None, + is_private: false, + }, + ) + .await; + + Ok(IssueResponse::from(model)) + } + + /// Delete an issue. Only author or admin/owner can delete. + pub async fn issue_delete( + &self, + project_name: String, + number: i64, + ctx: &Session, + ) -> Result<(), AppError> { + let user_uid = ctx.user().ok_or(AppError::Unauthorized)?; + let project = self.utils_find_project_by_name(project_name).await?; + + let issue = issue::Entity::find() + .filter(issue::Column::Project.eq(project.id)) + .filter(issue::Column::Number.eq(number)) + .one(&self.db) + .await? 
+ .ok_or(AppError::NotFound("Issue not found".to_string()))?; + + // Allow if user is author OR admin/owner + let member = project_members::Entity::find() + .filter(project_members::Column::Project.eq(project.id)) + .filter(project_members::Column::User.eq(user_uid)) + .one(&self.db) + .await? + .ok_or(AppError::NoPower)?; + + let role = member.scope_role().map_err(|_| AppError::RoleParseError)?; + let is_author = issue.author == user_uid; + let is_admin = role == models::projects::MemberRole::Admin + || role == models::projects::MemberRole::Owner; + + if !is_author && !is_admin { + return Err(AppError::NoPower); + } + + // Cascade delete related records + issue_comment::Entity::delete_many() + .filter(issue_comment::Column::Issue.eq(issue.id)) + .exec(&self.db) + .await?; + issue_assignee::Entity::delete_many() + .filter(issue_assignee::Column::Issue.eq(issue.id)) + .exec(&self.db) + .await?; + issue_label::Entity::delete_many() + .filter(issue_label::Column::Issue.eq(issue.id)) + .exec(&self.db) + .await?; + issue_subscriber::Entity::delete_many() + .filter(issue_subscriber::Column::Issue.eq(issue.id)) + .exec(&self.db) + .await?; + issue_repo::Entity::delete_many() + .filter(issue_repo::Column::Issue.eq(issue.id)) + .exec(&self.db) + .await?; + + issue::Entity::delete_by_id((issue.id, issue.number)) + .exec(&self.db) + .await?; + + self.invalidate_issue_cache(project.id, number).await; + + let actor_username = user::Entity::find_by_id(user_uid) + .one(&self.db) + .await + .ok() + .flatten() + .map(|u| u.username) + .unwrap_or_default(); + let _ = self + .project_log_activity( + project.id, + None, + user_uid, + super::super::project::activity::ActivityLogParams { + event_type: "issue_delete".to_string(), + title: format!("{} deleted issue #{}", actor_username, number), + repo_id: None, + content: None, + event_id: None, + event_sub_id: Some(number), + metadata: None, + is_private: false, + }, + ) + .await; + + Ok(()) + } + + /// Get issue summary (open/closed 
counts).
+    pub async fn issue_summary(
+        &self,
+        project_name: String,
+        ctx: &Session,
+    ) -> Result<IssueSummaryResponse, AppError> {
+        let project = self.utils_find_project_by_name(project_name).await?;
+        if let Some(uid) = ctx.user() {
+            self.check_project_access(project.id, uid).await?;
+        }
+
+        let total: u64 = issue::Entity::find()
+            .filter(issue::Column::Project.eq(project.id))
+            .count(&self.db)
+            .await?;
+        let open: u64 = issue::Entity::find()
+            .filter(issue::Column::Project.eq(project.id))
+            .filter(issue::Column::State.eq(IssueState::Open.to_string()))
+            .count(&self.db)
+            .await?;
+        // The two counts are separate queries; a concurrent insert between
+        // them could make `open > total`, which would panic on unsigned
+        // subtraction. Saturate instead of trusting the invariant.
+        let closed = total.saturating_sub(open);
+
+        Ok(IssueSummaryResponse {
+            total: total as i64,
+            open: open as i64,
+            closed: closed as i64,
+        })
+    }
+
+    /// Drop the cached detail response for one issue (best-effort; cache
+    /// errors are swallowed since the DB remains the source of truth).
+    pub(crate) async fn invalidate_issue_cache(&self, project_id: Uuid, number: i64) {
+        if let Ok(mut conn) = self.cache.conn().await {
+            let key = format!("issue:get:{}:{}", project_id, number);
+            let _: Option<()> = conn.del::<_, ()>(key).await.ok();
+        }
+    }
+}
diff --git a/libs/service/issue/label.rs b/libs/service/issue/label.rs
new file mode 100644
index 0000000..0483c20
--- /dev/null
+++ b/libs/service/issue/label.rs
@@ -0,0 +1,353 @@
+use crate::AppService;
+use crate::error::AppError;
+use crate::project::activity::ActivityLogParams;
+use chrono::Utc;
+use models::issues::{issue, issue_label};
+use models::projects::project_members;
+use models::system::label;
+use sea_orm::*;
+use serde::{Deserialize, Serialize};
+use session::Session;
+use utoipa::ToSchema;
+use uuid::Uuid;
+
+/// Request body for attaching an existing label to an issue.
+#[derive(Debug, Clone, Deserialize, ToSchema)]
+pub struct IssueAddLabelRequest {
+    pub label_id: i64,
+}
+
+/// Request body for creating a project label.
+#[derive(Debug, Clone, Deserialize, ToSchema)]
+pub struct CreateLabelRequest {
+    pub name: String,
+    pub color: String,
+}
+
+/// An issue-to-label relation, denormalized with the label's name/color.
+#[derive(Debug, Clone, Serialize, ToSchema)]
+pub struct IssueLabelResponse {
+    pub issue: Uuid,
+    pub label_id: i64,
+    pub label_name: Option<String>,
+    pub label_color: Option<String>,
+    pub relation_at: chrono::DateTime<Utc>,
+}
+
+#[derive(Debug, Clone, Serialize,
ToSchema)]
+pub struct LabelResponse {
+    pub id: i64,
+    pub project: Uuid,
+    pub name: String,
+    pub color: String,
+}
+
+impl From<label::Model> for LabelResponse {
+    fn from(l: label::Model) -> Self {
+        Self {
+            id: l.id,
+            project: l.project,
+            name: l.name,
+            color: l.color,
+        }
+    }
+}
+
+impl AppService {
+    /// List all labels for a project, sorted by name.
+    pub async fn label_list(
+        &self,
+        project_name: String,
+        ctx: &Session,
+    ) -> Result<Vec<LabelResponse>, AppError> {
+        let project = self.utils_find_project_by_name(project_name).await?;
+        if let Some(uid) = ctx.user() {
+            self.check_project_access(project.id, uid).await?;
+        }
+
+        let labels = label::Entity::find()
+            .filter(label::Column::Project.eq(project.id))
+            .order_by_asc(label::Column::Name)
+            .all(&self.db)
+            .await?;
+
+        Ok(labels.into_iter().map(LabelResponse::from).collect())
+    }
+
+    /// Create a label in the project. Any project member may create labels;
+    /// label names are unique per project.
+    pub async fn label_create(
+        &self,
+        project_name: String,
+        request: CreateLabelRequest,
+        ctx: &Session,
+    ) -> Result<LabelResponse, AppError> {
+        let user_uid = ctx.user().ok_or(AppError::Unauthorized)?;
+        let project = self.utils_find_project_by_name(project_name).await?;
+
+        let _member = project_members::Entity::find()
+            .filter(project_members::Column::Project.eq(project.id))
+            .filter(project_members::Column::User.eq(user_uid))
+            .one(&self.db)
+            .await?
+            .ok_or(AppError::NoPower)?;
+
+        // Reject duplicate label names within the project.
+        let existing = label::Entity::find()
+            .filter(label::Column::Project.eq(project.id))
+            .filter(label::Column::Name.eq(&request.name))
+            .one(&self.db)
+            .await?;
+        if existing.is_some() {
+            return Err(AppError::BadRequest(
+                "Label with this name already exists".to_string(),
+            ));
+        }
+
+        // Leave `id` unset (NotSet via ..Default::default()) so the database
+        // assigns the auto-increment key; inserting an explicit 0 would
+        // insert a literal 0 on sequence-backed backends such as Postgres.
+        let active = label::ActiveModel {
+            project: Set(project.id),
+            name: Set(request.name),
+            color: Set(request.color),
+            ..Default::default()
+        };
+        let model = active.insert(&self.db).await?;
+
+        Ok(LabelResponse::from(model))
+    }
+
+    /// Delete a project label and all of its issue relations.
+    pub async fn label_delete(
+        &self,
+        project_name: String,
+        label_id: i64,
+        ctx: &Session,
+    ) -> Result<(), AppError> {
+        let user_uid = ctx.user().ok_or(AppError::Unauthorized)?;
+        let project = self.utils_find_project_by_name(project_name).await?;
+
+        let _member = project_members::Entity::find()
+            .filter(project_members::Column::Project.eq(project.id))
+            .filter(project_members::Column::User.eq(user_uid))
+            .one(&self.db)
+            .await?
+            .ok_or(AppError::NoPower)?;
+
+        // Scope the lookup to this project so a label id from another
+        // project cannot be deleted through this endpoint.
+        let lbl = label::Entity::find_by_id(label_id)
+            .filter(label::Column::Project.eq(project.id))
+            .one(&self.db)
+            .await?
+            .ok_or(AppError::NotFound("Label not found".to_string()))?;
+
+        // Cascade delete issue-label relations first.
+        // NOTE(review): cached issue detail responses that referenced this
+        // label are not invalidated here -- confirm whether that staleness
+        // is acceptable.
+        issue_label::Entity::delete_many()
+            .filter(issue_label::Column::Label.eq(lbl.id))
+            .exec(&self.db)
+            .await?;
+
+        label::Entity::delete_by_id(lbl.id).exec(&self.db).await?;
+
+        Ok(())
+    }
+
+    /// List labels on an issue.
+    pub async fn issue_label_list(
+        &self,
+        project_name: String,
+        issue_number: i64,
+        ctx: &Session,
+    ) -> Result<Vec<IssueLabelResponse>, AppError> {
+        let project = self.utils_find_project_by_name(project_name).await?;
+        if let Some(uid) = ctx.user() {
+            self.check_project_access(project.id, uid).await?;
+        }
+
+        let issue = issue::Entity::find()
+            .filter(issue::Column::Project.eq(project.id))
+            .filter(issue::Column::Number.eq(issue_number))
+            .one(&self.db)
+            .await?
+            .ok_or(AppError::NotFound("Issue not found".to_string()))?;
+
+        let issue_labels = issue_label::Entity::find()
+            .filter(issue_label::Column::Issue.eq(issue.id))
+            .all(&self.db)
+            .await?;
+
+        // Fetch all referenced labels in one query, then join in memory.
+        let label_ids: Vec<i64> = issue_labels.iter().map(|l| l.label).collect();
+        let labels = if label_ids.is_empty() {
+            vec![]
+        } else {
+            label::Entity::find()
+                .filter(label::Column::Id.is_in(label_ids))
+                .all(&self.db)
+                .await?
+        };
+
+        let responses: Vec<IssueLabelResponse> = issue_labels
+            .into_iter()
+            .map(|il| {
+                // A dangling relation yields None name/color rather than an error.
+                let lbl = labels.iter().find(|l| l.id == il.label);
+                IssueLabelResponse {
+                    issue: il.issue,
+                    label_id: il.label,
+                    label_name: lbl.map(|l| l.name.clone()),
+                    label_color: lbl.map(|l| l.color.clone()),
+                    relation_at: il.relation_at,
+                }
+            })
+            .collect();
+
+        Ok(responses)
+    }
+
+    /// Add a label to an issue. Any project member may do this; applying the
+    /// same label twice is rejected.
+    pub async fn issue_label_add(
+        &self,
+        project_name: String,
+        issue_number: i64,
+        request: IssueAddLabelRequest,
+        ctx: &Session,
+    ) -> Result<IssueLabelResponse, AppError> {
+        let user_uid = ctx.user().ok_or(AppError::Unauthorized)?;
+        let project = self.utils_find_project_by_name(project_name).await?;
+
+        let _member = project_members::Entity::find()
+            .filter(project_members::Column::Project.eq(project.id))
+            .filter(project_members::Column::User.eq(user_uid))
+            .one(&self.db)
+            .await?
+            .ok_or(AppError::NoPower)?;
+
+        let issue = issue::Entity::find()
+            .filter(issue::Column::Project.eq(project.id))
+            .filter(issue::Column::Number.eq(issue_number))
+            .one(&self.db)
+            .await?
+            .ok_or(AppError::NotFound("Issue not found".to_string()))?;
+
+        let lbl = label::Entity::find_by_id(request.label_id)
+            .filter(label::Column::Project.eq(project.id))
+            .one(&self.db)
+            .await?
+            .ok_or(AppError::NotFound("Label not found".to_string()))?;
+
+        // Reject duplicates instead of silently re-linking.
+        let existing = issue_label::Entity::find()
+            .filter(issue_label::Column::Issue.eq(issue.id))
+            .filter(issue_label::Column::Label.eq(request.label_id))
+            .one(&self.db)
+            .await?;
+        if existing.is_some() {
+            return Err(AppError::BadRequest("Label already applied".to_string()));
+        }
+
+        let now = Utc::now();
+        let active = issue_label::ActiveModel {
+            issue: Set(issue.id),
+            label: Set(lbl.id),
+            relation_at: Set(now),
+            ..Default::default()
+        };
+        let model = active.insert(&self.db).await?;
+
+        self.invalidate_issue_cache(project.id, issue_number).await;
+
+        let response = Ok(IssueLabelResponse {
+            issue: model.issue,
+            label_id: model.label,
+            label_name: Some(lbl.name.clone()),
+            label_color: Some(lbl.color.clone()),
+            relation_at: model.relation_at,
+        });
+
+        // Best-effort activity log; failures are ignored. Use the actor's
+        // username in the title (not the raw UUID), matching issue.rs.
+        use models::users::user;
+        let actor_username = user::Entity::find_by_id(user_uid)
+            .one(&self.db)
+            .await
+            .ok()
+            .flatten()
+            .map(|u| u.username)
+            .unwrap_or_default();
+        let _ = self
+            .project_log_activity(
+                project.id,
+                None,
+                user_uid,
+                ActivityLogParams {
+                    event_type: "issue_label_add".to_string(),
+                    title: format!(
+                        "{} added label '{}' to issue #{}",
+                        actor_username, lbl.name, issue_number
+                    ),
+                    repo_id: None,
+                    content: None,
+                    event_id: Some(model.issue),
+                    event_sub_id: Some(issue_number),
+                    metadata: Some(serde_json::json!({
+                        "label_id": lbl.id,
+                        "label_name": lbl.name.clone(),
+                        "label_color": lbl.color.clone(),
+                    })),
+                    is_private: false,
+                },
+            )
+            .await;
+
+        response
+    }
+
+    /// Remove a label from an issue.
+    pub async fn issue_label_remove(
+        &self,
+        project_name: String,
+        issue_number: i64,
+        label_id: i64,
+        ctx: &Session,
+    ) -> Result<(), AppError> {
+        let user_uid = ctx.user().ok_or(AppError::Unauthorized)?;
+        let project = self.utils_find_project_by_name(project_name).await?;
+
+        let _member = project_members::Entity::find()
+            .filter(project_members::Column::Project.eq(project.id))
+            .filter(project_members::Column::User.eq(user_uid))
+            .one(&self.db)
+            .await?
+            .ok_or(AppError::NoPower)?;
+
+        let issue = issue::Entity::find()
+            .filter(issue::Column::Project.eq(project.id))
+            .filter(issue::Column::Number.eq(issue_number))
+            .one(&self.db)
+            .await?
+            .ok_or(AppError::NotFound("Issue not found".to_string()))?;
+
+        let issue_id = issue.id;
+        // Scope the lookup to this project, matching label_delete, so a
+        // label id from another project cannot be named here.
+        let lbl = label::Entity::find_by_id(label_id)
+            .filter(label::Column::Project.eq(project.id))
+            .one(&self.db)
+            .await?
+            .ok_or(AppError::NotFound("Label not found".to_string()))?;
+        let label_name = lbl.name.clone();
+
+        issue_label::Entity::delete_many()
+            .filter(issue_label::Column::Issue.eq(issue.id))
+            .filter(issue_label::Column::Label.eq(label_id))
+            .exec(&self.db)
+            .await?;
+
+        self.invalidate_issue_cache(project.id, issue_number).await;
+
+        // Best-effort activity log; failures are ignored. Use the actor's
+        // username in the title (not the raw UUID), matching issue.rs.
+        use models::users::user;
+        let actor_username = user::Entity::find_by_id(user_uid)
+            .one(&self.db)
+            .await
+            .ok()
+            .flatten()
+            .map(|u| u.username)
+            .unwrap_or_default();
+        let _ = self
+            .project_log_activity(
+                project.id,
+                None,
+                user_uid,
+                ActivityLogParams {
+                    event_type: "issue_label_remove".to_string(),
+                    title: format!(
+                        "{} removed label '{}' from issue #{}",
+                        actor_username, label_name, issue_number
+                    ),
+                    repo_id: None,
+                    content: None,
+                    event_id: Some(issue_id),
+                    event_sub_id: Some(issue_number),
+                    metadata: Some(serde_json::json!({
+                        "label_id": label_id,
+                        "label_name": label_name,
+                    })),
+                    is_private: false,
+                },
+            )
+            .await;
+        Ok(())
+    }
+}
diff --git a/libs/service/issue/mod.rs b/libs/service/issue/mod.rs
new file mode 100644
index 0000000..e59df1f
--- /dev/null
+++ b/libs/service/issue/mod.rs
@@ -0,0 +1,23 @@
+pub mod assignee;
+pub mod comment;
+pub mod issue;
+pub mod label;
+pub mod pull_request;
+pub mod reaction;
+pub mod repo;
+pub mod subscriber;
+
+// 
Re-export types from submodules for convenient access
+pub use assignee::{IssueAssignUserRequest, IssueAssigneeResponse};
+pub use comment::{
+    IssueCommentCreateRequest, IssueCommentListResponse, IssueCommentResponse,
+    IssueCommentUpdateRequest,
+};
+pub use issue::{
+    IssueCreateRequest, IssueListResponse, IssueResponse, IssueSummaryResponse, IssueUpdateRequest,
+};
+pub use label::{CreateLabelRequest, IssueAddLabelRequest, IssueLabelResponse, LabelResponse};
+pub use pull_request::{IssueLinkPullRequestRequest, IssuePullRequestResponse};
+pub use reaction::{ReactionAddRequest, ReactionListResponse, ReactionResponse};
+pub use repo::{IssueLinkRepoRequest, IssueRepoResponse};
+pub use subscriber::IssueSubscriberResponse;
diff --git a/libs/service/issue/pull_request.rs b/libs/service/issue/pull_request.rs
new file mode 100644
index 0000000..dff9c3a
--- /dev/null
+++ b/libs/service/issue/pull_request.rs
@@ -0,0 +1,208 @@
+use crate::AppService;
+use crate::error::AppError;
+use crate::project::activity::ActivityLogParams;
+use chrono::Utc;
+use models::issues::{issue, issue_pull_request};
+use models::projects::project_members;
+use sea_orm::*;
+use serde::{Deserialize, Serialize};
+use session::Session;
+use utoipa::ToSchema;
+use uuid::Uuid;
+
+/// Request body linking a pull request (by repo + PR number) to an issue.
+#[derive(Debug, Clone, Deserialize, ToSchema)]
+pub struct IssueLinkPullRequestRequest {
+    pub repo: Uuid,
+    pub number: i64,
+}
+
+/// An issue-to-pull-request relation.
+#[derive(Debug, Clone, Serialize, ToSchema)]
+pub struct IssuePullRequestResponse {
+    pub issue: Uuid,
+    pub repo: Uuid,
+    pub number: i64,
+    pub relation_at: chrono::DateTime<Utc>,
+}
+
+impl AppService {
+    /// List pull requests linked to an issue.
+    pub async fn issue_pull_request_list(
+        &self,
+        project_name: String,
+        issue_number: i64,
+        ctx: &Session,
+    ) -> Result<Vec<IssuePullRequestResponse>, AppError> {
+        let project = self.utils_find_project_by_name(project_name).await?;
+        if let Some(uid) = ctx.user() {
+            self.check_project_access(project.id, uid).await?;
+        }
+
+        let issue = issue::Entity::find()
+            .filter(issue::Column::Project.eq(project.id))
+            .filter(issue::Column::Number.eq(issue_number))
+            .one(&self.db)
+            .await?
+            .ok_or(AppError::NotFound("Issue not found".to_string()))?;
+
+        let prs = issue_pull_request::Entity::find()
+            .filter(issue_pull_request::Column::Issue.eq(issue.id))
+            .all(&self.db)
+            .await?;
+
+        let responses: Vec<IssuePullRequestResponse> = prs
+            .into_iter()
+            .map(|pr| IssuePullRequestResponse {
+                issue: pr.issue,
+                repo: pr.repo,
+                number: pr.number,
+                relation_at: pr.relation_at,
+            })
+            .collect();
+
+        Ok(responses)
+    }
+
+    /// Link a pull request to an issue. Any project member may link;
+    /// duplicate links are rejected.
+    pub async fn issue_pull_request_link(
+        &self,
+        project_name: String,
+        issue_number: i64,
+        request: IssueLinkPullRequestRequest,
+        ctx: &Session,
+    ) -> Result<IssuePullRequestResponse, AppError> {
+        let user_uid = ctx.user().ok_or(AppError::Unauthorized)?;
+        let project = self.utils_find_project_by_name(project_name).await?;
+
+        let _member = project_members::Entity::find()
+            .filter(project_members::Column::Project.eq(project.id))
+            .filter(project_members::Column::User.eq(user_uid))
+            .one(&self.db)
+            .await?
+            .ok_or(AppError::NoPower)?;
+
+        let issue = issue::Entity::find()
+            .filter(issue::Column::Project.eq(project.id))
+            .filter(issue::Column::Number.eq(issue_number))
+            .one(&self.db)
+            .await?
+            .ok_or(AppError::NotFound("Issue not found".to_string()))?;
+
+        // NOTE(review): request.repo/number are not validated against an
+        // actual PR record here -- confirm that is done by the caller.
+        let existing = issue_pull_request::Entity::find()
+            .filter(issue_pull_request::Column::Issue.eq(issue.id))
+            .filter(issue_pull_request::Column::Repo.eq(request.repo))
+            .filter(issue_pull_request::Column::Number.eq(request.number))
+            .one(&self.db)
+            .await?;
+        if existing.is_some() {
+            return Err(AppError::BadRequest(
+                "PR already linked to this issue".to_string(),
+            ));
+        }
+
+        let now = Utc::now();
+        let active = issue_pull_request::ActiveModel {
+            issue: Set(issue.id),
+            repo: Set(request.repo),
+            number: Set(request.number),
+            relation_at: Set(now),
+            ..Default::default()
+        };
+        let model = active.insert(&self.db).await?;
+
+        self.invalidate_issue_cache(project.id, issue_number).await;
+
+        let response = Ok(IssuePullRequestResponse {
+            issue: model.issue,
+            repo: model.repo,
+            number: model.number,
+            relation_at: model.relation_at,
+        });
+
+        // Best-effort activity log; failures are ignored. Use the actor's
+        // username in the title (not the raw UUID), matching issue.rs.
+        use models::users::user;
+        let actor_username = user::Entity::find_by_id(user_uid)
+            .one(&self.db)
+            .await
+            .ok()
+            .flatten()
+            .map(|u| u.username)
+            .unwrap_or_default();
+        let _ = self
+            .project_log_activity(
+                project.id,
+                Some(model.repo),
+                user_uid,
+                ActivityLogParams {
+                    event_type: "pr_issue_link".to_string(),
+                    title: format!(
+                        "{} linked PR #{} to issue #{}",
+                        actor_username, request.number, issue_number
+                    ),
+                    repo_id: Some(model.repo),
+                    content: None,
+                    event_id: Some(model.issue),
+                    event_sub_id: Some(issue_number),
+                    metadata: Some(serde_json::json!({
+                        "pr_number": request.number,
+                        "pr_repo": request.repo,
+                    })),
+                    is_private: false,
+                },
+            )
+            .await;
+
+        response
+    }
+
+    /// Unlink a pull request from an issue. Idempotent: unlinking a PR that
+    /// is not linked is a no-op.
+    pub async fn issue_pull_request_unlink(
+        &self,
+        project_name: String,
+        issue_number: i64,
+        repo_id: Uuid,
+        pr_number: i64,
+        ctx: &Session,
+    ) -> Result<(), AppError> {
+        let user_uid = ctx.user().ok_or(AppError::Unauthorized)?;
+        let project = self.utils_find_project_by_name(project_name).await?;
+
+        let _member = project_members::Entity::find()
+            .filter(project_members::Column::Project.eq(project.id))
+            .filter(project_members::Column::User.eq(user_uid))
+            .one(&self.db)
+            .await?
+            .ok_or(AppError::NoPower)?;
+
+        let issue = issue::Entity::find()
+            .filter(issue::Column::Project.eq(project.id))
+            .filter(issue::Column::Number.eq(issue_number))
+            .one(&self.db)
+            .await?
+            .ok_or(AppError::NotFound("Issue not found".to_string()))?;
+
+        issue_pull_request::Entity::delete_many()
+            .filter(issue_pull_request::Column::Issue.eq(issue.id))
+            .filter(issue_pull_request::Column::Repo.eq(repo_id))
+            .filter(issue_pull_request::Column::Number.eq(pr_number))
+            .exec(&self.db)
+            .await?;
+
+        self.invalidate_issue_cache(project.id, issue_number).await;
+
+        // Best-effort activity log; failures are ignored. Use the actor's
+        // username in the title (not the raw UUID), matching issue.rs.
+        use models::users::user;
+        let actor_username = user::Entity::find_by_id(user_uid)
+            .one(&self.db)
+            .await
+            .ok()
+            .flatten()
+            .map(|u| u.username)
+            .unwrap_or_default();
+        let _ = self
+            .project_log_activity(
+                project.id,
+                Some(repo_id),
+                user_uid,
+                ActivityLogParams {
+                    event_type: "pr_issue_unlink".to_string(),
+                    title: format!(
+                        "{} unlinked PR #{} from issue #{}",
+                        actor_username, pr_number, issue_number
+                    ),
+                    repo_id: Some(repo_id),
+                    content: None,
+                    event_id: Some(issue.id),
+                    event_sub_id: Some(issue_number),
+                    metadata: Some(serde_json::json!({
+                        "pr_number": pr_number,
+                        "pr_repo": repo_id,
+                    })),
+                    is_private: false,
+                },
+            )
+            .await;
+        Ok(())
+    }
+}
diff --git a/libs/service/issue/reaction.rs b/libs/service/issue/reaction.rs
new file mode 100644
index 0000000..6e3c4ed
--- /dev/null
+++ b/libs/service/issue/reaction.rs
@@ -0,0 +1,343 @@
+use crate::AppService;
+use crate::error::AppError;
+use chrono::Utc;
+use models::issues::{ReactionType, issue, issue_comment, issue_comment_reaction, issue_reaction};
+use models::projects::project_members;
+use sea_orm::*;
+use serde::{Deserialize, Serialize};
+use session::Session;
+use std::str::FromStr;
+use utoipa::ToSchema;
+use uuid::Uuid;
+
+/// Request body naming a reaction kind (validated against `ReactionType`).
+#[derive(Debug, Clone, Deserialize, ToSchema)]
+pub struct ReactionAddRequest {
+    pub reaction: String,
+}
+
+/// A single user's reaction.
+#[derive(Debug, Clone, Serialize, ToSchema)]
+pub struct ReactionResponse {
+    pub user: Uuid,
+    pub reaction: String,
+    pub created_at: chrono::DateTime<Utc>,
+}
+
+/// Aggregated count of one reaction kind with the reacting users.
+#[derive(Debug, Clone, Serialize, ToSchema)]
+pub struct ReactionSummary {
+    pub reaction: String,
+    pub 
count: i64,
+    pub users: Vec<Uuid>,
+}
+
+#[derive(Debug, Clone, Serialize, ToSchema)]
+pub struct ReactionListResponse {
+    pub reactions: Vec<ReactionSummary>,
+}
+
+impl AppService {
+    /// List an aggregated reaction summary for an issue.
+    pub async fn issue_reaction_list(
+        &self,
+        project_name: String,
+        issue_number: i64,
+        ctx: &Session,
+    ) -> Result<ReactionListResponse, AppError> {
+        let project = self.utils_find_project_by_name(project_name).await?;
+        if let Some(uid) = ctx.user() {
+            self.check_project_access(project.id, uid).await?;
+        }
+
+        let issue = issue::Entity::find()
+            .filter(issue::Column::Project.eq(project.id))
+            .filter(issue::Column::Number.eq(issue_number))
+            .one(&self.db)
+            .await?
+            .ok_or(AppError::NotFound("Issue not found".to_string()))?;
+
+        let reactions = issue_reaction::Entity::find()
+            .filter(issue_reaction::Column::Issue.eq(issue.id))
+            .all(&self.db)
+            .await?;
+
+        let summaries = self.aggregate_reactions(reactions);
+        Ok(ReactionListResponse {
+            reactions: summaries,
+        })
+    }
+
+    /// Add a reaction to an issue. Idempotent: re-adding an existing
+    /// reaction returns the stored row unchanged.
+    pub async fn issue_reaction_add(
+        &self,
+        project_name: String,
+        issue_number: i64,
+        request: ReactionAddRequest,
+        ctx: &Session,
+    ) -> Result<ReactionResponse, AppError> {
+        let user_uid = ctx.user().ok_or(AppError::Unauthorized)?;
+        let project = self.utils_find_project_by_name(project_name).await?;
+
+        let _member = project_members::Entity::find()
+            .filter(project_members::Column::Project.eq(project.id))
+            .filter(project_members::Column::User.eq(user_uid))
+            .one(&self.db)
+            .await?
+            .ok_or(AppError::NoPower)?;
+
+        let issue = issue::Entity::find()
+            .filter(issue::Column::Project.eq(project.id))
+            .filter(issue::Column::Number.eq(issue_number))
+            .one(&self.db)
+            .await?
+            .ok_or(AppError::NotFound("Issue not found".to_string()))?;
+
+        // Validate the reaction kind before touching the database.
+        let _ = ReactionType::from_str(&request.reaction)
+            .map_err(|_| AppError::BadRequest("Unknown reaction type".to_string()))?;
+
+        let existing = issue_reaction::Entity::find()
+            .filter(issue_reaction::Column::Issue.eq(issue.id))
+            .filter(issue_reaction::Column::User.eq(user_uid))
+            .filter(issue_reaction::Column::Reaction.eq(&request.reaction))
+            .one(&self.db)
+            .await?;
+
+        if let Some(e) = existing {
+            return Ok(ReactionResponse {
+                user: e.user,
+                reaction: e.reaction,
+                created_at: e.created_at,
+            });
+        }
+
+        let now = Utc::now();
+        let active = issue_reaction::ActiveModel {
+            issue: Set(issue.id),
+            user: Set(user_uid),
+            reaction: Set(request.reaction),
+            created_at: Set(now),
+            ..Default::default()
+        };
+        let model = active.insert(&self.db).await?;
+
+        Ok(ReactionResponse {
+            user: model.user,
+            reaction: model.reaction,
+            created_at: model.created_at,
+        })
+    }
+
+    /// Remove the calling user's reaction from an issue. Idempotent.
+    pub async fn issue_reaction_remove(
+        &self,
+        project_name: String,
+        issue_number: i64,
+        reaction: String,
+        ctx: &Session,
+    ) -> Result<(), AppError> {
+        let user_uid = ctx.user().ok_or(AppError::Unauthorized)?;
+        let project = self.utils_find_project_by_name(project_name).await?;
+
+        let _member = project_members::Entity::find()
+            .filter(project_members::Column::Project.eq(project.id))
+            .filter(project_members::Column::User.eq(user_uid))
+            .one(&self.db)
+            .await?
+            .ok_or(AppError::NoPower)?;
+
+        let issue = issue::Entity::find()
+            .filter(issue::Column::Project.eq(project.id))
+            .filter(issue::Column::Number.eq(issue_number))
+            .one(&self.db)
+            .await?
+            .ok_or(AppError::NotFound("Issue not found".to_string()))?;
+
+        issue_reaction::Entity::delete_many()
+            .filter(issue_reaction::Column::Issue.eq(issue.id))
+            .filter(issue_reaction::Column::User.eq(user_uid))
+            .filter(issue_reaction::Column::Reaction.eq(reaction))
+            .exec(&self.db)
+            .await?;
+
+        Ok(())
+    }
+
+    /// List reactions on a comment.
+    pub async fn issue_comment_reaction_list(
+        &self,
+        project_name: String,
+        issue_number: i64,
+        comment_id: i64,
+        ctx: &Session,
+    ) -> Result<ReactionListResponse, AppError> {
+        let project = self.utils_find_project_by_name(project_name).await?;
+        if let Some(uid) = ctx.user() {
+            self.check_project_access(project.id, uid).await?;
+        }
+
+        let issue = issue::Entity::find()
+            .filter(issue::Column::Project.eq(project.id))
+            .filter(issue::Column::Number.eq(issue_number))
+            .one(&self.db)
+            .await?
+            .ok_or(AppError::NotFound("Issue not found".to_string()))?;
+
+        // The issue filter ensures the comment actually belongs to this issue.
+        let comment = issue_comment::Entity::find_by_id(comment_id)
+            .filter(issue_comment::Column::Issue.eq(issue.id))
+            .one(&self.db)
+            .await?
+            .ok_or(AppError::NotFound("Comment not found".to_string()))?;
+
+        let reactions = issue_comment_reaction::Entity::find()
+            .filter(issue_comment_reaction::Column::Comment.eq(comment.id))
+            .all(&self.db)
+            .await?;
+
+        let summaries = self.aggregate_comment_reactions(reactions);
+        Ok(ReactionListResponse {
+            reactions: summaries,
+        })
+    }
+
+    /// Add a reaction to a comment. Idempotent: re-adding an existing
+    /// reaction returns the stored row unchanged.
+    pub async fn issue_comment_reaction_add(
+        &self,
+        project_name: String,
+        issue_number: i64,
+        comment_id: i64,
+        request: ReactionAddRequest,
+        ctx: &Session,
+    ) -> Result<ReactionResponse, AppError> {
+        let user_uid = ctx.user().ok_or(AppError::Unauthorized)?;
+        let project = self.utils_find_project_by_name(project_name).await?;
+
+        let _member = project_members::Entity::find()
+            .filter(project_members::Column::Project.eq(project.id))
+            .filter(project_members::Column::User.eq(user_uid))
+            .one(&self.db)
+            .await?
+            .ok_or(AppError::NoPower)?;
+
+        let issue = issue::Entity::find()
+            .filter(issue::Column::Project.eq(project.id))
+            .filter(issue::Column::Number.eq(issue_number))
+            .one(&self.db)
+            .await?
+            .ok_or(AppError::NotFound("Issue not found".to_string()))?;
+
+        let comment = issue_comment::Entity::find_by_id(comment_id)
+            .filter(issue_comment::Column::Issue.eq(issue.id))
+            .one(&self.db)
+            .await?
+            .ok_or(AppError::NotFound("Comment not found".to_string()))?;
+
+        // Validate the reaction kind before touching the database.
+        let _ = ReactionType::from_str(&request.reaction)
+            .map_err(|_| AppError::BadRequest("Unknown reaction type".to_string()))?;
+
+        let existing = issue_comment_reaction::Entity::find()
+            .filter(issue_comment_reaction::Column::Comment.eq(comment.id))
+            .filter(issue_comment_reaction::Column::User.eq(user_uid))
+            .filter(issue_comment_reaction::Column::Reaction.eq(&request.reaction))
+            .one(&self.db)
+            .await?;
+
+        if let Some(e) = existing {
+            return Ok(ReactionResponse {
+                user: e.user,
+                reaction: e.reaction,
+                created_at: e.created_at,
+            });
+        }
+
+        let now = Utc::now();
+        let active = issue_comment_reaction::ActiveModel {
+            comment: Set(comment.id),
+            user: Set(user_uid),
+            reaction: Set(request.reaction),
+            created_at: Set(now),
+            ..Default::default()
+        };
+        let model = active.insert(&self.db).await?;
+
+        Ok(ReactionResponse {
+            user: model.user,
+            reaction: model.reaction,
+            created_at: model.created_at,
+        })
+    }
+
+    /// Remove the calling user's reaction from a comment. Idempotent.
+    pub async fn issue_comment_reaction_remove(
+        &self,
+        project_name: String,
+        issue_number: i64,
+        comment_id: i64,
+        reaction: String,
+        ctx: &Session,
+    ) -> Result<(), AppError> {
+        let user_uid = ctx.user().ok_or(AppError::Unauthorized)?;
+        let project = self.utils_find_project_by_name(project_name).await?;
+
+        let _member = project_members::Entity::find()
+            .filter(project_members::Column::Project.eq(project.id))
+            .filter(project_members::Column::User.eq(user_uid))
+            .one(&self.db)
+            .await?
+            .ok_or(AppError::NoPower)?;
+
+        let issue = issue::Entity::find()
+            .filter(issue::Column::Project.eq(project.id))
+            .filter(issue::Column::Number.eq(issue_number))
+            .one(&self.db)
+            .await?
+            .ok_or(AppError::NotFound("Issue not found".to_string()))?;
+
+        let comment = issue_comment::Entity::find_by_id(comment_id)
+            .filter(issue_comment::Column::Issue.eq(issue.id))
+            .one(&self.db)
+            .await?
+            .ok_or(AppError::NotFound("Comment not found".to_string()))?;
+
+        issue_comment_reaction::Entity::delete_many()
+            .filter(issue_comment_reaction::Column::Comment.eq(comment.id))
+            .filter(issue_comment_reaction::Column::User.eq(user_uid))
+            .filter(issue_comment_reaction::Column::Reaction.eq(reaction))
+            .exec(&self.db)
+            .await?;
+
+        Ok(())
+    }
+
+    /// Group issue reactions into per-kind summaries.
+    fn aggregate_reactions(&self, reactions: Vec<issue_reaction::Model>) -> Vec<ReactionSummary> {
+        Self::summarize(reactions.into_iter().map(|r| (r.reaction, r.user)))
+    }
+
+    /// Group comment reactions into per-kind summaries.
+    fn aggregate_comment_reactions(
+        &self,
+        reactions: Vec<issue_comment_reaction::Model>,
+    ) -> Vec<ReactionSummary> {
+        Self::summarize(reactions.into_iter().map(|r| (r.reaction, r.user)))
+    }
+
+    /// Shared tally used by both aggregators: counts occurrences and collects
+    /// reacting users per reaction kind. Output is sorted by reaction name so
+    /// the API response order is deterministic (HashMap iteration is not).
+    fn summarize(pairs: impl Iterator<Item = (String, Uuid)>) -> Vec<ReactionSummary> {
+        use std::collections::HashMap;
+        let mut map: HashMap<String, (i64, Vec<Uuid>)> = HashMap::new();
+        for (reaction, user) in pairs {
+            let entry = map.entry(reaction).or_insert_with(|| (0, vec![]));
+            entry.0 += 1;
+            entry.1.push(user);
+        }
+        let mut out: Vec<ReactionSummary> = map
+            .into_iter()
+            .map(|(reaction, (count, users))| ReactionSummary {
+                reaction,
+                count,
+                users,
+            })
+            .collect();
+        out.sort_by(|a, b| a.reaction.cmp(&b.reaction));
+        out
+    }
+}
diff --git a/libs/service/issue/repo.rs b/libs/service/issue/repo.rs
new file mode 100644
index 0000000..d9c3b1f
--- /dev/null
+++ b/libs/service/issue/repo.rs
@@ -0,0 +1,150 @@
+use crate::AppService;
+use crate::error::AppError;
+use chrono::Utc;
+use models::issues::{issue, issue_repo};
+use models::projects::project_members;
+use sea_orm::*;
+use serde::{Deserialize, Serialize};
+use session::Session;
+use utoipa::ToSchema;
+use uuid::Uuid;
+
+/// Request body linking a repository to an issue.
+#[derive(Debug, Clone, Deserialize, ToSchema)]
+pub struct IssueLinkRepoRequest {
+    pub repo: Uuid,
+}
+
+/// An issue-to-repository relation.
+#[derive(Debug, Clone, Serialize, ToSchema)]
+pub struct IssueRepoResponse {
+    pub issue: Uuid,
+    pub repo: Uuid,
+    pub relation_at: chrono::DateTime<Utc>,
+}
+
+impl AppService {
+    /// List repos linked to an issue.
+    pub async fn issue_repo_list(
+        &self,
+        project_name: String,
+        issue_number: i64,
+        ctx: &Session,
+    ) -> Result<Vec<IssueRepoResponse>, AppError> {
+        let project = self.utils_find_project_by_name(project_name).await?;
+        if let Some(uid) = ctx.user() {
+            self.check_project_access(project.id, uid).await?;
+        }
+
+        let issue = issue::Entity::find()
+            .filter(issue::Column::Project.eq(project.id))
+            .filter(issue::Column::Number.eq(issue_number))
+            .one(&self.db)
+            .await?
+            .ok_or(AppError::NotFound("Issue not found".to_string()))?;
+
+        let repos = issue_repo::Entity::find()
+            .filter(issue_repo::Column::Issue.eq(issue.id))
+            .all(&self.db)
+            .await?;
+
+        let responses: Vec<IssueRepoResponse> = repos
+            .into_iter()
+            .map(|r| IssueRepoResponse {
+                issue: r.issue,
+                repo: r.repo,
+                relation_at: r.relation_at,
+            })
+            .collect();
+
+        Ok(responses)
+    }
+
+    /// Link a repo to an issue. Any project member may link; duplicate
+    /// links are rejected.
+    pub async fn issue_repo_link(
+        &self,
+        project_name: String,
+        issue_number: i64,
+        request: IssueLinkRepoRequest,
+        ctx: &Session,
+    ) -> Result<IssueRepoResponse, AppError> {
+        let user_uid = ctx.user().ok_or(AppError::Unauthorized)?;
+        let project = self.utils_find_project_by_name(project_name).await?;
+
+        let _member = project_members::Entity::find()
+            .filter(project_members::Column::Project.eq(project.id))
+            .filter(project_members::Column::User.eq(user_uid))
+            .one(&self.db)
+            .await?
+            .ok_or(AppError::NoPower)?;
+
+        let issue = issue::Entity::find()
+            .filter(issue::Column::Project.eq(project.id))
+            .filter(issue::Column::Number.eq(issue_number))
+            .one(&self.db)
+            .await?
+            .ok_or(AppError::NotFound("Issue not found".to_string()))?;
+
+        // NOTE(review): request.repo is not validated against an existing
+        // repository record here -- confirm the caller guarantees it.
+        let existing = issue_repo::Entity::find()
+            .filter(issue_repo::Column::Issue.eq(issue.id))
+            .filter(issue_repo::Column::Repo.eq(request.repo))
+            .one(&self.db)
+            .await?;
+        if existing.is_some() {
+            return Err(AppError::BadRequest(
+                "Repo already linked to this issue".to_string(),
+            ));
+        }
+
+        let now = Utc::now();
+        let active = issue_repo::ActiveModel {
+            issue: Set(issue.id),
+            repo: Set(request.repo),
+            relation_at: Set(now),
+            ..Default::default()
+        };
+        let model = active.insert(&self.db).await?;
+
+        self.invalidate_issue_cache(project.id, issue_number).await;
+
+        Ok(IssueRepoResponse {
+            issue: model.issue,
+            repo: model.repo,
+            relation_at: model.relation_at,
+        })
+    }
+
+    /// Unlink a repo from an issue. Idempotent: unlinking a repo that is
+    /// not linked is a no-op.
+    pub async fn issue_repo_unlink(
+        &self,
+        project_name: String,
+        issue_number: i64,
+        repo_id: Uuid,
+        ctx: &Session,
+    ) -> Result<(), AppError> {
+        let user_uid = ctx.user().ok_or(AppError::Unauthorized)?;
+        let project = self.utils_find_project_by_name(project_name).await?;
+
+        let _member = project_members::Entity::find()
+            .filter(project_members::Column::Project.eq(project.id))
+            .filter(project_members::Column::User.eq(user_uid))
+            .one(&self.db)
+            .await?
+            .ok_or(AppError::NoPower)?;
+
+        let issue = issue::Entity::find()
+            .filter(issue::Column::Project.eq(project.id))
+            .filter(issue::Column::Number.eq(issue_number))
+            .one(&self.db)
+            .await?
+            .ok_or(AppError::NotFound("Issue not found".to_string()))?;
+
+        issue_repo::Entity::delete_many()
+            .filter(issue_repo::Column::Issue.eq(issue.id))
+            .filter(issue_repo::Column::Repo.eq(repo_id))
+            .exec(&self.db)
+            .await?;
+
+        self.invalidate_issue_cache(project.id, issue_number).await;
+
+        Ok(())
+    }
+}
diff --git a/libs/service/issue/subscriber.rs b/libs/service/issue/subscriber.rs
new file mode 100644
index 0000000..e70aedb
--- /dev/null
+++ b/libs/service/issue/subscriber.rs
@@ -0,0 +1,227 @@
+use crate::AppService;
+use crate::error::AppError;
+use crate::project::activity::ActivityLogParams;
+use chrono::Utc;
+use models::issues::{issue, issue_subscriber};
+use models::projects::project_members;
+use models::users::user;
+use sea_orm::*;
+use serde::Serialize;
+use session::Session;
+use utoipa::ToSchema;
+use uuid::Uuid;
+
+/// A subscriber of an issue, denormalized with the user's name.
+#[derive(Debug, Clone, Serialize, ToSchema)]
+pub struct IssueSubscriberResponse {
+    pub issue: Uuid,
+    pub user_id: Uuid,
+    pub username: String,
+    pub subscribed: bool,
+    pub created_at: chrono::DateTime<Utc>,
+}
+
+impl AppService {
+    /// List subscribers for an issue.
+    pub async fn issue_subscriber_list(
+        &self,
+        project_name: String,
+        issue_number: i64,
+        ctx: &Session,
+    ) -> Result<Vec<IssueSubscriberResponse>, AppError> {
+        let project = self.utils_find_project_by_name(project_name).await?;
+        if let Some(uid) = ctx.user() {
+            self.check_project_access(project.id, uid).await?;
+        }
+
+        let issue = issue::Entity::find()
+            .filter(issue::Column::Project.eq(project.id))
+            .filter(issue::Column::Number.eq(issue_number))
+            .one(&self.db)
+            .await?
+ .ok_or(AppError::NotFound("Issue not found".to_string()))?; + + let subscribers = issue_subscriber::Entity::find() + .filter(issue_subscriber::Column::Issue.eq(issue.id)) + .filter(issue_subscriber::Column::Subscribed.eq(true)) + .all(&self.db) + .await?; + + let user_ids: Vec = subscribers.iter().map(|s| s.user).collect(); + let users = if user_ids.is_empty() { + vec![] + } else { + user::Entity::find() + .filter(user::Column::Uid.is_in(user_ids)) + .all(&self.db) + .await? + }; + + let responses: Vec = subscribers + .into_iter() + .filter_map(|s| { + let username = users.iter().find(|u| u.uid == s.user)?.username.clone(); + Some(IssueSubscriberResponse { + issue: s.issue, + user_id: s.user, + username, + subscribed: s.subscribed, + created_at: s.created_at, + }) + }) + .collect(); + + Ok(responses) + } + + /// Subscribe the current user to an issue. + pub async fn issue_subscribe( + &self, + project_name: String, + issue_number: i64, + ctx: &Session, + ) -> Result { + let user_uid = ctx.user().ok_or(AppError::Unauthorized)?; + let project = self.utils_find_project_by_name(project_name).await?; + + let _member = project_members::Entity::find() + .filter(project_members::Column::Project.eq(project.id)) + .filter(project_members::Column::User.eq(user_uid)) + .one(&self.db) + .await? + .ok_or(AppError::NoPower)?; + + let issue = issue::Entity::find() + .filter(issue::Column::Project.eq(project.id)) + .filter(issue::Column::Number.eq(issue_number)) + .one(&self.db) + .await? 
+ .ok_or(AppError::NotFound("Issue not found".to_string()))?; + + let now = Utc::now(); + + // Upsert: update existing record or create new + let existing = issue_subscriber::Entity::find() + .filter(issue_subscriber::Column::Issue.eq(issue.id)) + .filter(issue_subscriber::Column::User.eq(user_uid)) + .one(&self.db) + .await?; + + let model = if let Some(existing) = existing { + let mut active: issue_subscriber::ActiveModel = existing.into(); + active.subscribed = Set(true); + active.update(&self.db).await? + } else { + let active = issue_subscriber::ActiveModel { + issue: Set(issue.id), + user: Set(user_uid), + subscribed: Set(true), + created_at: Set(now), + ..Default::default() + }; + active.insert(&self.db).await? + }; + + let username = user::Entity::find_by_id(user_uid) + .one(&self.db) + .await + .ok() + .flatten() + .map(|u| u.username) + .unwrap_or_default(); + + let response = Ok(IssueSubscriberResponse { + issue: model.issue, + user_id: model.user, + username: username.clone(), + subscribed: model.subscribed, + created_at: model.created_at, + }); + + let _ = self + .project_log_activity( + project.id, + None, + user_uid, + ActivityLogParams { + event_type: "issue_subscribe".to_string(), + title: format!("{} subscribed to issue #{}", username.clone(), issue_number), + repo_id: None, + content: None, + event_id: Some(model.issue), + event_sub_id: Some(issue_number), + metadata: None, + is_private: false, + }, + ) + .await; + + response + } + + /// Unsubscribe the current user from an issue. + pub async fn issue_unsubscribe( + &self, + project_name: String, + issue_number: i64, + ctx: &Session, + ) -> Result<(), AppError> { + let user_uid = ctx.user().ok_or(AppError::Unauthorized)?; + let project = self.utils_find_project_by_name(project_name).await?; + + let _member = project_members::Entity::find() + .filter(project_members::Column::Project.eq(project.id)) + .filter(project_members::Column::User.eq(user_uid)) + .one(&self.db) + .await? 
+ .ok_or(AppError::NoPower)?; + + let issue = issue::Entity::find() + .filter(issue::Column::Project.eq(project.id)) + .filter(issue::Column::Number.eq(issue_number)) + .one(&self.db) + .await? + .ok_or(AppError::NotFound("Issue not found".to_string()))?; + + // Soft-delete: set subscribed = false + let existing = issue_subscriber::Entity::find() + .filter(issue_subscriber::Column::Issue.eq(issue.id)) + .filter(issue_subscriber::Column::User.eq(user_uid)) + .one(&self.db) + .await?; + + if let Some(existing) = existing { + let mut active: issue_subscriber::ActiveModel = existing.into(); + active.subscribed = Set(false); + active.update(&self.db).await?; + } + + let subscriber_username = user::Entity::find_by_id(user_uid) + .one(&self.db) + .await + .ok() + .flatten() + .map(|u| u.username) + .unwrap_or_else(|| user_uid.to_string()); + let _ = self + .project_log_activity( + project.id, + None, + user_uid, + ActivityLogParams { + event_type: "issue_unsubscribe".to_string(), + title: format!( + "{} unsubscribed from issue #{}", + subscriber_username, issue_number + ), + repo_id: None, + content: None, + event_id: Some(issue.id), + event_sub_id: Some(issue_number), + metadata: None, + is_private: false, + }, + ) + .await; + Ok(()) + } +} diff --git a/libs/service/lib.rs b/libs/service/lib.rs new file mode 100644 index 0000000..e91eb76 --- /dev/null +++ b/libs/service/lib.rs @@ -0,0 +1,217 @@ +use std::sync::Arc; + +use ::agent::task::service::TaskService; +use avatar::AppAvatar; +use config::AppConfig; +use db::cache::AppCache; +use db::database::AppDatabase; +use email::AppEmail; +use queue::{ + start_email_worker, EmailEnvelope, EmailSendFn, EmailSendFut, GetRedis, MessageProducer, RedisFuture, + RedisPubSub, +}; +use room::metrics::RoomMetrics; +use room::RoomService; +use serde::{Deserialize, Serialize}; +use slog::{Drain, OwnedKVList, Record}; +use utoipa::ToSchema; +use ws_token::WsTokenService; + +#[derive(Clone)] +pub struct AppService { + pub db: AppDatabase, 
+ pub config: AppConfig, + pub cache: AppCache, + pub email: AppEmail, + pub logs: slog::Logger, + pub avatar: AppAvatar, + pub room: RoomService, + pub ws_token: Arc, + pub queue_producer: MessageProducer, +} + +impl AppService { + pub async fn start_room_workers( + &self, + shutdown_rx: tokio::sync::broadcast::Receiver<()>, + log: slog::Logger, + ) -> anyhow::Result<()> { + self.room.start_workers(shutdown_rx, log).await + } + + pub fn build_slog_logger(level: &str) -> slog::Logger { + let level_filter = match level { + "trace" => 0usize, + "debug" => 1usize, + "info" => 2usize, + "warn" => 3usize, + "error" => 4usize, + _ => 2usize, + }; + + struct StderrDrain(usize); + + impl Drain for StderrDrain { + type Ok = (); + type Err = (); + #[inline] + fn log(&self, record: &Record, _logger: &OwnedKVList) -> Result<(), ()> { + let slog_level = match record.level() { + slog::Level::Trace => 0, + slog::Level::Debug => 1, + slog::Level::Info => 2, + slog::Level::Warning => 3, + slog::Level::Error => 4, + slog::Level::Critical => 5, + }; + if slog_level < self.0 { + return Ok(()); + } + let _ = eprintln!( + "{} [{}] {}:{} - {}", + chrono::Utc::now().format("%Y-%m-%dT%H:%M:%S%.3fZ"), + record.level().to_string(), + record + .file() + .rsplit_once('/') + .map(|(_, s)| s) + .unwrap_or(record.file()), + record.line(), + record.msg(), + ); + Ok(()) + } + } + + let drain = StderrDrain(level_filter); + let drain = std::sync::Mutex::new(drain); + let drain = slog::Fuse::new(drain); + slog::Logger::root(drain, slog::o!()) + } + + pub async fn new(config: AppConfig) -> anyhow::Result { + let db = AppDatabase::init(&config).await?; + let cache = AppCache::init(&config).await?; + let email = AppEmail::init(&config).await?; + let avatar = AppAvatar::init(&config).await?; + + let log_level = config.log_level().unwrap_or_else(|_| "info".to_string()); + let logs = Self::build_slog_logger(&log_level); + + // Build get_redis closure for MessageProducer + let get_redis: Arc< + dyn Fn() -> 
tokio::task::JoinHandle> + + Send + + Sync, + > = Arc::new({ + let pool = cache.redis_pool().clone(); + move || { + let pool = pool.clone(); + tokio::spawn(async move { pool.get().await.map_err(|e| anyhow::anyhow!("{}", e)) }) + } + }); + + let redis_pubsub = Some(RedisPubSub { + get_redis: get_redis.clone(), + log: logs.clone(), + }); + + let message_producer = + MessageProducer::new(get_redis.clone(), redis_pubsub.clone(), 10000, logs.clone()); + + // Build RoomService + let task_service = Arc::new(TaskService::new(db.clone())); + let room_metrics = Arc::new(RoomMetrics::default()); + let room_manager = Arc::new(room::connection::RoomConnectionManager::new( + room_metrics.clone(), + )); + + let redis_url = config + .redis_urls() + .ok() + .and_then(|urls| urls.first().cloned()) + .unwrap_or_else(|| "redis://127.0.0.1:6379".to_string()); + + let room = RoomService::new( + db.clone(), + cache.clone(), + message_producer.clone(), + room_manager, + redis_url, + None, + Some(task_service.clone()), + logs.clone(), + None, + ); + + // Build WsTokenService + let ws_token = Arc::new(WsTokenService::new(get_redis.clone())); + + Ok(Self { + db, + config, + cache, + email, + logs, + avatar, + room, + ws_token, + queue_producer: message_producer, + }) + } + + pub async fn start_email_workers( + &self, + shutdown_rx: tokio::sync::broadcast::Receiver<()>, + ) -> anyhow::Result<()> { + let get_redis_fn = self.queue_producer.get_redis.clone(); + let get_redis: GetRedis = Arc::new(move || -> RedisFuture { + let get_redis_fn = get_redis_fn.clone(); + Box::pin(async move { get_redis_fn().await? 
}) + }); + + let email = self.email.clone(); + let logs = self.logs.clone(); + let send_fn: EmailSendFn = Arc::new(move |envelopes: Vec| -> EmailSendFut { + let email = email.clone(); + let logs = logs.clone(); + Box::pin(async move { + for envelope in envelopes { + let to = envelope.to.clone(); + let msg = email::EmailMessage { + to: envelope.to, + subject: envelope.subject, + body: envelope.body, + }; + if let Err(e) = email.send(msg).await { + slog::error!(logs, "email send failed"; "to" => to, "error" => %e); + } + } + Ok(()) + }) + }); + + start_email_worker(get_redis, send_fn, shutdown_rx, self.logs.clone()).await; + Ok(()) + } +} + +pub mod agent; +pub mod auth; +pub mod error; +pub mod git; +pub mod issue; +pub mod project; +pub mod pull_request; +pub mod search; +pub mod skill; +pub mod user; +pub mod utils; +pub mod workspace; +pub mod ws_token; + +#[derive(Debug, Clone, Deserialize, Serialize, ToSchema)] +pub struct Pager { + pub page: i64, + pub par_page: i64, +} diff --git a/libs/service/project/activity.rs b/libs/service/project/activity.rs new file mode 100644 index 0000000..d5bb5a3 --- /dev/null +++ b/libs/service/project/activity.rs @@ -0,0 +1,445 @@ +use crate::AppService; +use crate::error::AppError; +use models::DateTimeUtc; +use models::projects::MemberRole; +use models::projects::project_activity; +use models::projects::project_members; +use models::repos::repo; +use models::users::user; +use sea_orm::*; +use serde::{Deserialize, Serialize}; +use serde_json::Value; +use session::Session; +use uuid::Uuid; + +#[derive(Deserialize, Serialize, Clone, Debug, utoipa::ToSchema)] +pub struct ActivityParams { + pub event_type: Option, + pub start_date: Option, + pub end_date: Option, +} + +#[derive(Deserialize, Serialize, Clone, Debug, utoipa::ToSchema)] +pub struct ActivityLogParams { + pub event_type: String, + pub title: String, + pub repo_id: Option, + pub content: Option, + pub event_id: Option, + pub event_sub_id: Option, + pub metadata: Option, 
+ pub is_private: bool, +} + +use utoipa::__dev::ComposeSchema; +use utoipa::ToSchema; +use utoipa::openapi::schema::{ObjectBuilder, Type}; +use utoipa::openapi::{KnownFormat, SchemaFormat}; + +/// Role-based visibility level for an activity. +#[derive(Deserialize, Serialize, Clone, Debug, utoipa::ToSchema)] +#[serde(rename_all = "snake_case")] +pub enum ActivityVisibility { + /// Visible to all project members (public activities). + Public, + /// Visible only to admins and owners (private activities). + Private, +} + +#[derive(Deserialize, Serialize, Clone, Debug)] +pub struct ActivityLogResponse { + pub id: i64, + pub project_uid: Uuid, + pub repo_uid: Option, + pub actor_uid: Uuid, + pub event_type: String, + pub event_id: Option, + pub event_sub_id: Option, + pub title: String, + pub content: Option, + pub metadata: Option, + pub is_private: bool, + /// Visibility level based on `is_private` flag. + pub visibility: ActivityVisibility, + pub created_at: DateTimeUtc, +} + +impl ActivityLogResponse { + /// Apply role-based content filtering. + /// Admins/owners see everything. + /// Members see redacted content/metadata for sensitive event types. 
+ fn apply_role_filter(self, user_role: &Option) -> ActivityLogResponse { + let is_admin = matches!(user_role, Some(MemberRole::Owner) | Some(MemberRole::Admin)); + + let is_sensitive = matches!( + self.event_type.as_str(), + "member_role_change" + | "member_remove" + | "member_invite" + | "invitation_cancelled" + | "join_request_approve" + | "join_request_reject" + | "join_request_cancel" + ); + + if !is_admin && is_sensitive { + return ActivityLogResponse { + content: None, + metadata: None, + ..self + }; + } + self + } +} + +impl ComposeSchema for ActivityLogResponse { + fn compose( + _: Vec>, + ) -> utoipa::openapi::RefOr { + utoipa::openapi::RefOr::T(utoipa::openapi::Schema::Object( + ObjectBuilder::new() + .property( + "id", + ObjectBuilder::new().schema_type(Type::Integer).format(Some( + utoipa::openapi::schema::SchemaFormat::KnownFormat( + utoipa::openapi::schema::KnownFormat::Int64, + ), + )), + ) + .property( + "project_uid", + ObjectBuilder::new() + .schema_type(Type::String) + .format(Some(SchemaFormat::KnownFormat(KnownFormat::Uuid))), + ) + .property( + "repo_uid", + ObjectBuilder::new() + .schema_type(Type::String) + .format(Some(SchemaFormat::KnownFormat(KnownFormat::Uuid))), + ) + .property( + "actor_uid", + ObjectBuilder::new() + .schema_type(Type::String) + .format(Some(SchemaFormat::KnownFormat(KnownFormat::Uuid))), + ) + .property("event_type", ObjectBuilder::new().schema_type(Type::String)) + .property("title", ObjectBuilder::new().schema_type(Type::String)) + .property("content", ObjectBuilder::new().schema_type(Type::String)) + .property( + "is_private", + ObjectBuilder::new().schema_type(Type::Boolean), + ) + .property("visibility", ObjectBuilder::new().schema_type(Type::String)) + .property("created_at", ObjectBuilder::new().schema_type(Type::String)) + .required("id") + .required("project_uid") + .required("actor_uid") + .required("event_type") + .required("title") + .required("is_private") + .required("visibility") + 
.required("created_at") + .into(), + )) + } +} + +impl ToSchema for ActivityLogResponse {} + +#[derive(Serialize, Deserialize, Clone, Debug, utoipa::ToSchema)] +pub struct ActivityLogListResponse { + pub logs: Vec, + pub total: u64, + pub page: u64, + pub per_page: u64, + /// The current user's role in the project. + pub user_role: Option, +} + +impl From for ActivityLogResponse { + fn from(log: project_activity::Model) -> Self { + ActivityLogResponse { + id: log.id, + project_uid: log.project, + repo_uid: log.repo, + actor_uid: log.actor, + event_type: log.event_type, + event_id: log.event_id, + event_sub_id: log.event_sub_id, + title: log.title, + content: log.content, + metadata: log.metadata, + is_private: log.is_private, + visibility: if log.is_private { + ActivityVisibility::Private + } else { + ActivityVisibility::Public + }, + created_at: log.created_at, + } + } +} + +impl AppService { + pub async fn project_log_activity( + &self, + project_id: Uuid, + repo_id: Option, + actor_uid: Uuid, + params: ActivityLogParams, + ) -> Result { + let actor_username = user::Entity::find() + .filter(user::Column::Uid.eq(actor_uid)) + .one(&self.db) + .await + .ok() + .and_then(|u| u.map(|x| x.username)); + + let repo_name = match repo_id { + Some(rid) => repo::Entity::find() + .filter(repo::Column::Id.eq(rid)) + .one(&self.db) + .await + .ok() + .and_then(|r| r.map(|x| x.repo_name)), + None => None, + }; + + // Merge username and repo_name into metadata + let metadata = { + let mut meta = params.metadata.unwrap_or_default(); + if let Some(obj) = meta.as_object_mut() { + if let Some(ref username) = actor_username { + obj.insert("username".to_string(), serde_json::json!(username)); + } + if let Some(ref name) = repo_name { + obj.insert("repo_name".to_string(), serde_json::json!(name)); + } + } + meta + }; + + let log = project_activity::ActiveModel { + project: Set(project_id), + repo: Set(repo_id), + actor: Set(actor_uid), + event_type: Set(params.event_type), + event_id: 
Set(params.event_id), + event_sub_id: Set(params.event_sub_id), + title: Set(params.title), + content: Set(params.content), + metadata: Set(Some(metadata)), + is_private: Set(params.is_private), + created_at: Set(chrono::Utc::now()), + ..Default::default() + }; + let created_log = log + .insert(&self.db) + .await + .map_err(|e| AppError::DatabaseError(e.to_string()))?; + + Ok(ActivityLogResponse::from(created_log)) + } + + /// Get project activity feed with pagination and filtering. + /// Only returns activities the user has permission to see. + pub async fn project_get_activities( + &self, + project_name: String, + page: Option, + per_page: Option, + params: Option, + ctx: &Session, + ) -> Result { + let user_uid = ctx.user().ok_or(AppError::Unauthorized)?; + let project = self.utils_find_project_by_name(project_name).await?; + + // Check if user has access to the project + self.check_project_access(project.id, user_uid).await?; + + // Get user's role in the project + let user_role = match project_members::Entity::find() + .filter(project_members::Column::Project.eq(project.id)) + .filter(project_members::Column::User.eq(user_uid)) + .one(&self.db) + .await + { + Ok(Some(m)) => m.scope_role().ok(), + Ok(None) => None, + Err(e) => { + slog::warn!( + self.logs, + "failed to look up project member for activity log: {}", + e + ); + None + } + }; + + let is_admin = matches!(user_role, Some(MemberRole::Owner) | Some(MemberRole::Admin)); + + let page = page.unwrap_or(1); + let per_page = per_page.unwrap_or(20); + + // Build the query + let mut query = project_activity::Entity::find() + .filter(project_activity::Column::Project.eq(project.id)) + .order_by_desc(project_activity::Column::CreatedAt); + + // Non-admin members can only see public activities + if !is_admin { + query = query.filter(project_activity::Column::IsPrivate.eq(false)); + } + + // Apply filters if provided + if let Some(ref p) = params { + if let Some(ref event_type) = p.event_type { + query = 
query.filter(project_activity::Column::EventType.eq(event_type.clone())); + } + if let Some(ref start_date) = p.start_date { + if let Ok(dt) = chrono::DateTime::parse_from_rfc3339(start_date) { + query = query.filter( + project_activity::Column::CreatedAt.gte(dt.with_timezone(&chrono::Utc)), + ); + } + } + if let Some(ref end_date) = p.end_date { + if let Ok(dt) = chrono::DateTime::parse_from_rfc3339(end_date) { + query = query.filter( + project_activity::Column::CreatedAt.lte(dt.with_timezone(&chrono::Utc)), + ); + } + } + } + + let total = query + .clone() + .count(&self.db) + .await + .map_err(|e| AppError::DatabaseError(e.to_string()))?; + + let logs = query + .paginate(&self.db, per_page) + .fetch_page(page - 1) + .await + .map_err(|e| AppError::DatabaseError(e.to_string()))?; + + // Apply role-based content filtering (redact sensitive fields for non-admin members) + let logs: Vec = logs + .into_iter() + .map(|log| ActivityLogResponse::from(log).apply_role_filter(&user_role)) + .collect(); + + // Enrich metadata with actor username and repo_name + let actor_uids: Vec = logs.iter().map(|l| l.actor_uid).collect(); + let username_map = if actor_uids.is_empty() { + std::collections::HashMap::new() + } else { + let users = user::Entity::find() + .filter(user::Column::Uid.is_in(actor_uids)) + .all(&self.db) + .await?; + users.into_iter().map(|u| (u.uid, u.username)).collect() + }; + + let repo_uids: Vec = logs.iter().filter_map(|l| l.repo_uid).collect(); + let repo_name_map = if repo_uids.is_empty() { + std::collections::HashMap::new() + } else { + let repos = repo::Entity::find() + .filter(repo::Column::Id.is_in(repo_uids)) + .all(&self.db) + .await?; + repos.into_iter().map(|r| (r.id, r.repo_name)).collect() + }; + + let logs: Vec = logs + .into_iter() + .map(|mut log| { + let meta = log + .metadata + .get_or_insert_with(|| serde_json::Value::Object(serde_json::Map::new())); + if let Some(obj) = meta.as_object_mut() { + if let Some(username) = 
username_map.get(&log.actor_uid) { + obj.insert("username".to_string(), serde_json::json!(username)); + } + if let Some(ref repo_uid) = log.repo_uid { + if let Some(repo_name) = repo_name_map.get(repo_uid) { + obj.insert("repo_name".to_string(), serde_json::json!(repo_name)); + } + } + } + log + }) + .collect(); + + Ok(ActivityLogListResponse { + logs, + total, + page, + per_page, + user_role, + }) + } + + /// Get a single activity log by ID. + pub async fn project_get_activity( + &self, + activity_id: i64, + ctx: &Session, + ) -> Result { + let user_uid = ctx.user().ok_or(AppError::Unauthorized)?; + + let log = project_activity::Entity::find_by_id(activity_id) + .one(&self.db) + .await + .map_err(|e| AppError::DatabaseError(e.to_string()))? + .ok_or(AppError::NotFound("Activity not found".to_string()))?; + + // Check if user has access to the project + self.check_project_access(log.project, user_uid).await?; + + // Enrich metadata with actor username and repo_name + let mut log = ActivityLogResponse::from(log); + if let Ok(Some(user_record)) = user::Entity::find() + .filter(user::Column::Uid.eq(log.actor_uid)) + .one(&self.db) + .await + { + if let Some(ref mut meta) = log.metadata { + if let Some(obj) = meta.as_object_mut() { + obj.insert( + "username".to_string(), + serde_json::json!(user_record.username), + ); + } + } else { + log.metadata = Some(serde_json::json!({ "username": user_record.username })); + } + } + + if let Some(ref repo_uid) = log.repo_uid { + if let Ok(Some(repo_record)) = repo::Entity::find() + .filter(repo::Column::Id.eq(*repo_uid)) + .one(&self.db) + .await + { + if let Some(ref mut meta) = log.metadata { + if let Some(obj) = meta.as_object_mut() { + obj.insert( + "repo_name".to_string(), + serde_json::json!(repo_record.repo_name), + ); + } + } else { + log.metadata = Some(serde_json::json!({ "repo_name": repo_record.repo_name })); + } + } + } + + Ok(log) + } +} diff --git a/libs/service/project/audit.rs b/libs/service/project/audit.rs new 
file mode 100644 index 0000000..7af2853 --- /dev/null +++ b/libs/service/project/audit.rs @@ -0,0 +1,183 @@ +use crate::AppService; +use crate::error::AppError; +use models::DateTimeUtc; +use models::projects::{MemberRole, project_audit_log}; +use sea_orm::*; +use serde::{Deserialize, Serialize}; +use serde_json::Value; +use session::Session; +use uuid::Uuid; + +#[derive(Deserialize, Serialize, Clone, Debug, utoipa::ToSchema)] +pub struct AuditLogParams { + pub action: String, + pub details: Option, +} + +use utoipa::__dev::ComposeSchema; +use utoipa::ToSchema; +use utoipa::openapi::schema::{ObjectBuilder, Type}; +use utoipa::openapi::{KnownFormat, SchemaFormat}; + +#[derive(Deserialize, Serialize, Clone, Debug)] +pub struct AuditLogResponse { + pub id: i64, + pub project_uid: Uuid, + pub actor_uid: Uuid, + pub action: String, + pub details: Option, + pub created_at: DateTimeUtc, +} + +impl ComposeSchema for AuditLogResponse { + fn compose( + _: Vec>, + ) -> utoipa::openapi::RefOr { + utoipa::openapi::RefOr::T(utoipa::openapi::Schema::Object( + ObjectBuilder::new() + .property( + "id", + ObjectBuilder::new().schema_type(Type::Integer).format(Some( + utoipa::openapi::schema::SchemaFormat::KnownFormat( + utoipa::openapi::schema::KnownFormat::Int64, + ), + )), + ) + .property( + "project_uid", + ObjectBuilder::new() + .schema_type(Type::String) + .format(Some(SchemaFormat::KnownFormat(KnownFormat::Uuid))), + ) + .property( + "actor_uid", + ObjectBuilder::new() + .schema_type(Type::String) + .format(Some(SchemaFormat::KnownFormat(KnownFormat::Uuid))), + ) + .property("action", ObjectBuilder::new().schema_type(Type::String)) + .property("details", ObjectBuilder::new()) + .required("id") + .required("project_uid") + .required("actor_uid") + .required("action") + .into(), + )) + } +} + +impl ToSchema for AuditLogResponse {} + +#[derive(Serialize, Deserialize, Clone, Debug, utoipa::ToSchema)] +pub struct AuditLogListResponse { + pub logs: Vec, + pub total: u64, + pub page: 
u64, + pub per_page: u64, +} + +impl From for AuditLogResponse { + fn from(log: project_audit_log::Model) -> Self { + AuditLogResponse { + id: log.id, + project_uid: log.project, + actor_uid: log.actor, + action: log.action, + details: log.details, + created_at: log.created_at, + } + } +} + +impl AppService { + pub async fn project_log_audit( + &self, + project_name: String, + params: AuditLogParams, + ctx: &Session, + ) -> Result { + let user_uid = ctx.user().ok_or(AppError::Unauthorized)?; + let project = self.utils_find_project_by_name(project_name).await?; + + let log = project_audit_log::ActiveModel { + project: Set(project.id), + actor: Set(user_uid), + action: Set(params.action), + details: Set(params.details), + created_at: Set(chrono::Utc::now()), + ..Default::default() + }; + let created_log = log + .insert(&self.db) + .await + .map_err(|_| AppError::UserNotFound)?; + + Ok(AuditLogResponse::from(created_log)) + } + + pub async fn project_get_audit_logs( + &self, + project_name: String, + page: Option, + per_page: Option, + ctx: &Session, + ) -> Result { + let user_uid = ctx.user().ok_or(AppError::Unauthorized)?; + let project = self.utils_find_project_by_name(project_name).await?; + + self.utils_check_project_permission( + &project.id, + user_uid, + &[MemberRole::Owner, MemberRole::Admin], + ) + .await?; + let page = page.unwrap_or(1); + let per_page = per_page.unwrap_or(20); + + let logs = project_audit_log::Entity::find() + .filter(project_audit_log::Column::Project.eq(project.id)) + .order_by_desc(project_audit_log::Column::CreatedAt) + .paginate(&self.db, per_page) + .fetch_page(page - 1) + .await + .map_err(|_| AppError::UserNotFound)?; + + let total = project_audit_log::Entity::find() + .filter(project_audit_log::Column::Project.eq(project.id)) + .count(&self.db) + .await + .map_err(|_| AppError::UserNotFound)?; + + let logs = logs.into_iter().map(AuditLogResponse::from).collect(); + + Ok(AuditLogListResponse { + logs, + total, + page, + per_page, + 
}) + } + + pub async fn project_get_audit_log( + &self, + log_id: i64, + ctx: &Session, + ) -> Result { + let user_uid = ctx.user().ok_or(AppError::Unauthorized)?; + + let log = project_audit_log::Entity::find_by_id(log_id) + .one(&self.db) + .await + .map_err(|_| AppError::UserNotFound)? + .ok_or(AppError::UserNotFound)?; + + self.utils_check_project_permission( + &log.project, + user_uid, + &[MemberRole::Owner, MemberRole::Admin], + ) + .await?; + + Ok(AuditLogResponse::from(log)) + } +} diff --git a/libs/service/project/avatar.rs b/libs/service/project/avatar.rs new file mode 100644 index 0000000..e029f6a --- /dev/null +++ b/libs/service/project/avatar.rs @@ -0,0 +1,59 @@ +use crate::AppService; +use crate::error::AppError; +use crate::project::audit::AuditLogParams; +use chrono::Utc; +use models::projects::{MemberRole, project}; +use sea_orm::prelude::Expr; +use sea_orm::*; +use serde_json::json; +use session::Session; + +impl AppService { + pub async fn project_avatar_upload( + &self, + ctx: &Session, + project_name: String, + file: Vec, + file_ext: String, + ) -> Result<(), AppError> { + let user_uid = ctx.user().ok_or(AppError::Unauthorized)?; + let project = self + .utils_find_project_by_name(project_name.clone()) + .await?; + let role = self + .utils_project_context_role(&ctx, project_name.clone()) + .await?; + if role == MemberRole::Member { + return Err(AppError::NoPower); + } + let time = Utc::now(); + let file_name = format!("{}-{}", project.id, time); + self.avatar + .upload(file, file_name.clone(), &file_ext) + .await + .map_err(|e| AppError::AvatarUploadError(e.to_string()))?; + let static_url = self.config.static_domain().unwrap_or("/static".to_string()); + let file_url = format!("{}/{}", static_url, file_name); + project::Entity::update_many() + .filter(project::Column::Id.eq(project.id)) + .col_expr(project::Column::AvatarUrl, Expr::value(file_url.clone())) + .exec(&self.db) + .await?; + self.project_log_audit( + project_name, + AuditLogParams { 
+ action: "update avatar".to_string(), + details: Some(json!({ + "file_name": file_name, + "file_url": file_url, + "project_name": project.name, + "project_uid": project.id, + "user_uid": user_uid, + })), + }, + ctx, + ) + .await?; + Ok(()) + } +} diff --git a/libs/service/project/billing.rs b/libs/service/project/billing.rs new file mode 100644 index 0000000..880d218 --- /dev/null +++ b/libs/service/project/billing.rs @@ -0,0 +1,182 @@ +use crate::AppService; +use crate::error::AppError; +use chrono::{DateTime, Datelike, NaiveDate, Utc}; +use models::Decimal; +use models::projects::{project_billing, project_billing_history}; +use sea_orm::sea_query::prelude::rust_decimal::prelude::ToPrimitive; +use sea_orm::*; +use serde::{Deserialize, Serialize}; +use session::Session; +use utoipa::{IntoParams, ToSchema}; +use uuid::Uuid; + +const DEFAULT_PROJECT_MONTHLY_CREDIT: f64 = 10.0; + +#[derive(Debug, Clone, Serialize, Deserialize, ToSchema)] +pub struct ProjectBillingCurrentResponse { + pub project_uid: Uuid, + pub currency: String, + pub monthly_quota: f64, + pub balance: f64, + pub month_used: f64, + pub cycle_start_utc: DateTime, + pub cycle_end_utc: DateTime, + pub updated_at: DateTime, + pub created_at: DateTime, +} + +#[derive(Debug, Clone, Serialize, Deserialize, ToSchema, IntoParams)] +pub struct ProjectBillingHistoryQuery { + pub page: Option, + pub per_page: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize, ToSchema)] +pub struct ProjectBillingHistoryItem { + pub uid: Uuid, + pub project_uid: Uuid, + pub user_uid: Option, + pub amount: f64, + pub currency: String, + pub reason: String, + pub extra: Option, + pub created_at: DateTime, +} + +#[derive(Debug, Clone, Serialize, Deserialize, ToSchema)] +pub struct ProjectBillingHistoryResponse { + pub page: u64, + pub per_page: u64, + pub total: u64, + pub list: Vec, +} + +impl AppService { + pub async fn project_billing_current( + &self, + ctx: &Session, + project_name: String, + ) -> Result { + let 
user_uid = ctx.user().ok_or(AppError::Unauthorized)?; + let project = self.utils_find_project_by_name(project_name).await?; + self.check_project_access(project.id, user_uid).await?; + + let now_utc = Utc::now(); + let (month_start, next_month_start) = utc_month_bounds(now_utc)?; + let billing = self + .ensure_project_billing(project.id, Some(user_uid)) + .await?; + + let month_used = project_billing_history::Entity::find() + .filter(project_billing_history::Column::Project.eq(project.id)) + .filter(project_billing_history::Column::Reason.eq("ai_usage_monthly")) + .filter(project_billing_history::Column::CreatedAt.gte(month_start)) + .filter(project_billing_history::Column::CreatedAt.lt(next_month_start)) + .order_by_desc(project_billing_history::Column::CreatedAt) + .one(&self.db) + .await? + .map(|m| m.amount) + .unwrap_or(Decimal::ZERO); + + Ok(ProjectBillingCurrentResponse { + project_uid: project.id, + currency: billing.currency, + monthly_quota: DEFAULT_PROJECT_MONTHLY_CREDIT, + balance: billing.balance.to_f64().unwrap_or_default(), + month_used: month_used.to_f64().unwrap_or_default(), + cycle_start_utc: month_start, + cycle_end_utc: next_month_start, + updated_at: billing.updated_at, + created_at: billing.created_at, + }) + } + + pub async fn project_billing_history( + &self, + ctx: &Session, + project_name: String, + query: ProjectBillingHistoryQuery, + ) -> Result { + let user_uid = ctx.user().ok_or(AppError::Unauthorized)?; + let project = self.utils_find_project_by_name(project_name).await?; + self.check_project_access(project.id, user_uid).await?; + + let page = std::cmp::max(query.page.unwrap_or(1), 1); + let per_page = query.per_page.unwrap_or(20).clamp(1, 200); + + self.ensure_project_billing(project.id, Some(user_uid)) + .await?; + + let paginator = project_billing_history::Entity::find() + .filter(project_billing_history::Column::Project.eq(project.id)) + .order_by_desc(project_billing_history::Column::CreatedAt) + .paginate(&self.db, per_page); + 
let total = paginator.num_items().await?; + let rows = paginator.fetch_page(page - 1).await?; + + let list = rows + .into_iter() + .map(|x| ProjectBillingHistoryItem { + uid: x.uid, + project_uid: x.project, + user_uid: x.user, + amount: x.amount.to_f64().unwrap_or_default(), + currency: x.currency, + reason: x.reason, + extra: x.extra.map(|v| v.into()), + created_at: x.created_at, + }) + .collect(); + + Ok(ProjectBillingHistoryResponse { + page, + per_page, + total, + list, + }) + } + + pub async fn ensure_project_billing( + &self, + project_uid: Uuid, + user_uid: Option, + ) -> Result { + if let Some(billing) = project_billing::Entity::find_by_id(project_uid) + .one(&self.db) + .await? + { + return Ok(billing); + } + + let now_utc = Utc::now(); + let created = project_billing::ActiveModel { + project: Set(project_uid), + balance: Set(Decimal::from(DEFAULT_PROJECT_MONTHLY_CREDIT as i64)), + currency: Set("USD".to_string()), + user: Set(user_uid), + updated_at: Set(now_utc), + created_at: Set(now_utc), + ..Default::default() + }; + Ok(created.insert(&self.db).await?) 
+ } +} + +fn utc_month_bounds(now_utc: DateTime) -> Result<(DateTime, DateTime), AppError> { + let year = now_utc.year(); + let month = now_utc.month(); + let month_start = NaiveDate::from_ymd_opt(year, month, 1) + .and_then(|d| d.and_hms_opt(0, 0, 0)) + .map(|d| chrono::TimeZone::from_utc_datetime(&Utc, &d)) + .ok_or_else(|| AppError::InternalServerError("Invalid UTC month start".to_string()))?; + let (next_year, next_month) = if month == 12 { + (year + 1, 1) + } else { + (year, month + 1) + }; + let next_month_start = NaiveDate::from_ymd_opt(next_year, next_month, 1) + .and_then(|d| d.and_hms_opt(0, 0, 0)) + .map(|d| chrono::TimeZone::from_utc_datetime(&Utc, &d)) + .ok_or_else(|| AppError::InternalServerError("Invalid UTC next month start".to_string()))?; + Ok((month_start, next_month_start)) +} diff --git a/libs/service/project/board.rs b/libs/service/project/board.rs new file mode 100644 index 0000000..104da45 --- /dev/null +++ b/libs/service/project/board.rs @@ -0,0 +1,496 @@ +use crate::AppService; +use crate::error::AppError; +use chrono::Utc; +use models::projects::{project_board, project_board_card, project_board_column}; +use sea_orm::*; +use serde::{Deserialize, Serialize}; +use session::Session; +use uuid::Uuid; + +#[derive(Debug, Clone, Deserialize, utoipa::ToSchema)] +pub struct CreateBoardParams { + pub name: String, + pub description: Option, +} + +#[derive(Debug, Clone, Deserialize, utoipa::ToSchema)] +pub struct UpdateBoardParams { + pub name: Option, + pub description: Option, +} + +#[derive(Debug, Clone, Serialize, utoipa::ToSchema)] +pub struct BoardResponse { + pub id: Uuid, + pub project: Uuid, + pub name: String, + pub description: Option, + pub created_by: Uuid, + pub created_at: chrono::DateTime, + pub updated_at: chrono::DateTime, +} + +impl From for BoardResponse { + fn from(m: project_board::Model) -> Self { + Self { + id: m.id, + project: m.project, + name: m.name, + description: m.description, + created_by: m.created_by, + created_at: 
m.created_at, + updated_at: m.updated_at, + } + } +} + +#[derive(Debug, Clone, Deserialize, utoipa::ToSchema)] +pub struct CreateColumnParams { + pub name: String, + #[serde(default)] + pub position: i32, + pub wip_limit: Option, + pub color: Option, +} + +#[derive(Debug, Clone, Deserialize, utoipa::ToSchema)] +pub struct UpdateColumnParams { + pub name: Option, + pub position: Option, + pub wip_limit: Option, + pub color: Option, +} + +#[derive(Debug, Clone, Serialize, utoipa::ToSchema)] +pub struct ColumnResponse { + pub id: Uuid, + pub board: Uuid, + pub name: String, + pub position: i32, + pub wip_limit: Option, + pub color: Option, +} + +impl From for ColumnResponse { + fn from(m: project_board_column::Model) -> Self { + Self { + id: m.id, + board: m.board, + name: m.name, + position: m.position, + wip_limit: m.wip_limit, + color: m.color, + } + } +} + +#[derive(Debug, Clone, Deserialize, utoipa::ToSchema)] +pub struct CreateCardParams { + pub column_id: Uuid, + pub title: String, + pub description: Option, + pub issue_id: Option, + pub assignee_id: Option, + pub due_date: Option>, + pub priority: Option, +} + +#[derive(Debug, Clone, Deserialize, utoipa::ToSchema)] +pub struct UpdateCardParams { + pub title: Option, + pub description: Option, + pub assignee_id: Option, + pub due_date: Option>, + pub priority: Option, +} + +#[derive(Debug, Clone, Deserialize, utoipa::ToSchema)] +pub struct MoveCardParams { + pub target_column_id: Uuid, + pub position: i32, +} + +#[derive(Debug, Clone, Serialize, utoipa::ToSchema)] +pub struct CardResponse { + pub id: Uuid, + pub column: Uuid, + pub issue_id: Option, + pub project: Option, + pub title: String, + pub description: Option, + pub position: i32, + pub assignee_id: Option, + pub due_date: Option>, + pub priority: Option, + pub created_by: Uuid, + pub created_at: chrono::DateTime, + pub updated_at: chrono::DateTime, +} + +impl From for CardResponse { + fn from(m: project_board_card::Model) -> Self { + Self { + id: 
m.id, + column: m.column, + issue_id: m.issue_id, + project: m.project, + title: m.title, + description: m.description, + position: m.position, + assignee_id: m.assignee_id, + due_date: m.due_date, + priority: m.priority, + created_by: m.created_by, + created_at: m.created_at, + updated_at: m.updated_at, + } + } +} + +#[derive(Debug, Clone, Serialize, utoipa::ToSchema)] +pub struct BoardWithColumnsResponse { + pub board: BoardResponse, + pub columns: Vec, +} + +#[derive(Debug, Clone, Serialize, utoipa::ToSchema)] +pub struct ColumnWithCardsResponse { + pub column: ColumnResponse, + pub cards: Vec, +} + +impl AppService { + /// List all boards for a project. + pub async fn board_list( + &self, + project_name: String, + ctx: &Session, + ) -> Result, AppError> { + let _ = ctx.user().ok_or(AppError::Unauthorized)?; + let project = self.utils_find_project_by_name(project_name).await?; + let boards = project_board::Entity::find() + .filter(project_board::Column::Project.eq(project.id)) + .order_by_asc(project_board::Column::CreatedAt) + .all(&self.db) + .await?; + Ok(boards.into_iter().map(BoardResponse::from).collect()) + } + + pub async fn board_get( + &self, + project_name: String, + board_id: Uuid, + ctx: &Session, + ) -> Result { + let _ = ctx.user().ok_or(AppError::Unauthorized)?; + let project = self.utils_find_project_by_name(project_name).await?; + + let board = project_board::Entity::find_by_id(board_id) + .filter(project_board::Column::Project.eq(project.id)) + .one(&self.db) + .await? 
+ .ok_or_else(|| AppError::NotFound("Board not found".to_string()))?; + + let columns = project_board_column::Entity::find() + .filter(project_board_column::Column::Board.eq(board_id)) + .order_by_asc(project_board_column::Column::Position) + .all(&self.db) + .await?; + + let column_ids: Vec = columns.iter().map(|c| c.id).collect(); + let cards = if column_ids.is_empty() { + vec![] + } else { + project_board_card::Entity::find() + .filter(project_board_card::Column::Column.is_in(column_ids)) + .order_by_asc(project_board_card::Column::Position) + .all(&self.db) + .await? + }; + + let columns_with_cards: Vec = columns + .into_iter() + .map(|c| { + let col_cards: Vec = cards + .iter() + .filter(|card| card.column == c.id) + .cloned() + .map(CardResponse::from) + .collect(); + ColumnWithCardsResponse { + column: ColumnResponse::from(c), + cards: col_cards, + } + }) + .collect(); + + Ok(BoardWithColumnsResponse { + board: BoardResponse::from(board), + columns: columns_with_cards, + }) + } + + /// Create a new board. + pub async fn board_create( + &self, + project_name: String, + params: CreateBoardParams, + ctx: &Session, + ) -> Result { + let user_uid = ctx.user().ok_or(AppError::Unauthorized)?; + let project = self.utils_find_project_by_name(project_name).await?; + let now = Utc::now(); + + let active = project_board::ActiveModel { + id: Set(Uuid::new_v4()), + project: Set(project.id), + name: Set(params.name), + description: Set(params.description), + created_by: Set(user_uid), + created_at: Set(now), + updated_at: Set(now), + }; + let board = active.insert(&self.db).await?; + Ok(BoardResponse::from(board)) + } + + /// Update a board. 
+ pub async fn board_update( + &self, + project_name: String, + board_id: Uuid, + params: UpdateBoardParams, + ctx: &Session, + ) -> Result { + let _ = ctx.user().ok_or(AppError::Unauthorized)?; + let project = self.utils_find_project_by_name(project_name).await?; + + let board = project_board::Entity::find_by_id(board_id) + .filter(project_board::Column::Project.eq(project.id)) + .one(&self.db) + .await? + .ok_or_else(|| AppError::NotFound("Board not found".to_string()))?; + + let mut active: project_board::ActiveModel = board.into(); + if let Some(v) = params.name { + active.name = Set(v); + } + if params.description.is_some() { + active.description = Set(params.description); + } + active.updated_at = Set(Utc::now()); + let updated = active.update(&self.db).await?; + Ok(BoardResponse::from(updated)) + } + + /// Delete a board. + pub async fn board_delete( + &self, + project_name: String, + board_id: Uuid, + ctx: &Session, + ) -> Result<(), AppError> { + let _ = ctx.user().ok_or(AppError::Unauthorized)?; + let project = self.utils_find_project_by_name(project_name).await?; + + let deleted = project_board::Entity::delete_by_id(board_id) + .filter(project_board::Column::Project.eq(project.id)) + .exec(&self.db) + .await?; + if deleted.rows_affected == 0 { + return Err(AppError::NotFound("Board not found".to_string())); + } + Ok(()) + } + + /// Create a column on a board. + pub async fn column_create( + &self, + project_name: String, + board_id: Uuid, + params: CreateColumnParams, + ctx: &Session, + ) -> Result { + let _ = ctx.user().ok_or(AppError::Unauthorized)?; + let project = self.utils_find_project_by_name(project_name).await?; + + let board = project_board::Entity::find_by_id(board_id) + .filter(project_board::Column::Project.eq(project.id)) + .one(&self.db) + .await? 
+ .ok_or_else(|| AppError::NotFound("Board not found".to_string()))?; + + let active = project_board_column::ActiveModel { + id: Set(Uuid::new_v4()), + board: Set(board.id), + name: Set(params.name), + position: Set(params.position), + wip_limit: Set(params.wip_limit), + color: Set(params.color), + }; + let column = active.insert(&self.db).await?; + Ok(ColumnResponse::from(column)) + } + + /// Update a column. + pub async fn column_update( + &self, + project_name: String, + column_id: Uuid, + params: UpdateColumnParams, + ctx: &Session, + ) -> Result { + let _ = ctx.user().ok_or(AppError::Unauthorized)?; + let _project = self.utils_find_project_by_name(project_name).await?; + + let column = project_board_column::Entity::find_by_id(column_id) + .one(&self.db) + .await? + .ok_or_else(|| AppError::NotFound("Column not found".to_string()))?; + + let mut active: project_board_column::ActiveModel = column.into(); + if let Some(v) = params.name { + active.name = Set(v); + } + if let Some(v) = params.position { + active.position = Set(v); + } + if params.wip_limit.is_some() { + active.wip_limit = Set(params.wip_limit); + } + if params.color.is_some() { + active.color = Set(params.color); + } + let updated = active.update(&self.db).await?; + Ok(ColumnResponse::from(updated)) + } + + /// Delete a column. + pub async fn column_delete( + &self, + project_name: String, + column_id: Uuid, + ctx: &Session, + ) -> Result<(), AppError> { + let _ = ctx.user().ok_or(AppError::Unauthorized)?; + let _project = self.utils_find_project_by_name(project_name).await?; + + let deleted = project_board_column::Entity::delete_by_id(column_id) + .exec(&self.db) + .await?; + if deleted.rows_affected == 0 { + return Err(AppError::NotFound("Column not found".to_string())); + } + Ok(()) + } + + /// Create a card in a column. 
+ pub async fn card_create( + &self, + project_name: String, + params: CreateCardParams, + ctx: &Session, + ) -> Result { + let user_uid = ctx.user().ok_or(AppError::Unauthorized)?; + let project = self.utils_find_project_by_name(project_name).await?; + let now = Utc::now(); + + let active = project_board_card::ActiveModel { + id: Set(Uuid::new_v4()), + column: Set(params.column_id), + issue_id: Set(params.issue_id), + project: Set(Some(project.id)), + title: Set(params.title), + description: Set(params.description), + position: Set(0), + assignee_id: Set(params.assignee_id), + due_date: Set(params.due_date), + priority: Set(params.priority), + created_by: Set(user_uid), + created_at: Set(now), + updated_at: Set(now), + }; + let card = active.insert(&self.db).await?; + Ok(CardResponse::from(card)) + } + + /// Update a card. + pub async fn card_update( + &self, + project_name: String, + card_id: Uuid, + params: UpdateCardParams, + ctx: &Session, + ) -> Result { + let _ = ctx.user().ok_or(AppError::Unauthorized)?; + let _project = self.utils_find_project_by_name(project_name).await?; + + let card = project_board_card::Entity::find_by_id(card_id) + .one(&self.db) + .await? + .ok_or_else(|| AppError::NotFound("Card not found".to_string()))?; + + let mut active: project_board_card::ActiveModel = card.into(); + if let Some(v) = params.title { + active.title = Set(v); + } + if params.description.is_some() { + active.description = Set(params.description); + } + if params.assignee_id.is_some() { + active.assignee_id = Set(params.assignee_id); + } + if params.due_date.is_some() { + active.due_date = Set(params.due_date); + } + if params.priority.is_some() { + active.priority = Set(params.priority); + } + active.updated_at = Set(Utc::now()); + let updated = active.update(&self.db).await?; + Ok(CardResponse::from(updated)) + } + + /// Move a card to a different column and/or position. 
+ pub async fn card_move( + &self, + project_name: String, + card_id: Uuid, + params: MoveCardParams, + ctx: &Session, + ) -> Result { + let _ = ctx.user().ok_or(AppError::Unauthorized)?; + let _project = self.utils_find_project_by_name(project_name).await?; + + let card = project_board_card::Entity::find_by_id(card_id) + .one(&self.db) + .await? + .ok_or_else(|| AppError::NotFound("Card not found".to_string()))?; + + let mut active: project_board_card::ActiveModel = card.into(); + active.column = Set(params.target_column_id); + active.position = Set(params.position); + active.updated_at = Set(Utc::now()); + let updated = active.update(&self.db).await?; + Ok(CardResponse::from(updated)) + } + + /// Delete a card. + pub async fn card_delete( + &self, + project_name: String, + card_id: Uuid, + ctx: &Session, + ) -> Result<(), AppError> { + let _ = ctx.user().ok_or(AppError::Unauthorized)?; + let _project = self.utils_find_project_by_name(project_name).await?; + + let deleted = project_board_card::Entity::delete_by_id(card_id) + .exec(&self.db) + .await?; + if deleted.rows_affected == 0 { + return Err(AppError::NotFound("Card not found".to_string())); + } + Ok(()) + } +} diff --git a/libs/service/project/can_use.rs b/libs/service/project/can_use.rs new file mode 100644 index 0000000..4549e9f --- /dev/null +++ b/libs/service/project/can_use.rs @@ -0,0 +1,42 @@ +use crate::AppService; +use crate::error::AppError; +use models::projects::{project, project_members}; +use sea_orm::{ColumnTrait, EntityTrait, QueryFilter}; +use serde::{Deserialize, Serialize}; +use session::Session; +use utoipa::ToSchema; +use uuid::Uuid; + +#[derive(Deserialize, Serialize, Debug, ToSchema)] +pub struct CanUseProjectItem { + pub name: String, + pub avatar_url: Option, + pub display_name: String, + pub uid: Uuid, + pub is_public: bool, +} +impl AppService { + pub async fn can_use_project(&self, ctx: &Session) -> Result, AppError> { + let user_uid = ctx.user().ok_or(AppError::Unauthorized)?; + 
let member = project_members::Entity::find() + .filter(project_members::Column::User.eq(user_uid)) + .all(&self.db) + .await?; + let mut result = vec![]; + for member in member { + if let Some(project) = project::Entity::find_by_id(member.project) + .one(&self.db) + .await? + { + result.push(CanUseProjectItem { + name: project.name, + avatar_url: project.avatar_url, + display_name: project.display_name, + uid: project.id, + is_public: project.is_public, + }); + } + } + Ok(result) + } +} diff --git a/libs/service/project/info.rs b/libs/service/project/info.rs new file mode 100644 index 0000000..d4c7e80 --- /dev/null +++ b/libs/service/project/info.rs @@ -0,0 +1,143 @@ +use crate::AppService; +use crate::error::AppError; +use chrono::{DateTime, Utc}; +use models::projects::{MemberRole, project_label, project_like, project_members, project_watch}; +use models::system::label; +use models::users::user; +use sea_orm::*; +use serde::{Deserialize, Serialize}; +use session::Session; +use utoipa::ToSchema; +use uuid::Uuid; + +#[derive(Debug, Clone, Deserialize, Serialize, ToSchema)] +pub struct ProjectInfoRelational { + pub uid: Uuid, + pub name: String, + pub display_name: String, + pub avatar_url: Option, + pub description: Option, + pub is_public: bool, + pub created_at: DateTime, + pub updated_at: DateTime, + + pub created_by: Uuid, + pub created_username_name: String, + pub created_display_name: Option, + pub created_avatar_url: Option, + + pub member_count: i64, + pub like_count: i64, + pub watch_count: i64, + pub keys: Vec, + pub labels: Vec, + pub role: Option, + pub is_like: bool, + pub is_watch: bool, +} + +#[derive(Debug, Clone, Deserialize, Serialize, ToSchema)] +pub struct ProjectInfoKeyValue { + pub key: String, + pub value: String, +} + +#[derive(Debug, Clone, Deserialize, Serialize, ToSchema)] +pub struct ProjectInfoLabel { + pub name: String, + pub color: String, +} + +impl AppService { + pub async fn project_info( + &self, + ctx: &Session, + project_name: 
String, + ) -> Result { + let user_uid = ctx.user().ok_or(AppError::Unauthorized)?; + let project = self.utils_find_project_by_name(project_name).await?; + let is_member = self + .utils_project_context_role(&ctx, project.name.clone()) + .await + .ok(); + if !project.is_public && is_member.is_none() { + return Err(AppError::NotFound("Project not found".to_string())); + } + + let creator = user::Entity::find() + .filter(user::Column::Uid.eq(project.created_by)) + .one(&self.db) + .await? + .ok_or(AppError::UserNotFound)?; + + let member_count = project_members::Entity::find() + .filter(project_members::Column::Project.eq(project.id)) + .count(&self.db) + .await?; + + let like_count = project_like::Entity::find() + .filter(project_like::Column::Project.eq(project.id)) + .count(&self.db) + .await?; + + let watch_count = project_watch::Entity::find() + .filter(project_watch::Column::Project.eq(project.id)) + .count(&self.db) + .await?; + + let labels_model = project_label::Entity::find() + .filter(project_label::Column::Project.eq(project.id)) + .all(&self.db) + .await? + .iter() + .map(|x| x.label) + .collect::>(); + let labes = label::Entity::find() + .filter(label::Column::Id.is_in(labels_model)) + .all(&self.db) + .await?; + + let labels = labes + .into_iter() + .map(|l| ProjectInfoLabel { + name: l.name, + color: l.color, + }) + .collect(); + let is_like = project_like::Entity::find() + .filter(project_like::Column::Project.eq(project.id)) + .filter(project_like::Column::User.eq(user_uid)) + .count(&self.db) + .await? + > 0; + let is_watch = project_watch::Entity::find() + .filter(project_watch::Column::Project.eq(project.id)) + .filter(project_watch::Column::User.eq(user_uid)) + .count(&self.db) + .await? 
+ > 0; + + Ok(ProjectInfoRelational { + uid: project.id, + name: project.name, + display_name: project.display_name, + avatar_url: project.avatar_url, + description: project.description, + is_public: project.is_public, + created_at: project.created_at, + updated_at: project.updated_at, + created_by: project.created_by, + created_username_name: creator.username, + created_display_name: creator.display_name, + created_avatar_url: creator.avatar_url, + member_count: member_count as i64, + like_count: like_count as i64, + watch_count: watch_count as i64, + keys: Vec::new(), + labels, + role: is_member, + is_like, + is_watch, + }) + } +} diff --git a/libs/service/project/init.rs b/libs/service/project/init.rs new file mode 100644 index 0000000..2b64e8a --- /dev/null +++ b/libs/service/project/init.rs @@ -0,0 +1,140 @@ +use crate::AppService; +use crate::error::AppError; +use chrono::{DateTime, Utc}; +use models::Decimal; +use models::projects::{MemberRole, project, project_audit_log, project_billing, project_members}; +use models::workspaces::workspace_membership; +use sea_orm::*; +use serde::{Deserialize, Serialize}; +use session::Session; +use uuid::Uuid; + +#[derive(Deserialize, Serialize, Clone, Debug, utoipa::ToSchema)] +pub struct ProjectInitParams { + pub name: String, + pub description: Option, + pub is_public: bool, + /// Optional workspace slug to associate this project with. 
+ pub workspace_slug: Option, +} + +#[derive(Deserialize, Serialize, Clone, Debug, utoipa::ToSchema)] +pub struct ProjectInitResponse { + pub params: ProjectInitParams, + pub project: ProjectModel, +} + +#[derive(Deserialize, Serialize, Clone, Debug, utoipa::ToSchema)] +pub struct ProjectModel { + pub uid: Uuid, + pub name: String, + pub display_name: String, + pub avatar_url: Option, + pub description: Option, + pub is_public: bool, + pub workspace_id: Option, + pub created_by: Uuid, + pub created_at: DateTime, + pub updated_at: DateTime, +} + +impl AppService { + pub async fn project_init( + &self, + ctx: &Session, + params: ProjectInitParams, + ) -> Result { + let inner = params.clone(); + if let Ok(_) = self.utils_find_project_by_name(params.name.clone()).await { + return Err(AppError::ProjectNameAlreadyExists); + } + let user = ctx.user().ok_or(AppError::Unauthorized)?; + let user = self.utils_find_user_by_uid(user).await?; + + // Resolve workspace if provided + let workspace_id = match ¶ms.workspace_slug { + Some(slug) => { + let ws = self.utils_find_workspace_by_slug(slug.clone()).await?; + let membership = workspace_membership::Entity::find() + .filter(workspace_membership::Column::WorkspaceId.eq(ws.id)) + .filter(workspace_membership::Column::UserId.eq(user.uid)) + .filter(workspace_membership::Column::Status.eq("active")) + .one(&self.db) + .await?; + if membership.is_none() { + return Err(AppError::NotWorkspaceMember); + } + Some(ws.id) + } + None => None, + }; + + let project_uid = Uuid::now_v7(); + let txn = self.db.begin().await?; + let project = project::ActiveModel { + id: Set(project_uid), + name: Set(params.name.clone()), + display_name: Set(params.name), + avatar_url: Set(None), + description: Set(params.description), + is_public: Set(params.is_public), + created_by: Set(user.uid), + workspace_id: Set(workspace_id), + created_at: Set(Utc::now()), + updated_at: Set(Utc::now()), + }; + let _project = project.insert(&txn).await?; + + let 
project_member = project_members::ActiveModel { + id: Default::default(), + project: Set(_project.id), + user: Set(user.uid), + scope: Set(MemberRole::Owner.to_string()), + joined_at: Set(Utc::now()), + }; + project_member.insert(&txn).await?; + + let billing = project_billing::ActiveModel { + project: Set(_project.id), + balance: Set(Decimal::from(200i64)), + currency: Set("USD".to_string()), + user: Set(Some(user.uid)), + updated_at: Set(Utc::now()), + created_at: Set(Utc::now()), + ..Default::default() + }; + billing.insert(&txn).await?; + + let log = project_audit_log::ActiveModel { + project: Set(_project.id), + actor: Set(user.uid), + action: Set("project_create".to_string()), + details: Set(Some(serde_json::json!({ + "project_name": _project.name.clone(), + "project_uid": _project.id, + "is_public": _project.is_public, + "description": _project.description.clone(), + }))), + created_at: Set(Utc::now()), + ..Default::default() + }; + log.insert(&txn).await?; + + txn.commit().await?; + Ok(ProjectInitResponse { + params: inner, + project: ProjectModel { + uid: _project.id, + name: _project.name.clone(), + display_name: _project.display_name.clone(), + avatar_url: _project.avatar_url.clone(), + description: _project.description.clone(), + is_public: _project.is_public, + workspace_id: _project.workspace_id, + created_by: _project.created_by, + created_at: _project.created_at, + updated_at: _project.updated_at, + }, + }) + } +} diff --git a/libs/service/project/invitation.rs b/libs/service/project/invitation.rs new file mode 100644 index 0000000..3d180c8 --- /dev/null +++ b/libs/service/project/invitation.rs @@ -0,0 +1,503 @@ +use crate::AppService; +use crate::error::AppError; +use chrono::{DateTime, Utc}; +use models::projects::{ + MemberRole, project_audit_log, project_member_invitations, project_members, +}; +use models::users::{user, user_email}; +use sea_orm::*; +use serde::{Deserialize, Serialize}; +use session::Session; +use uuid::Uuid; + 
+#[derive(Deserialize, Serialize, Clone, Debug, utoipa::ToSchema)] +pub struct InvitationResponse { + pub project_uid: Uuid, + pub user_uid: Uuid, + pub invited_by: Uuid, + pub scope: String, + pub accepted: bool, + pub accepted_at: Option>, + pub rejected: bool, + pub rejected_at: Option>, + pub created_at: DateTime, +} + +#[derive(Deserialize, Serialize, Clone, Debug, utoipa::ToSchema)] +pub struct InvitationListResponse { + pub invitations: Vec, + pub total: u64, + pub page: u64, + pub per_page: u64, +} + +impl From for InvitationResponse { + fn from(invitation: project_member_invitations::Model) -> Self { + InvitationResponse { + project_uid: invitation.project, + user_uid: invitation.user, + invited_by: invitation.invited_by, + scope: invitation.scope, + accepted: invitation.accepted, + accepted_at: invitation.accepted_at, + rejected: invitation.rejected, + rejected_at: invitation.rejected_at, + created_at: invitation.created_at, + } + } +} + +impl AppService { + pub async fn project_get_invitations( + &self, + project_name: String, + page: Option, + per_page: Option, + ctx: &Session, + ) -> Result { + let _user_uid = ctx.user().ok_or(AppError::Unauthorized)?; + let project = self.utils_find_project_by_name(project_name).await?; + + let role = self + .utils_project_context_role(&ctx, project.name.clone()) + .await + .map_err(|_| AppError::NoPower)?; + + if role != MemberRole::Owner && role != MemberRole::Admin { + return Err(AppError::NoPower); + } + + let page = page.unwrap_or(1).max(1); + let per_page = per_page.unwrap_or(20).clamp(1, 200); + + let invitations = project_member_invitations::Entity::find() + .filter(project_member_invitations::Column::Project.eq(project.id)) + .order_by_desc(project_member_invitations::Column::CreatedAt) + .paginate(&self.db, per_page) + .fetch_page(page - 1) + .await?; + + let total = project_member_invitations::Entity::find() + .filter(project_member_invitations::Column::Project.eq(project.id)) + .count(&self.db) + .await?; + + let invitations = 
invitations + .into_iter() + .map(InvitationResponse::from) + .collect(); + + Ok(InvitationListResponse { + invitations, + total, + page, + per_page, + }) + } + + pub async fn project_get_my_invitations( + &self, + page: Option, + per_page: Option, + ctx: &Session, + ) -> Result { + let user_uid = ctx.user().ok_or(AppError::Unauthorized)?; + + let page = page.unwrap_or(1).max(1); + let per_page = per_page.unwrap_or(20).clamp(1, 200); + + let invitations = project_member_invitations::Entity::find() + .filter(project_member_invitations::Column::User.eq(user_uid)) + .filter(project_member_invitations::Column::Accepted.eq(false)) + .filter(project_member_invitations::Column::Rejected.eq(false)) + .order_by_desc(project_member_invitations::Column::CreatedAt) + .paginate(&self.db, per_page) + .fetch_page(page - 1) + .await?; + + let total = project_member_invitations::Entity::find() + .filter(project_member_invitations::Column::User.eq(user_uid)) + .filter(project_member_invitations::Column::Accepted.eq(false)) + .filter(project_member_invitations::Column::Rejected.eq(false)) + .count(&self.db) + .await?; + + let invitations = invitations + .into_iter() + .map(InvitationResponse::from) + .collect(); + + Ok(InvitationListResponse { + invitations, + total, + page, + per_page, + }) + } + + pub async fn project_invite_user( + &self, + project_name: String, + invitee_email: String, + scope: MemberRole, + ctx: &Session, + ) -> Result<(), AppError> { + let inviter_uid = ctx.user().ok_or(AppError::Unauthorized)?; + let project = self + .utils_find_project_by_name(project_name.clone()) + .await?; + + let role = self + .utils_project_context_role(&ctx, project_name.clone()) + .await + .map_err(|_| AppError::NoPower)?; + + if role != MemberRole::Owner && role != MemberRole::Admin { + return Err(AppError::NoPower); + } + let target_user = user_email::Entity::find() + .filter(user_email::Column::Email.eq(invitee_email.clone())) + .one(&self.db) + .await? 
+ .ok_or(AppError::UserNotFound)?; + let user = user::Entity::find_by_id(target_user.user) + .one(&self.db) + .await? + .ok_or(AppError::UserNotFound)?; + let target_uid = user.uid; + + let existing_member = project_members::Entity::find() + .filter(project_members::Column::Project.eq(project.id)) + .filter(project_members::Column::User.eq(target_uid)) + .one(&self.db) + .await?; + + if existing_member.is_some() { + return Err(AppError::InternalServerError( + "User is already a member of this project".to_string(), + )); + } + + let existing_invitation = project_member_invitations::Entity::find() + .filter(project_member_invitations::Column::Project.eq(project.id)) + .filter(project_member_invitations::Column::User.eq(target_uid)) + .filter(project_member_invitations::Column::Accepted.eq(false)) + .filter(project_member_invitations::Column::Rejected.eq(false)) + .one(&self.db) + .await?; + + if existing_invitation.is_some() { + return Err(AppError::InternalServerError( + "An invitation already exists for this user".to_string(), + )); + } + + let txn = self.db.begin().await.map_err(|_| AppError::InternalError)?; + + let invitation = project_member_invitations::ActiveModel { + id: Default::default(), + project: Set(project.id), + user: Set(target_uid), + invited_by: Set(inviter_uid), + scope: Set(scope.clone().to_string()), + accepted: Set(false), + accepted_at: Set(None), + rejected: Set(false), + rejected_at: Set(None), + created_at: Set(Utc::now()), + }; + invitation.insert(&txn).await?; + + let log = project_audit_log::ActiveModel { + project: Set(project.id), + actor: Set(inviter_uid), + action: Set("invite_user".to_string()), + details: Set(Some(serde_json::json!({ + "invitee_uid": target_uid, + "scope": format!("{:?}", scope), + "project_name": project.name.clone(), + }))), + created_at: Set(Utc::now()), + ..Default::default() + }; + log.insert(&txn).await?; + + txn.commit().await?; + + let _ = self + .project_log_activity( + project.id, + None, + inviter_uid, 
+ super::activity::ActivityLogParams { + event_type: "member_invite".to_string(), + title: format!("{} invited {} to the project", inviter_uid, user.username), + repo_id: None, + content: None, + event_id: None, + event_sub_id: None, + metadata: Some(serde_json::json!({ + "invitee_uid": target_uid, + "invitee_username": user.username, + "scope": format!("{:?}", scope), + })), + is_private: false, + }, + ) + .await; + + let inviter = user::Entity::find_by_id(inviter_uid) + .one(&self.db) + .await? + .ok_or(AppError::UserNotFound)?; + + let envelope = queue::EmailEnvelope { + id: Uuid::new_v4(), + to: invitee_email, + subject: format!("You've been invited to join project: {}", project.name), + body: format!( + "Hello {},\n\n\ + {} has invited you to join the project \"{}\" with the role of {:?}.\n\n\ + Please log in to your account to accept or decline this invitation.\n\n\ + Project: {}\n\ + Role: {:?}\n\ + Invited by: {}\n\n\ + Best regards,\n\ + GitDataAI Team", + user.username, + inviter.username, + project.name, + scope, + project.name, + scope, + inviter.username + ), + created_at: chrono::Utc::now(), + }; + + if let Err(_e) = self.queue_producer.publish_email(envelope).await { + // Failed to queue invitation email + } + Ok(()) + } + + pub async fn project_accept_invitation( + &self, + project_name: String, + ctx: &Session, + ) -> Result<(), AppError> { + let user_uid = ctx.user().ok_or(AppError::Unauthorized)?; + let project = self + .utils_find_project_by_name(project_name.clone()) + .await?; + let invitation = project_member_invitations::Entity::find() + .filter(project_member_invitations::Column::Project.eq(project.id)) + .filter(project_member_invitations::Column::User.eq(user_uid)) + .one(&self.db) + .await? 
+ .ok_or(AppError::NotFound( + "No pending invitation found for this project".to_string(), + ))?; + if invitation.accepted || invitation.rejected { + return Err(AppError::InternalServerError( + "Invitation already processed".to_string(), + )); + } + let txn = self.db.begin().await.map_err(|_| AppError::InternalError)?; + let mut active_invitation: project_member_invitations::ActiveModel = + invitation.clone().into(); + active_invitation.accepted = Set(true); + active_invitation.accepted_at = Set(Some(Utc::now())); + active_invitation.update(&txn).await?; + let existing_member = project_members::Entity::find() + .filter(project_members::Column::Project.eq(project.id)) + .filter(project_members::Column::User.eq(user_uid)) + .one(&txn) + .await?; + if existing_member.is_none() { + let member = project_members::ActiveModel { + id: Default::default(), + project: Set(project.id), + user: Set(user_uid), + scope: Set(invitation.scope.clone()), + joined_at: Set(Utc::now()), + }; + member.insert(&txn).await?; + } + let log = project_audit_log::ActiveModel { + project: Set(project.id), + actor: Set(user_uid), + action: Set("accept_invitation".to_string()), + details: Set(Some(serde_json::json!({ + "project_name": project.name.clone(), + "scope": format!("{:?}", invitation.scope), + }))), + created_at: Set(Utc::now()), + ..Default::default() + }; + log.insert(&txn).await?; + txn.commit().await?; + + let actor_username = user::Entity::find_by_id(user_uid) + .one(&self.db) + .await + .ok() + .flatten() + .map(|u| u.username) + .unwrap_or_else(|| user_uid.to_string()); + let _ = self + .project_log_activity( + project.id, + None, + user_uid, + super::activity::ActivityLogParams { + event_type: "member_join".to_string(), + title: format!("{} joined the project", actor_username), + repo_id: None, + content: None, + event_id: None, + event_sub_id: None, + metadata: Some(serde_json::json!({ + "scope": format!("{:?}", invitation.scope), + })), + is_private: false, + }, + ) + .await; + 
Ok(()) + } + + pub async fn project_reject_invitation( + &self, + project_name: String, + ctx: &Session, + ) -> Result<(), AppError> { + let user_uid = ctx.user().ok_or(AppError::Unauthorized)?; + let project = self + .utils_find_project_by_name(project_name.clone()) + .await?; + let invitation = project_member_invitations::Entity::find() + .filter(project_member_invitations::Column::Project.eq(project.id)) + .filter(project_member_invitations::Column::User.eq(user_uid)) + .one(&self.db) + .await? + .ok_or(AppError::NotFound( + "No pending invitation found for this project".to_string(), + ))?; + if invitation.accepted || invitation.rejected { + return Err(AppError::InternalServerError( + "Invitation already processed".to_string(), + )); + } + let txn = self.db.begin().await.map_err(|_| AppError::InternalError)?; + let mut active_invitation: project_member_invitations::ActiveModel = invitation.into(); + active_invitation.rejected = Set(true); + active_invitation.rejected_at = Set(Some(Utc::now())); + active_invitation.update(&txn).await?; + let log = project_audit_log::ActiveModel { + project: Set(project.id), + actor: Set(user_uid), + action: Set("reject_invitation".to_string()), + details: Set(Some(serde_json::json!({ + "project_name": project.name.clone(), + }))), + created_at: Set(Utc::now()), + ..Default::default() + }; + log.insert(&txn).await?; + txn.commit().await?; + + let _ = self + .project_log_activity( + project.id, + None, + user_uid, + super::activity::ActivityLogParams { + event_type: "invitation_rejected".to_string(), + title: format!("{} rejected invitation to join the project", user_uid), + repo_id: None, + content: None, + event_id: None, + event_sub_id: None, + metadata: Some(serde_json::json!({ + "project_name": project.name.clone(), + })), + is_private: false, + }, + ) + .await; + Ok(()) + } + + pub async fn project_cancel_invitation( + &self, + project_name: String, + invitee_uid: Uuid, + ctx: &Session, + ) -> Result<(), AppError> { + let 
user_uid = ctx.user().ok_or(AppError::Unauthorized)?; + let project = self + .utils_find_project_by_name(project_name.clone()) + .await?; + let invitation = project_member_invitations::Entity::find() + .filter(project_member_invitations::Column::Project.eq(project.id)) + .filter(project_member_invitations::Column::User.eq(invitee_uid)) + .one(&self.db) + .await? + .ok_or(AppError::NotFound( + "No invitation found for this user".to_string(), + ))?; + let role = self + .utils_project_context_role(&ctx, project_name.clone()) + .await + .map_err(|_| AppError::NoPower)?; + if role != MemberRole::Owner + && role != MemberRole::Admin + && invitation.invited_by != user_uid + { + return Err(AppError::NoPower); + } + let txn = self.db.begin().await.map_err(|_| AppError::InternalError)?; + project_member_invitations::Entity::delete_many() + .filter(project_member_invitations::Column::Project.eq(project.id)) + .filter(project_member_invitations::Column::User.eq(invitee_uid)) + .exec(&txn) + .await?; + + let log = project_audit_log::ActiveModel { + project: Set(project.id), + actor: Set(user_uid), + action: Set("cancel_invitation".to_string()), + details: Set(Some(serde_json::json!({ + "invitee_uid": invitee_uid, + "project_name": project.name.clone(), + }))), + created_at: Set(Utc::now()), + ..Default::default() + }; + log.insert(&txn).await?; + txn.commit().await?; + + let _ = self + .project_log_activity( + project.id, + None, + user_uid, + super::activity::ActivityLogParams { + event_type: "invitation_cancelled".to_string(), + title: format!("{} cancelled invitation for user {}", user_uid, invitee_uid), + repo_id: None, + content: None, + event_id: None, + event_sub_id: None, + metadata: Some(serde_json::json!({ + "invitee_uid": invitee_uid, + "project_name": project.name.clone(), + })), + is_private: false, + }, + ) + .await; + Ok(()) + } +} diff --git a/libs/service/project/join_answers.rs b/libs/service/project/join_answers.rs new file mode 100644 index 0000000..245e543 
--- /dev/null +++ b/libs/service/project/join_answers.rs @@ -0,0 +1,134 @@ +use crate::AppService; +use crate::error::AppError; +use chrono::{DateTime, Utc}; +use models::projects::{ + project_audit_log, project_member_join_answers, project_member_join_request, +}; +use sea_orm::*; +use serde::{Deserialize, Serialize}; +use session::Session; + +#[derive(Debug, Clone, Deserialize, Serialize, utoipa::ToSchema)] +pub struct AnswerRequest { + pub question: String, + pub answer: String, +} + +#[derive(Debug, Clone, Serialize, Deserialize, utoipa::ToSchema)] +pub struct JoinAnswerResponse { + pub question: String, + pub answer: String, + pub created_at: DateTime, +} + +#[derive(Debug, Clone, Serialize, Deserialize, utoipa::ToSchema)] +pub struct JoinAnswersListResponse { + pub request_id: i64, + pub project_uid: String, + pub answers: Vec, +} + +impl AppService { + pub async fn project_get_join_answers( + &self, + project_name: String, + request_id: i64, + ctx: &Session, + ) -> Result { + let _user_uid = ctx.user().ok_or(AppError::Unauthorized)?; + let project = self.utils_find_project_by_name(project_name).await?; + + // Verify the request exists and belongs to this project + let join_request = project_member_join_request::Entity::find_by_id(request_id) + .filter(project_member_join_request::Column::Project.eq(project.id)) + .one(&self.db) + .await? 
+ .ok_or(AppError::NotFound("Join request not found".to_string()))?; + + let answers = project_member_join_answers::Entity::find() + .filter(project_member_join_answers::Column::RequestId.eq(request_id)) + .order_by_asc(project_member_join_answers::Column::Id) + .all(&self.db) + .await?; + + let answer_responses = answers + .into_iter() + .map(|a| JoinAnswerResponse { + question: a.question, + answer: a.answer, + created_at: a.created_at, + }) + .collect(); + + Ok(JoinAnswersListResponse { + request_id: join_request.id, + project_uid: project.id.to_string(), + answers: answer_responses, + }) + } + + pub async fn project_submit_join_answers( + &self, + project_name: String, + request_id: i64, + answers: Vec, + ctx: &Session, + ) -> Result<(), AppError> { + let user_uid = ctx.user().ok_or(AppError::Unauthorized)?; + let project = self + .utils_find_project_by_name(project_name.clone()) + .await?; + + // Verify the request exists, belongs to this project, and belongs to the user + let join_request = project_member_join_request::Entity::find_by_id(request_id) + .filter(project_member_join_request::Column::Project.eq(project.id)) + .filter(project_member_join_request::Column::User.eq(user_uid)) + .one(&self.db) + .await? 
+ .ok_or(AppError::NotFound("Join request not found".to_string()))?; + + if join_request.status != "pending" { + return Err(AppError::InternalServerError( + "Cannot update answers for processed request".to_string(), + )); + } + + // Delete existing answers + project_member_join_answers::Entity::delete_many() + .filter(project_member_join_answers::Column::RequestId.eq(request_id)) + .exec(&self.db) + .await?; + + let txn = self.db.begin().await?; + + // Insert new answers + for answer in answers { + let answer_model = project_member_join_answers::ActiveModel { + id: Default::default(), + project: Set(project.id), + user: Set(user_uid), + request_id: Set(request_id), + question: Set(answer.question), + answer: Set(answer.answer), + created_at: Set(Utc::now()), + }; + answer_model.insert(&txn).await?; + } + + let log = project_audit_log::ActiveModel { + project: Set(project.id), + actor: Set(user_uid), + action: Set("submit_join_answers".to_string()), + details: Set(Some(serde_json::json!({ + "request_id": request_id, + }))), + created_at: Set(Utc::now()), + ..Default::default() + }; + log.insert(&txn).await?; + + txn.commit().await?; + + Ok(()) + } +} diff --git a/libs/service/project/join_request.rs b/libs/service/project/join_request.rs new file mode 100644 index 0000000..891749a --- /dev/null +++ b/libs/service/project/join_request.rs @@ -0,0 +1,506 @@ +use crate::AppService; +use crate::error::AppError; +use chrono::{DateTime, Utc}; +use models::projects::{ + MemberRole, project_audit_log, project_member_join_answers, project_member_join_request, + project_member_join_settings, project_members, +}; +use models::users::user; +use sea_orm::*; +use serde::{Deserialize, Serialize}; +use session::Session; +use uuid::Uuid; + +use super::join_answers::AnswerRequest; + +#[derive(Debug, Clone, Serialize, Deserialize, utoipa::ToSchema)] +pub struct JoinRequestResponse { + pub id: i64, + pub project_uid: Uuid, + pub user_uid: Uuid, + pub username: String, + pub status: 
String,
+    pub message: Option<String>,
+    pub processed_by: Option<Uuid>,
+    pub processed_at: Option<DateTime<Utc>>,
+    pub reject_reason: Option<String>,
+    pub created_at: DateTime<Utc>,
+}
+
+#[derive(Debug, Clone, Serialize, Deserialize, utoipa::ToSchema)]
+pub struct JoinRequestListResponse {
+    pub requests: Vec<JoinRequestResponse>,
+    pub total: u64,
+    pub page: u64,
+    pub per_page: u64,
+}
+
+#[derive(Debug, Clone, Deserialize, Serialize, utoipa::ToSchema)]
+pub struct SubmitJoinRequest {
+    pub message: Option<String>,
+    pub answers: Vec<AnswerRequest>,
+}
+
+#[derive(Debug, Clone, Deserialize, Serialize, utoipa::ToSchema)]
+pub struct ProcessJoinRequest {
+    pub approve: bool,
+    pub scope: MemberRole,
+    pub reject_reason: Option<String>,
+}
+
+impl AppService {
+    /// Lists join requests for `project_name` (Owner/Admin only), optionally
+    /// filtered by `status`, paginated with 1-based `page` (default 1) and
+    /// `per_page` (default 20). `total` reflects the same filters as the page.
+    pub async fn project_get_join_requests(
+        &self,
+        project_name: String,
+        status: Option<String>,
+        page: Option<u64>,
+        per_page: Option<u64>,
+        ctx: &Session,
+    ) -> Result<JoinRequestListResponse, AppError> {
+        let _user_uid = ctx.user().ok_or(AppError::Unauthorized)?;
+        let project = self
+            .utils_find_project_by_name(project_name.clone())
+            .await?;
+
+        let role = self
+            .utils_project_context_role(&ctx, project_name.clone())
+            .await
+            .map_err(|_| AppError::NoPower)?;
+
+        if role != MemberRole::Owner && role != MemberRole::Admin {
+            return Err(AppError::NoPower);
+        }
+
+        let page = page.unwrap_or(1);
+        let per_page = per_page.unwrap_or(20);
+
+        let mut query = project_member_join_request::Entity::find()
+            .filter(project_member_join_request::Column::Project.eq(project.id));
+
+        if let Some(s) = status {
+            query = query.filter(project_member_join_request::Column::Status.eq(s));
+        }
+
+        // FIX: count from the same filtered query (Select is Clone). The total
+        // previously re-queried with only the project filter, so it disagreed
+        // with the page contents whenever a status filter was supplied.
+        let total = query.clone().count(&self.db).await?;
+
+        let requests = query
+            .order_by_desc(project_member_join_request::Column::CreatedAt)
+            .paginate(&self.db, per_page)
+            .fetch_page(page - 1)
+            .await?;
+
+        let user_ids: Vec<Uuid> = requests.iter().map(|r| r.user).collect();
+        let users_data = if user_ids.is_empty() {
+            vec![]
+        } else {
user::Entity::find() + .filter(user::Column::Uid.is_in(user_ids)) + .all(&self.db) + .await? + }; + + let join_requests = requests + .into_iter() + .filter_map(|r| { + let username = users_data + .iter() + .find(|u| u.uid == r.user) + .map(|u| u.username.clone())?; + Some(JoinRequestResponse { + id: r.id, + project_uid: r.project, + user_uid: r.user, + username, + status: r.status, + message: r.message, + processed_by: r.processed_by, + processed_at: r.processed_at, + reject_reason: r.reject_reason, + created_at: r.created_at, + }) + }) + .collect(); + + Ok(JoinRequestListResponse { + requests: join_requests, + total, + page, + per_page, + }) + } + + pub async fn project_submit_join_request( + &self, + project_name: String, + request: SubmitJoinRequest, + ctx: &Session, + ) -> Result { + let user_uid = ctx.user().ok_or(AppError::Unauthorized)?; + let project = self + .utils_find_project_by_name(project_name.clone()) + .await?; + + let existing_member = project_members::Entity::find() + .filter(project_members::Column::Project.eq(project.id)) + .filter(project_members::Column::User.eq(user_uid)) + .one(&self.db) + .await?; + + if existing_member.is_some() { + return Err(AppError::InternalServerError( + "Already a member of this project".to_string(), + )); + } + + let existing_request = project_member_join_request::Entity::find() + .filter(project_member_join_request::Column::Project.eq(project.id)) + .filter(project_member_join_request::Column::User.eq(user_uid)) + .filter(project_member_join_request::Column::Status.eq("pending")) + .one(&self.db) + .await?; + + if existing_request.is_some() { + return Err(AppError::InternalServerError( + "Already have a pending join request".to_string(), + )); + } + + // Get join settings + let settings = project_member_join_settings::Entity::find() + .filter(project_member_join_settings::Column::Project.eq(project.id)) + .one(&self.db) + .await?; + + // Clone message for audit log before moving + let message = 
request.message.clone(); + + let txn = self.db.begin().await?; + + let new_request = project_member_join_request::ActiveModel { + id: Default::default(), + project: Set(project.id), + user: Set(user_uid), + status: Set("pending".to_string()), + message: Set(request.message), + processed_by: Set(None), + processed_at: Set(None), + reject_reason: Set(None), + created_at: Set(Utc::now()), + updated_at: Set(Utc::now()), + }; + let request_model = new_request.insert(&txn).await?; + let request_id = request_model.id; + + // Save answers if questions are required + if let Some(ref s) = settings { + if s.require_questions { + for answer in request.answers { + let answer_model = project_member_join_answers::ActiveModel { + id: Default::default(), + project: Set(project.id), + user: Set(user_uid), + request_id: Set(request_id), + question: Set(answer.question), + answer: Set(answer.answer), + created_at: Set(Utc::now()), + }; + answer_model.insert(&txn).await?; + } + } + } + + let log = project_audit_log::ActiveModel { + project: Set(project.id), + actor: Set(user_uid), + action: Set("submit_join_request".to_string()), + details: Set(Some(serde_json::json!({ + "request_id": request_id, + "message": message, + }))), + created_at: Set(Utc::now()), + ..Default::default() + }; + log.insert(&txn).await?; + + txn.commit().await?; + + let _ = self + .project_log_activity( + project.id, + None, + user_uid, + super::activity::ActivityLogParams { + event_type: "join_request_submit".to_string(), + title: format!("{} requested to join the project", user_uid), + repo_id: None, + content: None, + event_id: None, + event_sub_id: Some(request_id), + metadata: Some(serde_json::json!({ + "request_id": request_id, + })), + is_private: false, + }, + ) + .await; + + Ok(request_id) + } + + pub async fn project_process_join_request( + &self, + project_name: String, + request_id: i64, + process: ProcessJoinRequest, + ctx: &Session, + ) -> Result<(), AppError> { + let user_uid = 
ctx.user().ok_or(AppError::Unauthorized)?; + let project = self + .utils_find_project_by_name(project_name.clone()) + .await?; + + let role = self + .utils_project_context_role(&ctx, project_name.clone()) + .await + .map_err(|_| AppError::NoPower)?; + + if role != MemberRole::Owner && role != MemberRole::Admin { + return Err(AppError::NoPower); + } + + let join_request = project_member_join_request::Entity::find_by_id(request_id) + .filter(project_member_join_request::Column::Project.eq(project.id)) + .one(&self.db) + .await? + .ok_or(AppError::NotFound("Join request not found".to_string()))?; + + if join_request.status != "pending" { + return Err(AppError::InternalServerError( + "Join request already processed".to_string(), + )); + } + + // Clone values before moving into ActiveModel + let request_user = join_request.user; + let action = if process.approve { + "approve_join_request" + } else { + "reject_join_request" + }; + let reject_reason = process.reject_reason.clone(); + + let txn = self.db.begin().await?; + + let mut active_request: project_member_join_request::ActiveModel = join_request.into(); + if process.approve { + active_request.status = Set("approved".to_string()); + active_request.processed_by = Set(Some(user_uid)); + active_request.processed_at = Set(Some(Utc::now())); + + // Add user as member + let member = project_members::ActiveModel { + id: Default::default(), + project: Set(project.id), + user: Set(request_user), + scope: Set(process.scope.to_string()), + joined_at: Set(Utc::now()), + }; + member.insert(&txn).await?; + } else { + active_request.status = Set("rejected".to_string()); + active_request.processed_by = Set(Some(user_uid)); + active_request.processed_at = Set(Some(Utc::now())); + active_request.reject_reason = Set(process.reject_reason); + } + active_request.updated_at = Set(Utc::now()); + active_request.update(&txn).await?; + + let log = project_audit_log::ActiveModel { + project: Set(project.id), + actor: Set(user_uid), + action: 
Set(action.to_string()), + details: Set(Some(serde_json::json!({ + "request_id": request_id, + "approved": process.approve, + "scope": process.scope.to_string(), + "reject_reason": reject_reason, + }))), + created_at: Set(Utc::now()), + ..Default::default() + }; + log.insert(&txn).await?; + + txn.commit().await?; + + let request_username = user::Entity::find_by_id(request_user) + .one(&self.db) + .await + .ok() + .flatten() + .map(|u| u.username) + .unwrap_or_else(|| request_user.to_string()); + let event_type = if process.approve { + "join_request_approve" + } else { + "join_request_reject" + }; + let _ = self + .project_log_activity( + project.id, + None, + user_uid, + super::activity::ActivityLogParams { + event_type: event_type.to_string(), + title: if process.approve { + format!("{} approved {}'s join request", user_uid, request_username) + } else { + format!("{} rejected {}'s join request", user_uid, request_username) + }, + repo_id: None, + content: None, + event_id: None, + event_sub_id: Some(request_id), + metadata: Some(serde_json::json!({ + "request_id": request_id, + "approved": process.approve, + })), + is_private: false, + }, + ) + .await; + Ok(()) + } + + pub async fn project_cancel_join_request( + &self, + project_name: String, + request_id: i64, + ctx: &Session, + ) -> Result<(), AppError> { + let user_uid = ctx.user().ok_or(AppError::Unauthorized)?; + let project = self + .utils_find_project_by_name(project_name.clone()) + .await?; + + let join_request = project_member_join_request::Entity::find_by_id(request_id) + .filter(project_member_join_request::Column::Project.eq(project.id)) + .filter(project_member_join_request::Column::User.eq(user_uid)) + .one(&self.db) + .await? 
+ .ok_or(AppError::NotFound("Join request not found".to_string()))?; + + if join_request.status != "pending" { + return Err(AppError::InternalServerError( + "Only pending requests can be cancelled".to_string(), + )); + } + + let txn = self.db.begin().await?; + + let mut active_request: project_member_join_request::ActiveModel = join_request.into(); + active_request.status = Set("cancelled".to_string()); + active_request.updated_at = Set(Utc::now()); + active_request.update(&txn).await?; + + let log = project_audit_log::ActiveModel { + project: Set(project.id), + actor: Set(user_uid), + action: Set("cancel_join_request".to_string()), + details: Set(Some(serde_json::json!({ + "request_id": request_id, + }))), + created_at: Set(Utc::now()), + ..Default::default() + }; + log.insert(&txn).await?; + + txn.commit().await?; + + let _ = self + .project_log_activity( + project.id, + None, + user_uid, + super::activity::ActivityLogParams { + event_type: "join_request_cancel".to_string(), + title: format!("{} cancelled their join request", user_uid), + repo_id: None, + content: None, + event_id: None, + event_sub_id: Some(request_id), + metadata: Some(serde_json::json!({ + "request_id": request_id, + })), + is_private: false, + }, + ) + .await; + Ok(()) + } + + pub async fn project_get_my_join_requests( + &self, + page: Option, + per_page: Option, + ctx: &Session, + ) -> Result { + let user_uid = ctx.user().ok_or(AppError::Unauthorized)?; + + let page = page.unwrap_or(1); + let per_page = per_page.unwrap_or(20); + + let requests = project_member_join_request::Entity::find() + .filter(project_member_join_request::Column::User.eq(user_uid)) + .order_by_desc(project_member_join_request::Column::CreatedAt) + .paginate(&self.db, per_page) + .fetch_page(page - 1) + .await?; + + let total = project_member_join_request::Entity::find() + .filter(project_member_join_request::Column::User.eq(user_uid)) + .count(&self.db) + .await?; + + let project_ids: Vec<_> = requests.iter().map(|r| 
r.project).collect(); + let projects = if project_ids.is_empty() { + vec![] + } else { + models::projects::project::Entity::find() + .filter(models::projects::project::Column::Id.is_in(project_ids)) + .all(&self.db) + .await? + }; + + let join_requests = requests + .into_iter() + .filter_map(|r| { + let project_name = projects + .iter() + .find(|p| p.id == r.project) + .map(|p| p.name.clone())?; + Some(JoinRequestResponse { + id: r.id, + project_uid: r.project, + user_uid: r.user, + username: project_name, + status: r.status, + message: r.message, + processed_by: r.processed_by, + processed_at: r.processed_at, + reject_reason: r.reject_reason, + created_at: r.created_at, + }) + }) + .collect(); + + Ok(JoinRequestListResponse { + requests: join_requests, + total, + page, + per_page, + }) + } +} diff --git a/libs/service/project/join_settings.rs b/libs/service/project/join_settings.rs new file mode 100644 index 0000000..5bd0032 --- /dev/null +++ b/libs/service/project/join_settings.rs @@ -0,0 +1,139 @@ +use crate::AppService; +use crate::error::AppError; +use chrono::Utc; +use models::projects::{project_audit_log, project_member_join_settings}; +use sea_orm::*; +use serde::{Deserialize, Serialize}; +use session::Session; + +#[derive(Debug, Clone, Serialize, Deserialize, utoipa::ToSchema)] +pub struct JoinSettingsResponse { + pub project_uid: String, + pub require_approval: bool, + pub require_questions: bool, + pub questions: serde_json::Value, +} + +#[derive(Debug, Clone, Deserialize, Serialize, utoipa::ToSchema)] +pub struct UpdateJoinSettingsRequest { + pub require_approval: bool, + pub require_questions: bool, + pub questions: Vec, +} + +#[derive(Debug, Clone, Deserialize, Serialize, utoipa::ToSchema)] +pub struct QuestionSchema { + pub question: String, +} + +impl AppService { + pub async fn project_get_join_settings( + &self, + project_name: String, + ctx: &Session, + ) -> Result { + let _user_uid = ctx.user().ok_or(AppError::Unauthorized)?; + let project = 
self.utils_find_project_by_name(project_name).await?; + + let settings = project_member_join_settings::Entity::find() + .filter(project_member_join_settings::Column::Project.eq(project.id)) + .one(&self.db) + .await?; + + match settings { + Some(s) => Ok(JoinSettingsResponse { + project_uid: project.id.to_string(), + require_approval: s.require_approval, + require_questions: s.require_questions, + questions: s.questions, + }), + None => Ok(JoinSettingsResponse { + project_uid: project.id.to_string(), + require_approval: false, + require_questions: false, + questions: serde_json::json!([]), + }), + } + } + + pub async fn project_update_join_settings( + &self, + project_name: String, + request: UpdateJoinSettingsRequest, + ctx: &Session, + ) -> Result { + let user_uid = ctx.user().ok_or(AppError::Unauthorized)?; + let project = self + .utils_find_project_by_name(project_name.clone()) + .await?; + + let role = self + .utils_project_context_role(&ctx, project_name.clone()) + .await + .map_err(|_| AppError::NoPower)?; + + if role != models::projects::MemberRole::Owner + && role != models::projects::MemberRole::Admin + { + return Err(AppError::NoPower); + } + + let txn = self.db.begin().await?; + + let existing = project_member_join_settings::Entity::find() + .filter(project_member_join_settings::Column::Project.eq(project.id)) + .one(&txn) + .await?; + + let questions_json: serde_json::Value = serde_json::json!( + request + .questions + .iter() + .map(|q| q.question.clone()) + .collect::>() + ); + + if let Some(settings) = existing { + let mut active: project_member_join_settings::ActiveModel = settings.into(); + active.require_approval = Set(request.require_approval); + active.require_questions = Set(request.require_questions); + active.questions = Set(questions_json.clone()); + active.updated_at = Set(Utc::now()); + active.update(&txn).await?; + } else { + let new_settings = project_member_join_settings::ActiveModel { + id: Default::default(), + project: 
Set(project.id), + require_approval: Set(request.require_approval), + require_questions: Set(request.require_questions), + questions: Set(questions_json.clone()), + created_at: Set(Utc::now()), + updated_at: Set(Utc::now()), + }; + new_settings.insert(&txn).await?; + } + + let log = project_audit_log::ActiveModel { + project: Set(project.id), + actor: Set(user_uid), + action: Set("update_join_settings".to_string()), + details: Set(Some(serde_json::json!({ + "require_approval": request.require_approval, + "require_questions": request.require_questions, + "questions_count": request.questions.len(), + }))), + created_at: Set(Utc::now()), + ..Default::default() + }; + log.insert(&txn).await?; + + txn.commit().await?; + + Ok(JoinSettingsResponse { + project_uid: project.id.to_string(), + require_approval: request.require_approval, + require_questions: request.require_questions, + questions: questions_json, + }) + } +} diff --git a/libs/service/project/labels.rs b/libs/service/project/labels.rs new file mode 100644 index 0000000..a340378 --- /dev/null +++ b/libs/service/project/labels.rs @@ -0,0 +1,354 @@ +use crate::AppService; +use crate::error::AppError; +use chrono::Utc; +use models::projects::{MemberRole, project_audit_log, project_label}; +use models::system::label; +use sea_orm::*; +use serde::{Deserialize, Serialize}; +use session::Session; +use uuid::Uuid; + +#[derive(Deserialize, Serialize, Clone, Debug, utoipa::ToSchema)] +pub struct CreateLabelParams { + pub name: String, + pub color: String, + pub description: Option, +} + +#[derive(Deserialize, Serialize, Clone, Debug, utoipa::ToSchema)] +pub struct UpdateLabelParams { + pub name: Option, + pub color: Option, + pub description: Option, +} + +#[derive(Deserialize, Serialize, Clone, Debug, utoipa::ToSchema)] +pub struct LabelResponse { + pub id: i64, + pub project_uid: Uuid, + pub name: String, + pub color: String, + pub description: Option, + pub created_at: chrono::DateTime, +} + +#[derive(Deserialize, 
Serialize, Clone, Debug, utoipa::ToSchema)] +pub struct LabelListResponse { + pub labels: Vec, + pub total: usize, +} + +impl AppService { + pub async fn project_create_label( + &self, + project_name: String, + params: CreateLabelParams, + ctx: &Session, + ) -> Result { + let user_uid = ctx.user().ok_or(AppError::Unauthorized)?; + let project = self.utils_find_project_by_name(project_name).await?; + + self.utils_check_project_permission( + &project.id, + user_uid, + &[MemberRole::Admin, MemberRole::Owner], + ) + .await?; + + // Create the label in system::label table + let label_model = label::ActiveModel { + project: Set(project.id), + name: Set(params.name.clone()), + color: Set(params.color.clone()), + ..Default::default() + }; + + let created_system_label = label_model.insert(&self.db).await?; + + // Create the project-label relation + let project_label = project_label::ActiveModel { + project: Set(project.id), + label: Set(created_system_label.id), + relation_at: Set(Utc::now()), + ..Default::default() + }; + + let created_project_label = project_label.insert(&self.db).await?; + + let _ = self + .project_log_activity( + project.id, + None, + user_uid, + super::activity::ActivityLogParams { + event_type: "label_create".to_string(), + title: format!("{} created label '{}'", user_uid, params.name), + repo_id: None, + content: None, + event_id: None, + event_sub_id: Some(created_project_label.id), + metadata: Some(serde_json::json!({ + "label_id": created_project_label.id, + "label_name": params.name, + "color": params.color, + })), + is_private: false, + }, + ) + .await; + + let log = project_audit_log::ActiveModel { + project: Set(project.id), + actor: Set(user_uid), + action: Set("label_create".to_string()), + details: Set(Some(serde_json::json!({ + "label_id": created_project_label.id, + "label_name": params.name, + "color": params.color, + }))), + created_at: Set(Utc::now()), + ..Default::default() + }; + log.insert(&self.db).await?; + + Ok(LabelResponse { + 
id: created_project_label.id, + project_uid: project.id, + name: params.name, + color: params.color, + description: params.description, + created_at: created_project_label.relation_at, + }) + } + + pub async fn project_get_labels( + &self, + project_name: String, + ) -> Result { + let project = self.utils_find_project_by_name(project_name).await?; + + // Find all project-label relations + let project_label_relations = project_label::Entity::find() + .filter(project_label::Column::Project.eq(project.id)) + .all(&self.db) + .await?; + + if project_label_relations.is_empty() { + return Ok(LabelListResponse { + labels: vec![], + total: 0, + }); + } + + // Get all label IDs + let label_ids: Vec = project_label_relations.iter().map(|r| r.label).collect(); + + // Fetch label details + let labels = label::Entity::find() + .filter(label::Column::Id.is_in(label_ids)) + .all(&self.db) + .await?; + + let total = labels.len(); + let labels: Vec = project_label_relations + .into_iter() + .filter_map(|relation| { + labels + .iter() + .find(|l| l.id == relation.label) + .map(|l| LabelResponse { + id: relation.id, + project_uid: relation.project, + name: l.name.clone(), + color: l.color.clone(), + description: None, // system::label doesn't have description + created_at: relation.relation_at, + }) + }) + .collect(); + + Ok(LabelListResponse { labels, total }) + } + + pub async fn project_get_label(&self, label_id: i64) -> Result { + let project_label = project_label::Entity::find_by_id(label_id) + .one(&self.db) + .await? + .ok_or(AppError::NotFound("Label not found".to_string()))?; + + let system_label = label::Entity::find_by_id(project_label.label) + .one(&self.db) + .await? 
.ok_or(AppError::NotFound("Label not found".to_string()))?;

        let system_label = label::Entity::find_by_id(project_label.label)
            .one(&self.db)
            .await?
            .ok_or(AppError::NotFound("Label not found".to_string()))?;

        Ok(LabelResponse {
            id: project_label.id,
            project_uid: project_label.project,
            name: system_label.name,
            color: system_label.color,
            description: None,
            created_at: project_label.relation_at,
        })
    }

    /// Update a project's label. Requires Admin/Owner membership.
    ///
    /// Only `name` and `color` are persisted; `system::label` has no
    /// description column, so `description` is accepted but not stored.
    pub async fn project_update_label(
        &self,
        label_id: i64,
        params: UpdateLabelParams,
        ctx: &Session,
    ) -> Result<LabelResponse, AppError> {
        let user_uid = ctx.user().ok_or(AppError::Unauthorized)?;

        let project_label = project_label::Entity::find_by_id(label_id)
            .one(&self.db)
            .await?
            .ok_or(AppError::NotFound("Label not found".to_string()))?;

        self.utils_check_project_permission(
            &project_label.project,
            user_uid,
            &[MemberRole::Admin, MemberRole::Owner],
        )
        .await?;

        let system_label = label::Entity::find_by_id(project_label.label)
            .one(&self.db)
            .await?
            .ok_or(AppError::NotFound("Label not found".to_string()))?;

        let mut active_label: label::ActiveModel = system_label.into();

        // Track which fields the caller asked to change (for audit logs).
        let updated_name = params.name.is_some();
        let updated_color = params.color.is_some();
        let updated_description = params.description.is_some();

        if let Some(name) = params.name {
            active_label.name = Set(name);
        }
        if let Some(color) = params.color {
            active_label.color = Set(color);
        }

        let updated_system_label = active_label.update(&self.db).await?;

        // Activity logging is best-effort; failures must not fail the update.
        let _ = self
            .project_log_activity(
                project_label.project,
                None,
                user_uid,
                super::activity::ActivityLogParams {
                    event_type: "label_update".to_string(),
                    title: format!("{} updated label '{}'", user_uid, updated_system_label.name),
                    repo_id: None,
                    content: None,
                    event_id: None,
                    event_sub_id: Some(label_id),
                    metadata: Some(serde_json::json!({
                        "label_id": label_id,
                        "updated_fields": {
                            "name": updated_name,
                            "color": updated_color,
                            "description": updated_description,
                        }
                    })),
                    is_private: false,
                },
            )
            .await;

        let log = project_audit_log::ActiveModel {
            project: Set(project_label.project),
            actor: Set(user_uid),
            action: Set("label_update".to_string()),
            details: Set(Some(serde_json::json!({
                "label_id": label_id,
                "updated_fields": {
                    "name": updated_name,
                    "color": updated_color,
                    "description": updated_description,
                }
            }))),
            created_at: Set(Utc::now()),
            ..Default::default()
        };
        log.insert(&self.db).await?;

        Ok(LabelResponse {
            id: project_label.id,
            project_uid: project_label.project,
            name: updated_system_label.name,
            color: updated_system_label.color,
            // Fix: report the persisted state. Echoing `params.description`
            // here claimed an update that never happened (no description
            // column) and disagreed with project_get_label, which returns
            // None for the same record.
            description: None,
            created_at: project_label.relation_at,
        })
    }

    /// Delete a project label (and its backing system label).
    /// Requires Admin/Owner membership.
    pub async fn project_delete_label(&self, label_id: i64, ctx: &Session) -> Result<(), AppError> {
        let user_uid = ctx.user().ok_or(AppError::Unauthorized)?;

        let project_label = project_label::Entity::find_by_id(label_id)
            .one(&self.db)
            .await?
            .ok_or(AppError::NotFound("Label not found".to_string()))?;

        self.utils_check_project_permission(
            &project_label.project,
            user_uid,
            &[MemberRole::Admin, MemberRole::Owner],
        )
        .await?;

        let system_label = label::Entity::find_by_id(project_label.label)
            .one(&self.db)
            .await?;

        let deleted_label_name = system_label
            .as_ref()
            .map(|l| l.name.clone())
            .unwrap_or_default();
        let _ = self
            .project_log_activity(
                project_label.project,
                None,
                user_uid,
                super::activity::ActivityLogParams {
                    event_type: "label_delete".to_string(),
                    title: format!("{} deleted label '{}'", user_uid, deleted_label_name),
                    repo_id: None,
                    content: None,
                    event_id: None,
                    event_sub_id: Some(label_id),
                    metadata: Some(serde_json::json!({
                        "label_id": label_id,
                        "label_name": deleted_label_name,
                    })),
                    is_private: false,
                },
            )
            .await;

        let log = project_audit_log::ActiveModel {
            project: Set(project_label.project),
            actor: Set(user_uid),
            action: Set("label_delete".to_string()),
            details: Set(Some(serde_json::json!({
                "label_id": label_id,
                "label_name": system_label.as_ref().map(|l| l.name.clone()),
            }))),
created_at: Set(Utc::now()), + ..Default::default() + }; + log.insert(&self.db).await?; + + project_label::Entity::delete_by_id(label_id) + .exec(&self.db) + .await?; + + // Also delete the system label if it exists + if let Some(sl) = system_label { + label::Entity::delete_by_id(sl.id).exec(&self.db).await?; + } + + Ok(()) + } +} diff --git a/libs/service/project/like.rs b/libs/service/project/like.rs new file mode 100644 index 0000000..fc0bcc9 --- /dev/null +++ b/libs/service/project/like.rs @@ -0,0 +1,199 @@ +use crate::AppService; +use crate::error::AppError; +use models::projects::{project_audit_log, project_like}; +use models::users::user; +use sea_orm::*; +use serde::{Deserialize, Serialize}; +use session::Session; +use uuid::Uuid; + +#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, utoipa::ToSchema)] +pub struct LikeUserInfo { + pub uid: Uuid, + pub username: String, + pub avatar_url: String, +} + +impl AppService { + pub async fn project_like(&self, ctx: &Session, project_name: String) -> Result<(), AppError> { + let user_uid = ctx.user().ok_or(AppError::Unauthorized)?; + let project = self.utils_find_project_by_name(project_name).await?; + + let like_exists = project_like::Entity::find() + .filter(project_like::Column::User.eq(user_uid)) + .filter(project_like::Column::Project.eq(project.id)) + .one(&self.db) + .await?; + + if like_exists.is_some() { + return Err(AppError::BadRequest( + "Already liked this project".to_string(), + )); + } + + project_like::Entity::insert(project_like::ActiveModel { + project: Set(project.id), + user: Set(user_uid), + created_at: Set(chrono::Utc::now()), + }) + .exec(&self.db) + .await?; + + let _ = self + .project_log_activity( + project.id, + None, + user_uid, + super::activity::ActivityLogParams { + event_type: "project_star".to_string(), + title: format!("{} starred the project", user_uid), + repo_id: None, + content: None, + event_id: None, + event_sub_id: None, + metadata: Some(serde_json::json!({ + 
"project_name": project.name.clone(), + })), + is_private: false, + }, + ) + .await; + + let log = project_audit_log::ActiveModel { + project: Set(project.id), + actor: Set(user_uid), + action: Set("project_like".to_string()), + details: Set(Some(serde_json::json!({ + "project_name": project.name.clone(), + }))), + created_at: Set(chrono::Utc::now()), + ..Default::default() + }; + log.insert(&self.db).await?; + + Ok(()) + } + + pub async fn project_unlike( + &self, + ctx: &Session, + project_name: String, + ) -> Result<(), AppError> { + let user_uid = ctx.user().ok_or(AppError::Unauthorized)?; + let project = self.utils_find_project_by_name(project_name).await?; + + let like_exists = project_like::Entity::find() + .filter(project_like::Column::User.eq(user_uid)) + .filter(project_like::Column::Project.eq(project.id)) + .one(&self.db) + .await?; + + if like_exists.is_none() { + return Err(AppError::NotFound("Like not found".to_string())); + } + + project_like::Entity::delete_many() + .filter(project_like::Column::User.eq(user_uid)) + .filter(project_like::Column::Project.eq(project.id)) + .exec(&self.db) + .await?; + + let _ = self + .project_log_activity( + project.id, + None, + user_uid, + super::activity::ActivityLogParams { + event_type: "project_unstar".to_string(), + title: format!("{} unstarred the project", user_uid), + repo_id: None, + content: None, + event_id: None, + event_sub_id: None, + metadata: Some(serde_json::json!({ + "project_name": project.name.clone(), + })), + is_private: false, + }, + ) + .await; + + let log = project_audit_log::ActiveModel { + project: Set(project.id), + actor: Set(user_uid), + action: Set("project_unlike".to_string()), + details: Set(Some(serde_json::json!({ + "project_name": project.name.clone(), + }))), + created_at: Set(chrono::Utc::now()), + ..Default::default() + }; + log.insert(&self.db).await?; + + Ok(()) + } + + pub async fn project_is_like( + &self, + ctx: &Session, + project_name: String, + ) -> Result { + let 
user_uid = ctx.user().ok_or(AppError::Unauthorized)?;
        let project = self.utils_find_project_by_name(project_name).await?;

        let like_exists = project_like::Entity::find()
            .filter(project_like::Column::User.eq(user_uid))
            .filter(project_like::Column::Project.eq(project.id))
            .one(&self.db)
            .await?;

        Ok(like_exists.is_some())
    }

    /// Total number of likes on a project.
    pub async fn project_likes(&self, project_name: String) -> Result<u64, AppError> {
        let project = self.utils_find_project_by_name(project_name).await?;

        let likes = project_like::Entity::find()
            .filter(project_like::Column::Project.eq(project.id))
            .count(&self.db)
            .await?;

        Ok(likes)
    }

    /// Page through the users who liked a project, most recent like first.
    pub async fn project_like_user_list(
        &self,
        project_name: String,
        pager: crate::Pager,
    ) -> Result<Vec<LikeUserInfo>, AppError> {
        let project = self.utils_find_project_by_name(project_name).await?;

        let likes = project_like::Entity::find()
            .filter(project_like::Column::Project.eq(project.id))
            .order_by_desc(project_like::Column::CreatedAt)
            .limit(pager.par_page as u64)
            // Fix: `pager.page - 1` underflowed (panic in debug builds, a
            // huge offset in release) when a caller passed page = 0; treat
            // page 0 and page 1 the same.
            .offset((pager.page.saturating_sub(1) * pager.par_page) as u64)
            .all(&self.db)
            .await?;

        let user_uids: Vec<Uuid> = likes.into_iter().map(|like| like.user).collect();

        let users = if user_uids.is_empty() {
            vec![]
        } else {
            // Fix: `is_in` returns rows in database order, which discarded
            // the newest-first like ordering; re-project the fetched users
            // onto the ordered uid list.
            let fetched = user::Entity::find()
                .filter(user::Column::Uid.is_in(user_uids.clone()))
                .all(&self.db)
                .await?;
            user_uids
                .iter()
                .filter_map(|uid| fetched.iter().find(|u| u.uid == *uid))
                .map(|u| LikeUserInfo {
                    uid: u.uid,
                    username: u.username.clone(),
                    avatar_url: u.avatar_url.clone().unwrap_or_default(),
                })
                .collect()
        };

        Ok(users)
    }
}
diff --git a/libs/service/project/members.rs b/libs/service/project/members.rs
new file mode 100644
index 0000000..441f658
--- /dev/null
+++ b/libs/service/project/members.rs
@@ -0,0 +1,318 @@
use crate::AppService;
use crate::error::AppError;
use chrono::Utc;
use models::projects::{MemberRole, project_audit_log, project_members};
use models::users::user;
use sea_orm::*;
use serde::{Deserialize, Serialize};
use session::Session;
use utoipa::ToSchema;
use uuid::Uuid;

/// A project member joined with the user's public profile.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, ToSchema)]
pub struct MemberInfo {
    pub user_id: Uuid,
    pub username: String,
    pub display_name: Option<String>,
    pub avatar_url: Option<String>,
    pub scope: MemberRole,
    pub joined_at: chrono::DateTime<Utc>,
}

#[derive(Clone, Debug, Deserialize, Serialize, ToSchema)]
pub struct MemberListResponse {
    pub members: Vec<MemberInfo>,
    pub total: u64,
    pub page: u64,
    pub per_page: u64,
}

#[derive(Clone, Debug, Deserialize, Serialize, ToSchema)]
pub struct UpdateMemberRoleRequest {
    pub user_id: Uuid,
    pub scope: MemberRole,
}

impl AppService {
    /// Paginated member listing for a project (defaults: page 1, 20/page).
    pub async fn project_get_members(
        &self,
        project_name: String,
        page: Option<u64>,
        per_page: Option<u64>,
        ctx: &Session,
    ) -> Result<MemberListResponse, AppError> {
        let user_uid = ctx.user().ok_or(AppError::Unauthorized)?;
        let project = self
            .utils_find_project_by_name(project_name.clone())
            .await?;

        // NOTE(review): the requester's membership is looked up but never
        // enforced — non-members can list members. Confirm this is intended.
        let _requester_member = project_members::Entity::find()
            .filter(project_members::Column::Project.eq(project.id))
            .filter(project_members::Column::User.eq(user_uid))
            .one(&self.db)
            .await?;

        let page = page.unwrap_or(1);
        let per_page = per_page.unwrap_or(20);

        let members = project_members::Entity::find()
            .filter(project_members::Column::Project.eq(project.id))
            .order_by_asc(project_members::Column::JoinedAt)
            .paginate(&self.db,
per_page) + .fetch_page(page - 1) + .await?; + + let total = project_members::Entity::find() + .filter(project_members::Column::Project.eq(project.id)) + .count(&self.db) + .await?; + + let user_ids: Vec = members.iter().map(|m| m.user).collect(); + + let users_data = if user_ids.is_empty() { + vec![] + } else { + user::Entity::find() + .filter(user::Column::Uid.is_in(user_ids)) + .all(&self.db) + .await? + }; + + let member_infos: Vec = members + .into_iter() + .filter_map(|member| { + let role = member.scope_role().ok()?; + users_data + .iter() + .find(|u| u.uid == member.user) + .map(|user| MemberInfo { + user_id: user.uid, + username: user.username.clone(), + display_name: user.display_name.clone(), + avatar_url: user.avatar_url.clone(), + scope: role.clone(), + joined_at: member.joined_at, + }) + }) + .collect(); + + Ok(MemberListResponse { + members: member_infos, + total, + page, + per_page, + }) + } + + pub async fn project_update_member_role( + &self, + project_name: String, + request: UpdateMemberRoleRequest, + ctx: &Session, + ) -> Result<(), AppError> { + let actor_uid = ctx.user().ok_or(AppError::Unauthorized)?; + let project = self + .utils_find_project_by_name(project_name.clone()) + .await?; + + let actor_member = project_members::Entity::find() + .filter(project_members::Column::Project.eq(project.id)) + .filter(project_members::Column::User.eq(actor_uid)) + .one(&self.db) + .await? + .ok_or(AppError::PermissionDenied)?; + + let actor_role = actor_member + .scope_role() + .map_err(|_| AppError::RoleParseError)?; + + if actor_role != MemberRole::Owner && actor_role != MemberRole::Admin { + return Err(AppError::NoPower); + } + + let target_member = project_members::Entity::find() + .filter(project_members::Column::Project.eq(project.id)) + .filter(project_members::Column::User.eq(request.user_id)) + .one(&self.db) + .await? 
.ok_or(AppError::NotFound("Member not found".to_string()))?;

        let target_role = target_member
            .scope_role()
            .map_err(|_| AppError::RoleParseError)?;

        // Owners can never be demoted, and only an owner may grant Admin;
        // granting Owner through this endpoint is never allowed.
        if target_role == MemberRole::Owner {
            return Err(AppError::NoPower);
        }
        if request.scope == MemberRole::Admin && actor_role != MemberRole::Owner {
            return Err(AppError::NoPower);
        }
        if request.scope == MemberRole::Owner {
            return Err(AppError::NoPower);
        }

        let mut active_member: project_members::ActiveModel = target_member.into();
        active_member.scope = Set(request.scope.to_string());
        active_member.update(&self.db).await?;

        // Usernames are only needed for the human-readable activity title;
        // lookup failures degrade to an empty string instead of erroring.
        let actor_username = user::Entity::find_by_id(actor_uid)
            .one(&self.db)
            .await
            .ok()
            .flatten()
            .map(|u| u.username)
            .unwrap_or_default();
        let target_username = user::Entity::find_by_id(request.user_id)
            .one(&self.db)
            .await
            .ok()
            .flatten()
            .map(|u| u.username)
            .unwrap_or_default();
        let _ = self
            .project_log_activity(
                project.id,
                None,
                actor_uid,
                super::activity::ActivityLogParams {
                    event_type: "member_role_change".to_string(),
                    title: format!(
                        "{} changed {}'s role to {}",
                        actor_username, target_username, request.scope
                    ),
                    repo_id: None,
                    content: None,
                    event_id: None,
                    event_sub_id: None,
                    metadata: Some(serde_json::json!({
                        "target_user_id": request.user_id.to_string(),
                        "new_role": request.scope.to_string(),
                    })),
                    is_private: false,
                },
            )
            .await;

        let log = project_audit_log::ActiveModel {
            project: Set(project.id),
            actor: Set(actor_uid),
            action: Set("update_member_role".to_string()),
            details: Set(Some(serde_json::json!({
                "project_name": project.name,
                "target_user_id": request.user_id,
                "new_role": request.scope.to_string(),
            }))),
            created_at: Set(Utc::now()),
            ..Default::default()
        };
        log.insert(&self.db).await?;

        Ok(())
    }

    /// Remove a member from a project. Owners may remove admins and members;
    /// admins may only remove plain members. Owners cannot be removed.
    pub async fn project_remove_member(
        &self,
        project_name: String,
        user_id: Uuid,
        ctx: &Session,
    ) -> Result<(), AppError> {
        let actor_uid = ctx.user().ok_or(AppError::Unauthorized)?;
        let project = self
            .utils_find_project_by_name(project_name.clone())
            .await?;

        let actor_member = project_members::Entity::find()
            .filter(project_members::Column::Project.eq(project.id))
            .filter(project_members::Column::User.eq(actor_uid))
            .one(&self.db)
            .await?
            .ok_or(AppError::PermissionDenied)?;

        let actor_role = actor_member
            .scope_role()
            .map_err(|_| AppError::RoleParseError)?;

        if actor_role != MemberRole::Owner && actor_role != MemberRole::Admin {
            return Err(AppError::NoPower);
        }

        let target_member = project_members::Entity::find()
            .filter(project_members::Column::Project.eq(project.id))
            .filter(project_members::Column::User.eq(user_id))
            .one(&self.db)
            .await?
            .ok_or(AppError::NotFound("Member not found".to_string()))?;

        let target_role = target_member
            .scope_role()
            .map_err(|_| AppError::RoleParseError)?;

        if target_role == MemberRole::Owner {
            return Err(AppError::NoPower);
        }
        if actor_role == MemberRole::Admin && target_role == MemberRole::Admin {
            return Err(AppError::NoPower);
        }

        project_members::Entity::delete_many()
            .filter(project_members::Column::Project.eq(project.id))
            .filter(project_members::Column::User.eq(user_id))
            .exec(&self.db)
            .await?;

        let actor_username = user::Entity::find_by_id(actor_uid)
            .one(&self.db)
            .await
            .ok()
            .flatten()
            .map(|u| u.username)
            .unwrap_or_default();
        let target_username = user::Entity::find_by_id(user_id)
            .one(&self.db)
            .await
            .ok()
            .flatten()
            .map(|u| u.username)
            .unwrap_or_default();
        let _ = self
            .project_log_activity(
                project.id,
                None,
                actor_uid,
                super::activity::ActivityLogParams {
                    event_type: "member_remove".to_string(),
                    title: format!(
                        "{} removed {} from the project",
                        actor_username, target_username
                    ),
                    repo_id: None,
                    content: None,
                    event_id: None,
                    event_sub_id: None,
                    metadata: Some(serde_json::json!({
                        "removed_user_id": user_id.to_string(),
                    })),
                    is_private: false,
                },
            )
            .await;

        let log = project_audit_log::ActiveModel {
            project: Set(project.id),
            actor: Set(actor_uid),
            action: Set("remove_member".to_string()),
            details: Set(Some(serde_json::json!({
                "project_name": project.name,
                "removed_user_id": user_id,
            }))),
            created_at: Set(Utc::now()),
            ..Default::default()
        };
        log.insert(&self.db).await?;

        Ok(())
    }
}
diff --git a/libs/service/project/mod.rs b/libs/service/project/mod.rs
new file mode 100644
index 0000000..435d063
--- /dev/null
+++ b/libs/service/project/mod.rs
@@ -0,0 +1,20 @@
pub mod activity;
pub mod audit;
pub mod avatar;
pub mod billing;
pub mod board;
pub mod can_use;
pub mod info;
pub mod init;
pub mod invitation;
pub mod join_answers;
pub mod join_request;
pub mod join_settings;
pub mod labels;
pub mod like;
pub mod members;
pub mod repo;
// NOTE(review): repo_permission.rs is added by this change but never
// declared here, so it is dead code; it also references `crate::errors` and
// `models::project`, which do not match the paths used by sibling modules.
pub mod settings;
pub mod standard;
pub mod transfer_repo;
pub mod watch;
diff --git a/libs/service/project/repo.rs b/libs/service/project/repo.rs
new file mode 100644
index 0000000..52549c6
--- /dev/null
+++ b/libs/service/project/repo.rs
@@ -0,0 +1,337 @@
use crate::AppService;
use crate::error::AppError;
use chrono::{DateTime, Utc};
use models::projects::{Project, project_members};
use models::repos::repo::{
    ActiveModel as RepoActiveModel, Column as RepoColumn, Entity as RepoEntity,
};
use models::repos::{Repo, RepoBranch, RepoCommit, RepoStar, RepoTag, RepoWatch};
use sea_orm::*;
use serde::{Deserialize, Serialize};
use session::Session;
use std::collections::HashMap;
use std::path::PathBuf;
use utoipa::{IntoParams, ToSchema};
use uuid::Uuid;

/// Query parameters for the project repository listing.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, ToSchema, IntoParams)]
pub struct ProjectRepositoryQuery {
    pub limit: Option<u64>,
    pub cursor: Option<String>,
}

/// A repository row enriched with aggregate counters for list views.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, ToSchema)]
pub struct ProjectRepositoryItem {
    pub uid: Uuid,
    pub repo_name: String,
    pub
description: Option<String>,
    pub default_branch: String,
    pub project_name: String,
    pub is_private: bool,
    pub commit_count: i64,
    pub branch_count: i64,
    pub tag_count: i64,
    pub star_count: i64,
    pub watch_count: i64,
    pub last_commit_at: Option<DateTime<Utc>>,
}

#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, ToSchema)]
pub struct ProjectRepositoryPagination {
    pub items: Vec<ProjectRepositoryItem>,
    pub cursor: Option<String>,
    pub total: u64,
}

#[derive(Debug, Clone, Deserialize, ToSchema)]
pub struct ProjectRepoCreateParams {
    pub repo_name: String,
    pub description: Option<String>,
    /// Default: true. When false, skips bare git init and leaves default_branch empty;
    /// the branch will be auto-detected and set on first push.
    #[serde(default = "default_true")]
    pub init_repo: bool,
    /// Only used when init_repo is true.
    #[serde(default = "default_branch_name")]
    pub default_branch: String,
    #[serde(default)]
    pub is_private: bool,
}

// serde `default` attributes must name functions; these back the fields above.
fn default_true() -> bool {
    true
}

fn default_branch_name() -> String {
    "main".to_string()
}

#[derive(Debug, Clone, Serialize, ToSchema)]
pub struct ProjectRepoCreateResponse {
    pub uid: Uuid,
    pub repo_name: String,
    pub description: Option<String>,
    pub default_branch: String,
    pub project_name: String,
    pub is_private: bool,
    pub storage_path: String,
    pub created_at: DateTime<Utc>,
}

impl AppService {
    /// List a project's repositories with aggregate counters.
    ///
    /// NOTE(review): `query.cursor` is accepted but ignored and the response
    /// cursor is always None — cursor pagination is not implemented yet.
    pub async fn project_repo(
        &self,
        _ctx: &Session,
        project_name: String,
        query: ProjectRepositoryQuery,
    ) -> Result<ProjectRepositoryPagination, AppError> {
        let limit = query.limit.unwrap_or(10);

        let project = Project::find()
            .filter(models::projects::project::Column::Name.eq(&project_name))
            .one(&self.db)
            .await
            .map_err(|e| AppError::DatabaseError(e.to_string()))?
            .ok_or(AppError::ProjectNotFound)?;

        let repo_list = Repo::find()
            .filter(models::repos::repo::Column::Project.eq(project.id))
            .order_by_desc(models::repos::repo::Column::UpdatedAt)
            .limit(limit)
            .all(&self.db)
            .await
            .map_err(|e| AppError::DatabaseError(e.to_string()))?;

        let repo_total = Repo::find()
            .filter(models::repos::repo::Column::Project.eq(project.id))
            .count(&self.db)
            .await
            .map_err(|e| AppError::DatabaseError(e.to_string()))?;

        if repo_list.is_empty() {
            return Ok(ProjectRepositoryPagination {
                items: vec![],
                cursor: None,
                total: repo_total,
            });
        }

        let repo_ids: Vec<Uuid> = repo_list.iter().map(|r| r.id).collect();

        // One grouped COUNT query per counter type, keyed by repo id.
        let commit_counts: HashMap<Uuid, i64> = RepoCommit::find()
            .select_only()
            .column(models::repos::repo_commit::Column::Repo)
            .column_as(models::repos::repo_commit::Column::Id.count(), "count")
            .filter(models::repos::repo_commit::Column::Repo.is_in(repo_ids.clone()))
            .group_by(models::repos::repo_commit::Column::Repo)
            .into_tuple::<(Uuid, i64)>()
            .all(&self.db)
            .await
            .map_err(|e| AppError::DatabaseError(e.to_string()))?
            .into_iter()
            .collect();

        let branch_counts: HashMap<Uuid, i64> = RepoBranch::find()
            .select_only()
            .column(models::repos::repo_branch::Column::Repo)
            .column_as(models::repos::repo_branch::Column::Repo.count(), "count")
            .filter(models::repos::repo_branch::Column::Repo.is_in(repo_ids.clone()))
            .group_by(models::repos::repo_branch::Column::Repo)
            .into_tuple::<(Uuid, i64)>()
            .all(&self.db)
            .await
            .map_err(|e| AppError::DatabaseError(e.to_string()))?
            .into_iter()
            .collect();

        let tag_counts: HashMap<Uuid, i64> = RepoTag::find()
            .select_only()
            .column(models::repos::repo_tag::Column::Repo)
            .column_as(models::repos::repo_tag::Column::Repo.count(), "count")
            .filter(models::repos::repo_tag::Column::Repo.is_in(repo_ids.clone()))
            .group_by(models::repos::repo_tag::Column::Repo)
            .into_tuple::<(Uuid, i64)>()
            .all(&self.db)
            .await
            .map_err(|e| AppError::DatabaseError(e.to_string()))?
            .into_iter()
            .collect();

        let star_counts: HashMap<Uuid, i64> = RepoStar::find()
            .select_only()
            .column(models::repos::repo_star::Column::Repo)
            .column_as(models::repos::repo_star::Column::User.count(), "count")
            .filter(models::repos::repo_star::Column::Repo.is_in(repo_ids.clone()))
            .group_by(models::repos::repo_star::Column::Repo)
            .into_tuple::<(Uuid, i64)>()
            .all(&self.db)
            .await
            .map_err(|e| AppError::DatabaseError(e.to_string()))?
            .into_iter()
            .collect();

        let watch_counts: HashMap<Uuid, i64> = RepoWatch::find()
            .select_only()
            .column(models::repos::repo_watch::Column::Repo)
            .column_as(models::repos::repo_watch::Column::User.count(), "count")
            .filter(models::repos::repo_watch::Column::Repo.is_in(repo_ids.clone()))
            .group_by(models::repos::repo_watch::Column::Repo)
            .into_tuple::<(Uuid, i64)>()
            .all(&self.db)
            .await
            .map_err(|e| AppError::DatabaseError(e.to_string()))?
            .into_iter()
            .collect();

        // NOTE(review): N+1 — one query per repo for the newest commit. With
        // limit <= 10 this is tolerable, but a grouped MAX(created_at) query
        // would do it in one round trip.
        let last_commit_times: HashMap<Uuid, Option<DateTime<Utc>>> = {
            let mut map: HashMap<Uuid, Option<DateTime<Utc>>> = HashMap::new();
            for repo_id in &repo_ids {
                let last_commit = RepoCommit::find()
                    .filter(models::repos::repo_commit::Column::Repo.eq(*repo_id))
                    .order_by_desc(models::repos::repo_commit::Column::CreatedAt)
                    .one(&self.db)
                    .await
                    .map_err(|e| AppError::DatabaseError(e.to_string()))?;

                map.insert(*repo_id, last_commit.map(|c| c.created_at));
            }
            map
        };

        let items: Vec<ProjectRepositoryItem> = repo_list
            .into_iter()
            .map(|r| ProjectRepositoryItem {
                uid: r.id,
                repo_name: r.repo_name,
                description: r.description,
                default_branch: r.default_branch,
                project_name: project.name.clone(),
                is_private: r.is_private,
                commit_count: *commit_counts.get(&r.id).unwrap_or(&0),
                branch_count: *branch_counts.get(&r.id).unwrap_or(&0),
                tag_count: *tag_counts.get(&r.id).unwrap_or(&0),
                star_count: *star_counts.get(&r.id).unwrap_or(&0),
                watch_count: *watch_counts.get(&r.id).unwrap_or(&0),
                last_commit_at: last_commit_times.get(&r.id).and_then(|t| *t),
            })
            .collect();

        Ok(ProjectRepositoryPagination {
            items,
            cursor: None,
            total: repo_total,
        })
    }

    /// Create a repository inside a project. Any project member may create.
    pub async fn project_repo_create(
        &self,
        ctx: &Session,
        project_name: String,
        params: ProjectRepoCreateParams,
    ) -> Result<ProjectRepoCreateResponse, AppError> {
        let user_uid = ctx.user().ok_or(AppError::Unauthorized)?;

        // Find project and verify membership
        let project = self
            .utils_find_project_by_name(project_name.clone())
            .await?;

        let _member = project_members::Entity::find()
            .filter(project_members::Column::Project.eq(project.id))
            .filter(project_members::Column::User.eq(user_uid))
            .one(&self.db)
            .await
            .map_err(|e| AppError::DatabaseError(e.to_string()))?
            .ok_or(AppError::NoPower)?;

        // Check repo name uniqueness within project
        let existing = RepoEntity::find()
            .filter(RepoColumn::Project.eq(project.id))
            .filter(RepoColumn::RepoName.eq(&params.repo_name))
            .one(&self.db)
            .await
            .map_err(|e| AppError::DatabaseError(e.to_string()))?;

        if existing.is_some() {
            return Err(AppError::RepoNameAlreadyExists);
        }

        // Build storage path
        let repos_root = self
            .config
            .repos_root()
            .map_err(|e| AppError::InternalServerError(e.to_string()))?;
        let project_dir: PathBuf = [&repos_root, &project.name].iter().collect();
        let repo_dir: PathBuf = project_dir.join(format!("{}.git", params.repo_name));

        // Only initialize bare git repo if requested.
        // NOTE(review): the bare repo is created on disk BEFORE the DB
        // insert; if the insert below fails, the directory is orphaned.
        if params.init_repo {
            crate::git::GitDomain::init_bare(&repo_dir).map_err(AppError::from)?;
        }

        // Insert DB record
        let repo_id = Uuid::now_v7();
        let now = Utc::now();

        // default_branch is only set when init_repo is true; otherwise it stays empty
        // and will be detected on first push via the sync hook
        let default_branch = if params.init_repo {
            params.default_branch.clone()
        } else {
            String::new()
        };
        let default_branch_for_log = default_branch.clone();

        let repo = RepoActiveModel {
            id: Set(repo_id),
            repo_name: Set(params.repo_name.clone()),
            project: Set(project.id),
            description: Set(params.description.clone()),
            default_branch: Set(default_branch),
            is_private: Set(params.is_private),
            storage_path: Set(repo_dir.to_string_lossy().to_string()),
            created_by: Set(user_uid),
            created_at: Set(now),
            updated_at: Set(now),
            ai_code_review_enabled: Set(false),
        };

        let repo = repo.insert(&self.db).await?;

        // Best-effort activity feed entry; errors are deliberately ignored.
        let _ = self
            .project_log_activity(
                project.id,
                Some(repo.id),
                user_uid,
                super::activity::ActivityLogParams {
                    event_type: "repo_create".to_string(),
                    title: format!("{} created repository '{}'", user_uid, params.repo_name),
                    repo_id: Some(repo.id),
                    content: params.description.clone(),
                    event_id: Some(repo.id),
                    event_sub_id: None,
                    metadata: Some(serde_json::json!({
                        "repo_name": params.repo_name,
                        "default_branch": default_branch_for_log,
                        "is_private": params.is_private,
                        "init_repo": params.init_repo,
                    })),
                    is_private: false,
                },
            )
            .await;

        Ok(ProjectRepoCreateResponse {
            uid: repo.id,
            repo_name: repo.repo_name,
            description: repo.description,
            default_branch: repo.default_branch,
            project_name: project.name,
            is_private: repo.is_private,
            storage_path: repo.storage_path,
            created_at: repo.created_at,
        })
    }
}
diff --git a/libs/service/project/repo_permission.rs b/libs/service/project/repo_permission.rs
new file mode 100644
index 0000000..da74f27
--- /dev/null
+++ b/libs/service/project/repo_permission.rs
@@ -0,0 +1,163 @@
// NOTE(review): this module is not declared in mod.rs and its imports
// (`crate::errors`, `models::project`) differ from the paths used by every
// sibling module (`crate::error`, `models::projects`) — likely stale code.
use crate::AppService;
use crate::errors::AppError;
use session::Session;
use models::active_enums::MemberRole;
use models::project::{project_members, projects};
use models::repos::repo;
use sea_orm::*;
use uuid::Uuid;

/// Access level required for a repository operation.
pub enum RepoPermission {
    Read,
    Write,
}

impl AppService {
    /// Resolve a repo by namespace/name and enforce the required permission.
    /// Read: public project + public repo, or any project member.
    /// Write: Owner or Admin membership only.
    pub async fn check_repo_permission(
        &self,
        ctx: &Session,
        namespace: &str,
        repo_name: &str,
        required_permission: RepoPermission,
    ) -> Result<(repo::Model, projects::Model), AppError> {
        let repository = repo::Entity::find()
            .filter(repo::Column::Namespace.eq(namespace))
            .filter(repo::Column::RepoName.eq(repo_name))
            .one(&self.db)
            .await?
            .ok_or(AppError::NotFound("Repository not found".to_string()))?;

        let project = projects::Entity::find()
            .filter(projects::Column::Name.eq(namespace))
            .one(&self.db)
            .await?
.ok_or(AppError::ProjectNotFound)?;

        // The repo's project link must match the namespace we resolved.
        if repository.project_uid != Some(project.uid) {
            return Err(AppError::NotFound(
                "Repository not found in project".to_string(),
            ));
        }

        let user_uid = ctx.user();

        match required_permission {
            RepoPermission::Read => {
                // Public project + public repo: anyone may read.
                if project.is_public && !repository.is_private {
                    return Ok((repository, project));
                }

                let user_uid = user_uid.ok_or(AppError::Unauthorized)?;

                let member = project_members::Entity::find()
                    .filter(project_members::Column::ProjectId.eq(project.uid))
                    .filter(project_members::Column::UserId.eq(user_uid))
                    .one(&self.db)
                    .await?;

                if member.is_some() {
                    Ok((repository, project))
                } else {
                    Err(AppError::PermissionDenied)
                }
            }
            RepoPermission::Write => {
                let user_uid = user_uid.ok_or(AppError::Unauthorized)?;

                let member = project_members::Entity::find()
                    .filter(project_members::Column::ProjectId.eq(project.uid))
                    .filter(project_members::Column::UserId.eq(user_uid))
                    .one(&self.db)
                    .await?
                    .ok_or(AppError::PermissionDenied)?;

                if member.scope == MemberRole::Owner || member.scope == MemberRole::Admin {
                    Ok((repository, project))
                } else {
                    Err(AppError::PermissionDenied)
                }
            }
        }
    }

    /// Convenience wrapper for read access.
    pub async fn check_repo_read_permission(
        &self,
        ctx: &Session,
        namespace: &str,
        repo_name: &str,
    ) -> Result<(repo::Model, projects::Model), AppError> {
        self.check_repo_permission(ctx, namespace, repo_name, RepoPermission::Read)
            .await
    }

    /// Convenience wrapper for write access.
    pub async fn check_repo_write_permission(
        &self,
        ctx: &Session,
        namespace: &str,
        repo_name: &str,
    ) -> Result<(repo::Model, projects::Model), AppError> {
        self.check_repo_permission(ctx, namespace, repo_name, RepoPermission::Write)
            .await
    }

    /// Same checks as check_repo_permission, but keyed by repo uid.
    pub async fn check_repo_permission_by_uid(
        &self,
        ctx: &Session,
        repo_uid: Uuid,
        required_permission: RepoPermission,
    ) -> Result<(repo::Model, projects::Model), AppError> {
        let repository = repo::Entity::find_by_id(repo_uid)
            .one(&self.db)
            .await?
            .ok_or(AppError::NotFound("Repository not found".to_string()))?;

        let project_uid = repository.project_uid.ok_or(AppError::NotFound(
            "Repository has no associated project".to_string(),
        ))?;

        let project = projects::Entity::find_by_id(project_uid)
            .one(&self.db)
            .await?
            .ok_or(AppError::ProjectNotFound)?;

        let user_uid = ctx.user();

        match required_permission {
            RepoPermission::Read => {
                if project.is_public && !repository.is_private {
                    return Ok((repository, project));
                }

                let user_uid = user_uid.ok_or(AppError::Unauthorized)?;

                let member = project_members::Entity::find()
                    .filter(project_members::Column::ProjectId.eq(project.uid))
                    .filter(project_members::Column::UserId.eq(user_uid))
                    .one(&self.db)
                    .await?;

                if member.is_some() {
                    Ok((repository, project))
                } else {
                    Err(AppError::PermissionDenied)
                }
            }
            RepoPermission::Write => {
                let user_uid = user_uid.ok_or(AppError::Unauthorized)?;

                let member = project_members::Entity::find()
                    .filter(project_members::Column::ProjectId.eq(project.uid))
                    .filter(project_members::Column::UserId.eq(user_uid))
                    .one(&self.db)
                    .await?
                    .ok_or(AppError::PermissionDenied)?;

                if member.scope == MemberRole::Owner || member.scope == MemberRole::Admin {
                    Ok((repository, project))
                } else {
                    Err(AppError::PermissionDenied)
                }
            }
        }
    }
}
diff --git a/libs/service/project/settings.rs b/libs/service/project/settings.rs
new file mode 100644
index 0000000..64732e8
--- /dev/null
+++ b/libs/service/project/settings.rs
@@ -0,0 +1,197 @@
use crate::{AppService, error::AppError};
use chrono::Utc;
use models::projects::{MemberRole, project_audit_log, project_history_name};
use models::repos::{Repo, repo};
use models::users::user;
use sea_orm::*;
use serde::{Deserialize, Serialize};
use session::Session;

#[derive(Deserialize, Serialize, Clone, Debug, utoipa::ToSchema)]
pub struct ExchangeProjectVisibility {
    pub is_public: bool,
}

#[derive(Deserialize, Serialize, Clone, Debug, utoipa::ToSchema)]
pub struct ExchangeProjectTitle {
    pub display_name: Option<String>,
    pub description: Option<String>,
}

#[derive(Deserialize, Serialize, Clone, Debug, utoipa::ToSchema)]
pub struct ExchangeProjectName {
    pub name: String,
}

impl AppService {
    pub
async fn project_exchange_name( + &self, + ctx: &Session, + project_name: String, + params: ExchangeProjectName, + ) -> Result<(), AppError> { + let user_uid = ctx.user().ok_or(AppError::Unauthorized)?; + let project = self + .utils_find_project_by_name(project_name.clone()) + .await?; + let oper = user::Entity::find() + .filter(user::Column::Uid.eq(user_uid)) + .one(&self.db) + .await? + .ok_or(AppError::UserNotFound)?; + + if let Ok(role) = self.utils_project_context_role(&ctx, project_name).await { + if role != MemberRole::Owner { + return Err(AppError::NoPower); + } + } else { + return Err(AppError::NoPower); + } + + let txn = self.db.begin().await?; + + let mut active = project.clone().into_active_model(); + active.name = Set(params.name.clone()); + active.update(&txn).await?; + + // Update repo names for all repos in this project + let repos = Repo::find() + .filter(repo::Column::Project.eq(project.id)) + .all(&txn) + .await?; + + for r in repos { + let repo_name = r.repo_name.clone(); + let mut ra: repo::ActiveModel = r.into(); + ra.repo_name = Set(format!("{}/{}", params.name.clone(), repo_name)); + ra.update(&txn).await?; + } + + let log = project_audit_log::ActiveModel { + project: Set(project.id), + actor: Set(user_uid), + action: Set("exchange_name".to_string()), + details: Set(Some(serde_json::json!({ + "name": params.name, + "old_name": project.name, + "username": oper.username, + "user_uid": user_uid, + }))), + created_at: Set(Utc::now()), + ..Default::default() + }; + let history = project_history_name::ActiveModel { + id: NotSet, + project_uid: Set(project.id), + history_name: Set(project.name), + changed_at: Set(Utc::now()), + }; + history.insert(&txn).await?; + log.insert(&txn).await?; + txn.commit().await?; + Ok(()) + } + + pub async fn project_exchange_visibility( + &self, + ctx: &Session, + project_name: String, + params: ExchangeProjectVisibility, + ) -> Result<(), AppError> { + let user_uid = ctx.user().ok_or(AppError::Unauthorized)?; + let 
project = self + .utils_find_project_by_name(project_name.clone()) + .await?; + if let Ok(role) = self.utils_project_context_role(&ctx, project_name).await { + if role == MemberRole::Member { + return Err(AppError::NoPower); + } + } else { + return Err(AppError::NoPower); + } + + if project.is_public != params.is_public { + let txn = self.db.begin().await?; + let mut active = project.clone().into_active_model(); + active.is_public = Set(params.is_public); + active.update(&txn).await?; + + let oper = user::Entity::find() + .filter(user::Column::Uid.eq(user_uid)) + .one(&txn) + .await? + .ok_or(AppError::UserNotFound)?; + + let log = project_audit_log::ActiveModel { + project: Set(project.id), + actor: Set(user_uid), + action: Set("exchange_visibility".to_string()), + details: Set(Some(serde_json::json!({ + "is_public": params.is_public, + "old_is_public": project.is_public, + "username": oper.username, + "user_uid": user_uid, + }))), + created_at: Set(Utc::now()), + ..Default::default() + }; + log.insert(&txn).await?; + txn.commit().await?; + } + Ok(()) + } + + pub async fn project_exchange_title( + &self, + ctx: &Session, + project_name: String, + params: ExchangeProjectTitle, + ) -> Result<(), AppError> { + let user_uid = ctx.user().ok_or(AppError::Unauthorized)?; + let project = self + .utils_find_project_by_name(project_name.clone()) + .await?; + if let Ok(role) = self.utils_project_context_role(&ctx, project_name).await { + if role == MemberRole::Member { + return Err(AppError::NoPower); + } + } else { + return Err(AppError::NoPower); + } + + let txn = self.db.begin().await?; + let mut active = project.clone().into_active_model(); + if let Some(description) = params.description.clone() { + active.description = Set(Some(description)); + } + if let Some(display_name) = params.display_name.clone() { + active.display_name = Set(display_name); + } + active.update(&txn).await?; + + let oper = user::Entity::find() + .filter(user::Column::Uid.eq(user_uid)) + .one(&txn) 
+ .await? + .ok_or(AppError::UserNotFound)?; + + let log = project_audit_log::ActiveModel { + project: Set(project.id), + actor: Set(user_uid), + action: Set("exchange_description".to_string()), + details: Set(Some(serde_json::json!({ + "description": params.description, + "username": oper.username, + "user_uid": user_uid, + "old_description": project.description, + "display_name": params.display_name, + "old_display_name": project.display_name, + }))), + created_at: Set(Utc::now()), + ..Default::default() + }; + log.insert(&txn).await?; + txn.commit().await?; + Ok(()) + } +} diff --git a/libs/service/project/standard.rs b/libs/service/project/standard.rs new file mode 100644 index 0000000..73e62e8 --- /dev/null +++ b/libs/service/project/standard.rs @@ -0,0 +1,61 @@ +use crate::AppService; +use crate::error::AppError; +use models::projects::{Project, ProjectHistoryName, project_history_name}; +use sea_orm::*; +use session::Session; + +impl AppService { + pub async fn project_standard_name( + &self, + name: String, + ctx: &Session, + ) -> Result { + let project = Project::find() + .filter( + Condition::any() + .add(::Column::Name.ilike(&name)) + .add(::Column::Name.eq(&name)), + ) + .one(&self.db) + .await?; + + if let Some(project) = project { + if !project.is_public { + if let Err(_) = self + .utils_project_context_role(&ctx, project.name.clone()) + .await + { + return Err(AppError::PermissionDenied); + } + } + return Ok(project.name); + } + + let project_history = ProjectHistoryName::find() + .filter( + Condition::any() + .add(project_history_name::Column::HistoryName.ilike(&name)) + .add(project_history_name::Column::HistoryName.eq(&name)), + ) + .one(&self.db) + .await?; + + if let Some(project_history) = project_history { + if let Some(project) = Project::find_by_id(project_history.project_uid) + .one(&self.db) + .await? 
+ { + if !project.is_public { + if let Err(_) = self + .utils_project_context_role(&ctx, project.name.clone()) + .await + { + return Err(AppError::PermissionDenied); + } + } + return Ok(project.name); + } + } + Err(AppError::ProjectNotFound) + } +} diff --git a/libs/service/project/transfer_repo.rs b/libs/service/project/transfer_repo.rs new file mode 100644 index 0000000..87b9287 --- /dev/null +++ b/libs/service/project/transfer_repo.rs @@ -0,0 +1,193 @@ +use crate::AppService; +use crate::error::AppError; +use models::projects::{MemberRole, ProjectMember, project_audit_log, project_members}; +use models::repos::repo; +use sea_orm::*; +use serde::{Deserialize, Serialize}; +use session::Session; +use uuid::Uuid; + +#[derive(Deserialize, Serialize, Clone, Debug, utoipa::ToSchema)] +pub struct TransferRepoParams { + pub target_project_name: String, +} + +#[derive(Deserialize, Serialize, Clone, Debug, utoipa::ToSchema)] +pub struct TransferRepoResponse { + pub repo_id: Uuid, + pub old_project_name: String, + pub new_project_name: String, + pub repo_name: String, +} + +impl AppService { + pub async fn transfer_repo( + &self, + ctx: &Session, + source_project_name: String, + repo_name: String, + params: TransferRepoParams, + ) -> Result { + let user_uid = ctx.user().ok_or(AppError::Unauthorized)?; + + let source_project = self + .utils_find_project_by_name(source_project_name.clone()) + .await?; + let target_project = self + .utils_find_project_by_name(params.target_project_name.clone()) + .await?; + + let source_member = ProjectMember::find() + .filter(project_members::Column::Project.eq(source_project.id)) + .filter(project_members::Column::User.eq(user_uid)) + .one(&self.db) + .await? + .ok_or(AppError::PermissionDenied)?; + + if source_member + .scope_role() + .map_err(|_| AppError::InternalError)? + != MemberRole::Owner + && source_member + .scope_role() + .map_err(|_| AppError::InternalError)? 
+ != MemberRole::Admin + { + return Err(AppError::PermissionDenied); + } + + let target_member = ProjectMember::find() + .filter(project_members::Column::Project.eq(target_project.id)) + .filter(project_members::Column::User.eq(user_uid)) + .one(&self.db) + .await? + .ok_or(AppError::PermissionDenied)?; + + if target_member + .scope_role() + .map_err(|_| AppError::InternalError)? + != MemberRole::Owner + && target_member + .scope_role() + .map_err(|_| AppError::InternalError)? + != MemberRole::Admin + { + return Err(AppError::PermissionDenied); + } + + let repository = repo::Entity::find() + .filter(repo::Column::RepoName.eq(repo_name.clone())) + .one(&self.db) + .await? + .ok_or(AppError::NotFound("Repository not found".to_string()))?; + + if repository.project != source_project.id { + return Err(AppError::NotFound( + "Repository not found in source project".to_string(), + )); + } + + let target_repo_exists = repo::Entity::find() + .filter(repo::Column::RepoName.eq(repo_name.clone())) + .one(&self.db) + .await?; + + if target_repo_exists.is_some() { + return Err(AppError::InternalServerError( + "Repository with same name already exists in target project".to_string(), + )); + } + + let txn = self.db.begin().await?; + + let old_project_name = source_project.name.clone(); + let mut active_repo: repo::ActiveModel = repository.clone().into(); + active_repo.project = Set(target_project.id); + + let updated_repo = active_repo.update(&txn).await?; + + let source_log = project_audit_log::ActiveModel { + project: Set(source_project.id), + actor: Set(user_uid), + action: Set("repo_transfer_out".to_string()), + details: Set(Some(serde_json::json!({ + "repo_id": repository.id, + "repo_name": repo_name, + "target_project": target_project.name, + }))), + created_at: Set(chrono::Utc::now()), + ..Default::default() + }; + source_log.insert(&txn).await?; + + let target_log = project_audit_log::ActiveModel { + project: Set(target_project.id), + actor: Set(user_uid), + action: 
Set("repo_transfer_in".to_string()), + details: Set(Some(serde_json::json!({ + "repo_id": repository.id, + "repo_name": repo_name, + "source_project": source_project.name, + }))), + created_at: Set(chrono::Utc::now()), + ..Default::default() + }; + target_log.insert(&txn).await?; + + txn.commit().await?; + + let _ = self + .project_log_activity( + source_project.id, + Some(updated_repo.id), + user_uid, + super::activity::ActivityLogParams { + event_type: "repo_transfer_out".to_string(), + title: format!( + "{} transferred repository '{}' to project '{}'", + user_uid, repo_name, target_project.name + ), + repo_id: Some(updated_repo.id), + content: None, + event_id: None, + event_sub_id: None, + metadata: Some(serde_json::json!({ + "repo_name": repo_name, + "target_project": target_project.name, + })), + is_private: false, + }, + ) + .await; + let _ = self + .project_log_activity( + target_project.id, + Some(updated_repo.id), + user_uid, + super::activity::ActivityLogParams { + event_type: "repo_transfer_in".to_string(), + title: format!( + "{} transferred repository '{}' from project '{}'", + user_uid, repo_name, source_project.name + ), + repo_id: Some(updated_repo.id), + content: None, + event_id: None, + event_sub_id: None, + metadata: Some(serde_json::json!({ + "repo_name": repo_name, + "source_project": source_project.name, + })), + is_private: false, + }, + ) + .await; + + Ok(TransferRepoResponse { + repo_id: updated_repo.id, + old_project_name, + new_project_name: target_project.name, + repo_name, + }) + } +} diff --git a/libs/service/project/watch.rs b/libs/service/project/watch.rs new file mode 100644 index 0000000..579860b --- /dev/null +++ b/libs/service/project/watch.rs @@ -0,0 +1,233 @@ +use crate::AppService; +use crate::error::AppError; +use chrono::Utc; +use models::projects::{ProjectWatch, project_audit_log, project_watch}; +use models::users::user_email; +use sea_orm::*; +use serde::{Deserialize, Serialize}; +use session::Session; +use uuid::Uuid; 
+ +#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, utoipa::ToSchema)] +pub struct WatchUserInfo { + pub uid: Uuid, + pub username: String, + pub avatar_url: String, +} + +impl AppService { + pub async fn project_watch(&self, ctx: &Session, project_name: String) -> Result<(), AppError> { + let user_uid = ctx.user().ok_or(AppError::Unauthorized)?; + let project = self.utils_find_project_by_name(project_name).await?; + + let watch_exists = ProjectWatch::find() + .filter(project_watch::Column::User.eq(user_uid)) + .filter(project_watch::Column::Project.eq(project.id)) + .one(&self.db) + .await?; + + if watch_exists.is_some() { + return Err(AppError::BadRequest( + "Already watching this project".to_string(), + )); + } + + ProjectWatch::insert(project_watch::ActiveModel { + id: Default::default(), + project: Set(project.id), + user: Set(user_uid), + notifications_enabled: Set(true), + created_at: Set(Utc::now()), + updated_at: Set(Utc::now()), + }) + .exec(&self.db) + .await?; + + let _ = self + .project_log_activity( + project.id, + None, + user_uid, + super::activity::ActivityLogParams { + event_type: "project_watch".to_string(), + title: format!("{} started watching the project", user_uid), + repo_id: None, + content: None, + event_id: None, + event_sub_id: None, + metadata: Some(serde_json::json!({ + "project_name": project.name.clone(), + })), + is_private: false, + }, + ) + .await; + + let log = project_audit_log::ActiveModel { + project: Set(project.id), + actor: Set(user_uid), + action: Set("project_watch".to_string()), + details: Set(Some(serde_json::json!({ + "project_name": project.name.clone(), + }))), + created_at: Set(Utc::now()), + ..Default::default() + }; + log.insert(&self.db).await?; + + Ok(()) + } + + pub async fn project_unwatch( + &self, + ctx: &Session, + project_name: String, + ) -> Result<(), AppError> { + let user_uid = ctx.user().ok_or(AppError::Unauthorized)?; + let project = self.utils_find_project_by_name(project_name).await?; + + 
let watch_exists = ProjectWatch::find() + .filter(project_watch::Column::User.eq(user_uid)) + .filter(project_watch::Column::Project.eq(project.id)) + .one(&self.db) + .await?; + + if watch_exists.is_none() { + return Err(AppError::NotFound("Not watching this project".to_string())); + } + + ProjectWatch::delete_many() + .filter(project_watch::Column::User.eq(user_uid)) + .filter(project_watch::Column::Project.eq(project.id)) + .exec(&self.db) + .await?; + + let _ = self + .project_log_activity( + project.id, + None, + user_uid, + super::activity::ActivityLogParams { + event_type: "project_unwatch".to_string(), + title: format!("{} stopped watching the project", user_uid), + repo_id: None, + content: None, + event_id: None, + event_sub_id: None, + metadata: Some(serde_json::json!({ + "project_name": project.name.clone(), + })), + is_private: false, + }, + ) + .await; + + let log = project_audit_log::ActiveModel { + project: Set(project.id), + actor: Set(user_uid), + action: Set("project_unwatch".to_string()), + details: Set(Some(serde_json::json!({ + "project_name": project.name.clone(), + }))), + created_at: Set(Utc::now()), + ..Default::default() + }; + log.insert(&self.db).await?; + + Ok(()) + } + + pub async fn project_is_watch( + &self, + ctx: &Session, + project_name: String, + ) -> Result { + let user_uid = ctx.user().ok_or(AppError::Unauthorized)?; + let project = self.utils_find_project_by_name(project_name).await?; + + let watch_exists = ProjectWatch::find() + .filter(project_watch::Column::User.eq(user_uid)) + .filter(project_watch::Column::Project.eq(project.id)) + .one(&self.db) + .await?; + + Ok(watch_exists.is_some()) + } + + pub async fn project_watches(&self, project_name: String) -> Result { + let project = self.utils_find_project_by_name(project_name).await?; + + let watches = ProjectWatch::find() + .filter(project_watch::Column::Project.eq(project.id)) + .count(&self.db) + .await?; + + Ok(watches) + } + + pub async fn project_watch_user_list( + 
&self, + project_name: String, + pager: crate::Pager, + ) -> Result, AppError> { + let project = self.utils_find_project_by_name(project_name).await?; + + let watches = ProjectWatch::find() + .filter(project_watch::Column::Project.eq(project.id)) + .order_by_desc(project_watch::Column::CreatedAt) + .limit(pager.par_page as u64) + .offset(((pager.page - 1) * pager.par_page) as u64) + .all(&self.db) + .await?; + + let user_uids: Vec = watches.into_iter().map(|watch| watch.user).collect(); + + if user_uids.is_empty() { + return Ok(vec![]); + } + + let users = models::users::user::Entity::find() + .filter(models::users::user::Column::Uid.is_in(user_uids)) + .all(&self.db) + .await? + .into_iter() + .map(|u| WatchUserInfo { + uid: u.uid, + username: u.username, + avatar_url: u.avatar_url.unwrap_or_default(), + }) + .collect(); + + Ok(users) + } + + pub async fn project_watch_user_emails( + &self, + project_uid: Uuid, + notify: bool, + ) -> Result, AppError> { + let watches = ProjectWatch::find() + .filter(project_watch::Column::Project.eq(project_uid)) + .all(&self.db) + .await? + .into_iter() + .filter(|x| x.notifications_enabled == notify) + .collect::>(); + + let user_uids: Vec = watches.into_iter().map(|watch| watch.user).collect(); + + if user_uids.is_empty() { + return Ok(vec![]); + } + + let emails = user_email::Entity::find() + .filter(user_email::Column::User.is_in(user_uids)) + .all(&self.db) + .await? 
+ .into_iter() + .map(|u| u.email) + .collect(); + + Ok(emails) + } +} diff --git a/libs/service/pull_request/merge.rs b/libs/service/pull_request/merge.rs new file mode 100644 index 0000000..7891619 --- /dev/null +++ b/libs/service/pull_request/merge.rs @@ -0,0 +1,454 @@ +use crate::AppService; +use crate::error::AppError; +use crate::project::activity::ActivityLogParams; +use chrono::Utc; +use models::pull_request::{PrStatus, pull_request}; +use models::repos::repo; +use models::users::user; +use sea_orm::*; +use serde::{Deserialize, Serialize}; +use session::Session; +use utoipa::ToSchema; +use uuid::Uuid; + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize, ToSchema)] +#[serde(rename_all = "lowercase")] +pub enum MergeStrategy { + MergeCommit, + Squash, + Rebase, +} + +impl Default for MergeStrategy { + fn default() -> Self { + MergeStrategy::MergeCommit + } +} + +#[derive(Debug, Clone, Serialize, ToSchema)] +pub struct MergeAnalysisResponse { + pub can_fast_forward: bool, + pub is_up_to_date: bool, + pub is_normal: bool, + pub analysis_flags: Vec, + /// Strategies supported given the current state of the PR. 
+ pub supported_strategies: Vec, +} + +#[derive(Debug, Clone, Deserialize, ToSchema)] +pub struct MergeRequest { + #[serde(default)] + pub fast_forward: bool, + #[serde(default)] + pub strategy: MergeStrategy, + #[serde(default = "default_merge_message")] + pub message: String, +} + +fn default_merge_message() -> String { + "Merge pull request".to_string() +} + +#[derive(Debug, Clone, Serialize, ToSchema)] +pub struct MergeResponse { + pub repo: Uuid, + pub number: i64, + pub status: String, + pub merged_by: Uuid, + pub merged_at: chrono::DateTime, +} + +#[derive(Debug, Clone, Serialize, ToSchema)] +pub struct MergeConflictFile { + pub path: String, + pub status: String, +} + +#[derive(Debug, Clone, Serialize, ToSchema)] +pub struct MergeConflictResponse { + pub has_conflicts: bool, + pub conflicted_files: Vec, +} + +fn resolve_ref_name(name: &str) -> String { + if name.starts_with("refs/") { + name.to_string() + } else if name.contains('/') { + format!("refs/heads/{}", name) + } else { + format!("refs/heads/{}", name) + } +} + +impl AppService { + /// Analyze merge readiness of a pull request. + pub async fn merge_analysis( + &self, + namespace: String, + repo_name: String, + pr_number: i64, + ctx: &Session, + ) -> Result { + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + + let pr = pull_request::Entity::find() + .filter(pull_request::Column::Repo.eq(repo.id)) + .filter(pull_request::Column::Number.eq(pr_number)) + .one(&self.db) + .await? + .ok_or(AppError::NotFound("Pull request not found".to_string()))?; + + let domain = git::GitDomain::from_model(repo)?; + + let head_ref_name = resolve_ref_name(&pr.head); + let head_oid = domain + .ref_target(&head_ref_name)? 
+ .ok_or_else(|| AppError::BadRequest("Head ref has no OID".to_string()))?; + + let (analysis, _pref) = domain.merge_analysis_for_ref(&pr.base, &head_oid)?; + + let mut flags = Vec::new(); + if analysis.is_fast_forward { + flags.push("fast_forward".to_string()); + } + if analysis.is_up_to_date { + flags.push("up_to_date".to_string()); + } + if analysis.is_normal { + flags.push("normal".to_string()); + } + + // Determine supported strategies. + // All three strategies are always available for open PRs. + let supported_strategies = if analysis.is_up_to_date { + // Already merged — no strategies available + vec![] + } else { + vec![ + "mergecommit".to_string(), + "squash".to_string(), + "rebase".to_string(), + ] + }; + + Ok(MergeAnalysisResponse { + can_fast_forward: analysis.is_fast_forward, + is_up_to_date: analysis.is_up_to_date, + is_normal: analysis.is_normal, + analysis_flags: flags, + supported_strategies, + }) + } + + pub async fn merge_conflict_check( + &self, + namespace: String, + repo_name: String, + pr_number: i64, + ctx: &Session, + ) -> Result { + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + + let pr = pull_request::Entity::find() + .filter(pull_request::Column::Repo.eq(repo.id)) + .filter(pull_request::Column::Number.eq(pr_number)) + .one(&self.db) + .await? + .ok_or(AppError::NotFound("Pull request not found".to_string()))?; + + let domain = git::GitDomain::from_model(repo)?; + + let head_ref_name = resolve_ref_name(&pr.head); + let head_oid = domain + .ref_target(&head_ref_name)? 
+ .ok_or_else(|| AppError::BadRequest("Head ref has no OID".to_string()))?; + + let (analysis, _pref) = domain.merge_analysis_for_ref(&pr.base, &head_oid)?; + + let has_conflicts = + !analysis.is_fast_forward && !analysis.is_up_to_date && domain.merge_is_conflicted(); + + if has_conflicts { + let conflicted_files = self.get_conflicted_files(&domain)?; + Ok(MergeConflictResponse { + has_conflicts: true, + conflicted_files, + }) + } else { + Ok(MergeConflictResponse { + has_conflicts: false, + conflicted_files: vec![], + }) + } + } + + /// ONLY admin/owner of the target repo can merge. + pub async fn merge_execute( + &self, + namespace: String, + repo_name: String, + pr_number: i64, + request: MergeRequest, + ctx: &Session, + ) -> Result { + let user_uid = ctx.user().ok_or(AppError::Unauthorized)?; + let repo: repo::Model = self + .utils_check_repo_admin(namespace.clone(), repo_name.clone(), ctx) + .await?; + + let pr = pull_request::Entity::find() + .filter(pull_request::Column::Repo.eq(repo.id)) + .filter(pull_request::Column::Number.eq(pr_number)) + .one(&self.db) + .await? 
+ .ok_or(AppError::NotFound("Pull request not found".to_string()))?; + + if pr.status == PrStatus::Merged.to_string() { + return Err(AppError::BadRequest( + "Pull request is already merged".to_string(), + )); + } + if pr.status == PrStatus::Closed.to_string() { + return Err(AppError::BadRequest( + "Cannot merge a closed pull request".to_string(), + )); + } + + let protection = self.branch_protection_find(repo.id, &pr.base).await?; + + if let Some(ref rule) = protection { + if rule.forbid_merge { + return Err(AppError::Forbidden(format!( + "Branch '{}' is protected: merges are forbidden", + pr.base + ))); + } + + if rule.required_approvals > 0 { + let approval_check = self + .branch_protection_check_approvals( + namespace.clone(), + repo_name.clone(), + pr_number, + ctx, + ) + .await?; + if !approval_check.enough_approvals { + return Err(AppError::Forbidden(format!( + "Branch '{}' requires {} approval(s), but only {} found", + pr.base, rule.required_approvals, approval_check.approvals + ))); + } + } + + if rule.require_linear_history && request.strategy == MergeStrategy::MergeCommit { + return Err(AppError::Forbidden(format!( + "Branch '{}' requires linear history: merge commits are not allowed, use squash or rebase instead", + pr.base + ))); + } + } + + let domain = git::GitDomain::from_model(repo.clone())?; + + let head_ref_name = resolve_ref_name(&pr.head); + let head_oid = domain + .ref_target(&head_ref_name)? + .ok_or_else(|| AppError::BadRequest("Head ref has no OID".to_string()))?; + let base_oid = domain + .ref_target(&resolve_ref_name(&pr.base))? 
+ .ok_or_else(|| AppError::BadRequest("Base ref has no OID".to_string()))?; + + let (analysis, _pref) = domain.merge_analysis_for_ref(&pr.base, &head_oid)?; + + if !analysis.is_fast_forward && !analysis.is_up_to_date && domain.merge_is_conflicted() { + return Err(AppError::BadRequest( + "Pull request has merge conflicts".to_string(), + )); + } + + // Build merge commit message + let merge_msg = if request.message == default_merge_message() { + format!("{} (#{})\n\n{}", pr.title, pr_number, pr.title) + } else { + request.message + }; + + // Get author signature for merge commit + let sig = domain.commit_default_signature()?; + let committer = sig.clone(); + + if analysis.is_fast_forward && request.fast_forward { + // Fast-forward: move base ref forward to head + let base_ref_name = resolve_ref_name(&pr.base); + domain.ref_update(&base_ref_name, head_oid.clone(), None, None)?; + } else { + match request.strategy { + MergeStrategy::MergeCommit => { + domain.merge_commits(&base_oid, &head_oid, None)?; + + // Write the merge commit from the merge index + let merge_oid = domain.commit_create_from_index( + None, + &sig, + &committer, + &merge_msg, + &[base_oid.clone(), head_oid.clone()], + )?; + let base_ref_name = resolve_ref_name(&pr.base); + domain.ref_update(&base_ref_name, merge_oid, None, None)?; + let _ = domain.merge_abort(); + } + MergeStrategy::Squash => { + // Squash all commits from source branch into one on top of base + let squash_oid = domain.squash_commits(&base_oid, &pr.head)?; + let base_ref_name = resolve_ref_name(&pr.base); + domain.ref_update(&base_ref_name, squash_oid, None, None)?; + } + MergeStrategy::Rebase => { + // Rebase source commits onto base + let rebase_oid = domain.rebase_commits(&base_oid, &head_oid)?; + let base_ref_name = resolve_ref_name(&pr.base); + domain.ref_update(&base_ref_name, rebase_oid, None, None)?; + } + } + } + + let now = Utc::now(); + + let mut active: pull_request::ActiveModel = pr.clone().into(); + active.status = 
Set(PrStatus::Merged.to_string()); + active.merged_by = Set(Some(user_uid)); + active.merged_at = Set(Some(now)); + active.updated_at = Set(now); + let merged_model = active.update(&self.db).await?; + + super::invalidate_pr_cache(&self.cache, repo.id, pr_number).await; + + let actor_username = user::Entity::find_by_id(user_uid) + .one(&self.db) + .await + .ok() + .flatten() + .map(|u| u.username) + .unwrap_or_default(); + let _ = self + .project_log_activity( + repo.project, + Some(repo.id), + user_uid, + super::super::project::activity::ActivityLogParams { + event_type: "pr_merge".to_string(), + title: format!("{} merged pull request #{}", actor_username, pr_number), + repo_id: Some(repo.id), + content: Some(merged_model.title), + event_id: None, + event_sub_id: Some(pr_number), + metadata: Some(serde_json::json!({ + "base": pr.clone().base, + "head": pr.head.clone(), + })), + is_private: false, + }, + ) + .await; + + Ok(MergeResponse { + repo: repo.id, + number: pr_number, + status: PrStatus::Merged.to_string(), + merged_by: user_uid, + merged_at: now, + }) + } + + pub async fn merge_abort( + &self, + namespace: String, + repo_name: String, + pr_number: i64, + ctx: &Session, + ) -> Result<(), AppError> { + let repo: repo::Model = self + .utils_check_repo_admin(namespace, repo_name, ctx) + .await?; + + let _pr = pull_request::Entity::find() + .filter(pull_request::Column::Repo.eq(repo.id)) + .filter(pull_request::Column::Number.eq(pr_number)) + .one(&self.db) + .await? 
+ .ok_or(AppError::NotFound("Pull request not found".to_string()))?; + + let domain = git::GitDomain::from_model(repo.clone())?; + domain.merge_abort()?; + + let user_uid = ctx.user().unwrap_or(Uuid::nil()); + let _ = self + .project_log_activity( + repo.project, + Some(repo.id), + user_uid, + ActivityLogParams { + event_type: "pr_merge_abort".to_string(), + title: format!("{} aborted merge for PR #{}", user_uid, pr_number), + repo_id: Some(repo.id), + content: None, + event_id: None, + event_sub_id: Some(pr_number), + metadata: Some(serde_json::json!({ + "pr_number": pr_number, + })), + is_private: false, + }, + ) + .await; + + Ok(()) + } + + pub async fn merge_is_in_progress( + &self, + namespace: String, + repo_name: String, + pr_number: i64, + ctx: &Session, + ) -> Result { + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + + let _pr = pull_request::Entity::find() + .filter(pull_request::Column::Repo.eq(repo.id)) + .filter(pull_request::Column::Number.eq(pr_number)) + .one(&self.db) + .await? 
+ .ok_or(AppError::NotFound("Pull request not found".to_string()))?; + + let domain = git::GitDomain::from_model(repo)?; + Ok(domain.merge_is_in_progress()) + } + + fn get_conflicted_files( + &self, + domain: &git::GitDomain, + ) -> Result, AppError> { + let index = domain + .repo() + .index() + .map_err(|e| AppError::InternalServerError(e.to_string()))?; + + let files = match index.conflicts() { + Ok(conflicts) => conflicts + .filter_map(|result| result.ok()) + .filter_map(|conflict| { + conflict.our.as_ref().map(|entry| MergeConflictFile { + path: String::from_utf8_lossy(&entry.path).to_string(), + status: "both_modified".to_string(), + }) + }) + .collect(), + Err(_) => vec![], + }; + + Ok(files) + } +} diff --git a/libs/service/pull_request/mod.rs b/libs/service/pull_request/mod.rs new file mode 100644 index 0000000..746d9a9 --- /dev/null +++ b/libs/service/pull_request/mod.rs @@ -0,0 +1,31 @@ +use redis::AsyncCommands; +use uuid::Uuid; + +pub mod merge; +pub mod review; +pub mod review_comment; +pub mod review_request; + +pub mod pull_request; + +// Re-export types +pub use merge::{MergeAnalysisResponse, MergeConflictResponse, MergeRequest, MergeResponse}; +pub use pull_request::{ + PrCommitResponse, PrCommitsListResponse, PullRequestCreateRequest, PullRequestListResponse, + PullRequestResponse, PullRequestSummaryResponse, PullRequestUpdateRequest, +}; +pub use review::{ReviewListResponse, ReviewResponse, ReviewSubmitRequest, ReviewUpdateRequest}; +pub use review_comment::{ + ReviewCommentCreateRequest, ReviewCommentListQuery, ReviewCommentListResponse, + ReviewCommentReplyRequest, ReviewCommentResponse, ReviewCommentUpdateRequest, +}; +pub use review_request::{ + ReviewRequestCreateRequest, ReviewRequestListResponse, ReviewRequestResponse, +}; + +pub(crate) async fn invalidate_pr_cache(cache: &db::cache::AppCache, repo_id: Uuid, number: i64) { + if let Ok(mut conn) = cache.conn().await { + let key = format!("pr:get:{}:{}", repo_id, number); + let _: Option<()> 
= conn.del::<_, ()>(key).await.ok(); + } +} diff --git a/libs/service/pull_request/pull_request.rs b/libs/service/pull_request/pull_request.rs new file mode 100644 index 0000000..a791828 --- /dev/null +++ b/libs/service/pull_request/pull_request.rs @@ -0,0 +1,841 @@ +use crate::AppService; +use crate::error::AppError; +use crate::git::CommitMeta; +use crate::git::diff::{SideBySideDiffQuery, SideBySideDiffResponse}; +use crate::project::activity::ActivityLogParams; +use chrono::Utc; +use models::projects::{MemberRole, project_members}; +use models::pull_request::{PrStatus, pull_request}; +use models::repos::repo; +use models::users::user; +use redis::AsyncCommands; +use sea_orm::*; +use serde::{Deserialize, Serialize}; +use session::Session; +use utoipa::ToSchema; +use uuid::Uuid; + +#[derive(Debug, Clone, Deserialize, ToSchema)] +pub struct PullRequestCreateRequest { + pub title: String, + pub body: Option, + pub base: String, + pub head: String, + #[serde(default)] + pub draft: bool, +} + +#[derive(Debug, Clone, Deserialize, ToSchema)] +pub struct PullRequestUpdateRequest { + pub title: Option, + pub body: Option, + pub base: Option, + #[serde(default)] + pub draft: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize, ToSchema)] +pub struct PullRequestResponse { + pub repo: Uuid, + pub number: i64, + pub issue: Option, + pub title: String, + pub body: Option, + pub author: Uuid, + pub author_username: Option, + pub base: String, + pub head: String, + pub status: String, + pub merged_by: Option, + pub created_at: chrono::DateTime, + pub updated_at: chrono::DateTime, + pub merged_at: Option>, + pub created_by_ai: bool, +} + +impl From for PullRequestResponse { + fn from(pr: pull_request::Model) -> Self { + Self { + repo: pr.repo, + number: pr.number, + issue: Some(pr.issue), + title: pr.title, + body: pr.body, + author: pr.author, + author_username: None, + base: pr.base, + head: pr.head, + status: pr.status, + merged_by: pr.merged_by, + created_at: 
pr.created_at, + updated_at: pr.updated_at, + merged_at: pr.merged_at, + created_by_ai: pr.created_by_ai, + } + } +} + +#[derive(Debug, Clone, Serialize, ToSchema)] +pub struct PullRequestListResponse { + pub pull_requests: Vec, + pub total: u64, + pub page: i64, + pub per_page: i64, +} + +#[derive(Debug, Clone, Serialize, ToSchema)] +pub struct PullRequestSummaryResponse { + pub total: u64, + pub open: u64, + pub merged: u64, + pub closed: u64, +} + +#[derive(Debug, Clone, Serialize, ToSchema)] +pub struct PullRequestStatusResponse { + pub status: String, + pub can_merge: bool, + pub merge_analysis: Option, +} + +#[derive(Debug, Clone, Serialize, ToSchema)] +pub struct MergeAnalysisResult { + pub is_fast_forward: bool, + pub is_up_to_date: bool, + pub is_normal: bool, +} + +#[derive(Debug, Clone, Serialize, ToSchema)] +pub struct PrCommitResponse { + pub oid: String, + pub short_oid: String, + pub message: String, + pub summary: String, + pub author_name: String, + pub author_email: String, + pub authored_at: chrono::DateTime, + pub committer_name: String, + pub committer_email: String, + pub committed_at: chrono::DateTime, +} + +impl From for PrCommitResponse { + fn from(c: CommitMeta) -> Self { + let oid_str = c.oid.to_string(); + fn sig_to_dt(time_secs: i64) -> chrono::DateTime { + chrono::DateTime::from_timestamp(time_secs, 0) + .unwrap_or_else(|| chrono::DateTime::from_timestamp(0, 0).unwrap()) + } + Self { + oid: oid_str.clone(), + short_oid: oid_str[..std::cmp::min(7, oid_str.len())].to_string(), + message: c.message, + summary: c.summary, + author_name: c.author.name, + author_email: c.author.email, + authored_at: sig_to_dt(c.author.time_secs), + committer_name: c.committer.name, + committer_email: c.committer.email, + committed_at: sig_to_dt(c.committer.time_secs), + } + } +} + +#[derive(Debug, Clone, Serialize, ToSchema)] +pub struct PrCommitsListResponse { + pub commits: Vec, +} + +impl AppService { + /// List pull requests for a repo. 
+ pub async fn pull_request_list( + &self, + namespace: String, + repo_name: String, + status: Option, + page: Option, + per_page: Option, + ctx: &Session, + ) -> Result { + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + + let page = page.unwrap_or(1); + let per_page = per_page.unwrap_or(20); + let offset = (page - 1) * per_page; + + let mut query = pull_request::Entity::find() + .filter(pull_request::Column::Repo.eq(repo.id)) + .order_by_desc(pull_request::Column::CreatedAt); + + if let Some(ref s) = status { + query = query.filter(pull_request::Column::Status.eq(s)); + } + + let total = query.clone().count(&self.db).await?; + let prs = query + .offset(offset as u64) + .limit(per_page as u64) + .all(&self.db) + .await?; + + let author_ids: Vec = prs.iter().map(|p| p.author).collect(); + let authors = if author_ids.is_empty() { + vec![] + } else { + user::Entity::find() + .filter(user::Column::Uid.is_in(author_ids)) + .all(&self.db) + .await? + }; + + let responses: Vec = prs + .into_iter() + .map(|pr| { + let username = authors + .iter() + .find(|u| u.uid == pr.author) + .map(|u| u.username.clone()); + PullRequestResponse { + author_username: username, + ..PullRequestResponse::from(pr) + } + }) + .collect(); + + Ok(PullRequestListResponse { + pull_requests: responses, + total, + page, + per_page, + }) + } + + /// Get a single pull request. 
+ pub async fn pull_request_get( + &self, + namespace: String, + repo_name: String, + number: i64, + ctx: &Session, + ) -> Result { + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + + let cache_key = format!("pr:get:{}:{}", repo.id, number); + if let Ok(mut conn) = self.cache.conn().await { + if let Ok(cached) = conn.get::<_, String>(cache_key.clone()).await { + if let Ok(cached) = serde_json::from_str::(&cached) { + return Ok(cached); + } + } + } + + let pr = pull_request::Entity::find() + .filter(pull_request::Column::Repo.eq(repo.id)) + .filter(pull_request::Column::Number.eq(number)) + .one(&self.db) + .await? + .ok_or(AppError::NotFound("Pull request not found".to_string()))?; + + let author = user::Entity::find_by_id(pr.author) + .one(&self.db) + .await + .ok() + .flatten(); + let username = author.map(|u| u.username); + + let response = PullRequestResponse { + author_username: username, + ..PullRequestResponse::from(pr) + }; + + if let Ok(mut conn) = self.cache.conn().await { + let _: Option<()> = conn + .set_ex::( + cache_key, + serde_json::to_string(&response).unwrap_or_default(), + 300, + ) + .await + .ok(); + } + + Ok(response) + } + + /// Get the next PR number for a repo. 
    /// Compute the next repo-scoped PR number as `MAX(number) + 1` (1 when
    /// the repo has no PRs yet).
    ///
    /// NOTE(review): read-then-increment is racy — two concurrent creates can
    /// observe the same MAX and collide; presumably the `(repo, number)`
    /// primary key rejects the duplicate insert — confirm.
    async fn next_pr_number(&self, repo_id: Uuid) -> Result<i64, AppError> {
        let max_num: Option<Option<i64>> = pull_request::Entity::find()
            .filter(pull_request::Column::Repo.eq(repo_id))
            .select_only()
            .column_as(pull_request::Column::Number.max(), "max_num")
            .into_tuple::<Option<i64>>()
            .one(&self.db)
            .await?;
        Ok(max_num.flatten().unwrap_or(0) + 1)
    }

    /// Create a pull request.
    ///
    /// The caller must pass `utils_check_repo_admin`. A new PR starts as
    /// `Draft` when `request.draft` is set, otherwise `Open`. After insert it
    /// logs a `pr_open` activity (best-effort, errors ignored) and — when the
    /// repo has AI code review enabled and the PR is not a draft — spawns a
    /// detached background task to trigger the review.
    pub async fn pull_request_create(
        &self,
        namespace: String,
        repo_name: String,
        request: PullRequestCreateRequest,
        ctx: &Session,
    ) -> Result<PullRequestResponse, AppError> {
        let user_uid = ctx.user().ok_or(AppError::Unauthorized)?;
        let repo: repo::Model = self
            .utils_check_repo_admin(namespace.clone(), repo_name.clone(), ctx)
            .await?;

        let number = self.next_pr_number(repo.id).await?;
        let now = Utc::now();

        let status = if request.draft {
            PrStatus::Draft
        } else {
            PrStatus::Open
        };

        let active = pull_request::ActiveModel {
            repo: Set(repo.id),
            number: Set(number),
            // Every PR is backed by a freshly minted issue UUID.
            issue: Set(Uuid::now_v7()),
            title: Set(request.title),
            body: Set(request.body),
            author: Set(user_uid),
            base: Set(request.base),
            head: Set(request.head),
            status: Set(status.to_string()),
            merged_by: Set(None),
            created_at: Set(now),
            updated_at: Set(now),
            merged_at: Set(None),
            created_by_ai: Set(false),
            ..Default::default()
        };
        let model = active.insert(&self.db).await?;

        // Best-effort activity log; failure must not fail PR creation.
        let actor_username = user::Entity::find_by_id(user_uid)
            .one(&self.db)
            .await
            .ok()
            .flatten()
            .map(|u| u.username)
            .unwrap_or_default();
        let _ = self
            .project_log_activity(
                repo.project,
                Some(repo.id),
                user_uid,
                super::super::project::activity::ActivityLogParams {
                    event_type: "pr_open".to_string(),
                    title: format!(
                        "{} opened pull request #{}: {}",
                        actor_username, number, model.title
                    ),
                    repo_id: Some(repo.id),
                    content: None,
                    event_id: None,
                    event_sub_id: Some(number),
                    metadata: Some(serde_json::json!({
                        "base": model.base,
                        "head": model.head,
                    })),
                    is_private: false,
                },
            )
            .await;

        // Fire-and-forget AI review; result is intentionally discarded.
        if repo.ai_code_review_enabled && !request.draft {
            let this = self.clone();
            let namespace = namespace.clone();
            let repo_name = repo.repo_name.clone();
            let pr_number = Some(number);
            let repo_for_bg = repo.clone();
            tokio::spawn(async move {
                let _ = this
                    .trigger_ai_code_review_internal(
                        namespace,
                        repo_name,
                        pr_number,
                        None,
                        repo_for_bg,
                    )
                    .await;
            });
        }

        Ok(PullRequestResponse::from(model))
    }

    /// Update a PR (title, body, base, draft status).
    /// Only the PR author OR admin/owner can update.
    ///
    /// NOTE(review): this first calls `utils_check_repo_admin`, which looks
    /// like it already enforces an admin-level gate, making the author-or-admin
    /// check below partially redundant — confirm intended semantics.
    /// Merged PRs are immutable. Toggling `draft` maps to Draft/Open status.
    pub async fn pull_request_update(
        &self,
        namespace: String,
        repo_name: String,
        number: i64,
        request: PullRequestUpdateRequest,
        ctx: &Session,
    ) -> Result<PullRequestResponse, AppError> {
        let user_uid = ctx.user().ok_or(AppError::Unauthorized)?;
        let repo: repo::Model = self
            .utils_check_repo_admin(namespace.clone(), repo_name.clone(), ctx)
            .await?;

        let pr = pull_request::Entity::find()
            .filter(pull_request::Column::Repo.eq(repo.id))
            .filter(pull_request::Column::Number.eq(number))
            .one(&self.db)
            .await?
            .ok_or(AppError::NotFound("Pull request not found".to_string()))?;

        // Permission: author OR admin/owner
        let is_author = pr.author == user_uid;
        let member = project_members::Entity::find()
            .filter(project_members::Column::Project.eq(repo.project))
            .filter(project_members::Column::User.eq(user_uid))
            .one(&self.db)
            .await?
            .ok_or(AppError::NoPower)?;
        let role = member.scope_role().map_err(|_| AppError::RoleParseError)?;
        let is_admin = role == MemberRole::Admin || role == MemberRole::Owner;

        if !is_author && !is_admin {
            return Err(AppError::NoPower);
        }

        // Cannot update a merged PR
        if pr.status == PrStatus::Merged.to_string() {
            return Err(AppError::BadRequest(
                "Cannot update a merged pull request".to_string(),
            ));
        }

        // Apply only the fields present in the request (partial update).
        let mut active: pull_request::ActiveModel = pr.clone().into();
        if let Some(title) = request.title {
            active.title = Set(title);
        }
        if let Some(body) = request.body {
            active.body = Set(Some(body));
        }
        if let Some(base) = request.base {
            active.base = Set(base);
        }
        if let Some(draft) = request.draft {
            active.status = Set(if draft {
                PrStatus::Draft.to_string()
            } else {
                PrStatus::Open.to_string()
            });
        }
        active.updated_at = Set(Utc::now());

        let model = active.update(&self.db).await?;

        // Drop the cached GET response so readers see the new state.
        super::invalidate_pr_cache(&self.cache, repo.id, number).await;

        // Best-effort activity log.
        let actor_username = user::Entity::find_by_id(user_uid)
            .one(&self.db)
            .await
            .ok()
            .flatten()
            .map(|u| u.username)
            .unwrap_or_default();
        let _ = self
            .project_log_activity(
                repo.project,
                Some(repo.id),
                user_uid,
                super::super::project::activity::ActivityLogParams {
                    event_type: "pr_update".to_string(),
                    title: format!("{} updated pull request #{}", actor_username, number),
                    repo_id: Some(repo.id),
                    content: Some(model.title.clone()),
                    event_id: None,
                    event_sub_id: Some(number),
                    metadata: None,
                    is_private: false,
                },
            )
            .await;

        Ok(PullRequestResponse::from(model))
    }

    /// Close a pull request. Author OR admin/owner only.
    /// Thin wrapper over `pr_set_status` with `PrStatus::Closed`.
    pub async fn pull_request_close(
        &self,
        namespace: String,
        repo_name: String,
        number: i64,
        ctx: &Session,
    ) -> Result<PullRequestResponse, AppError> {
        self.pr_set_status(namespace, repo_name, number, PrStatus::Closed, ctx)
            .await
    }

    /// Reopen a pull request. Author OR admin/owner only.
+ pub async fn pull_request_reopen( + &self, + namespace: String, + repo_name: String, + number: i64, + ctx: &Session, + ) -> Result { + self.pr_set_status(namespace, repo_name, number, PrStatus::Open, ctx) + .await + } + + pub async fn pull_request_delete( + &self, + namespace: String, + repo_name: String, + number: i64, + ctx: &Session, + ) -> Result<(), AppError> { + let user_uid = ctx.user().ok_or(AppError::Unauthorized)?; + let repo: repo::Model = self + .utils_check_repo_admin(namespace.clone(), repo_name.clone(), ctx) + .await?; + + let pr = pull_request::Entity::find() + .filter(pull_request::Column::Repo.eq(repo.id)) + .filter(pull_request::Column::Number.eq(number)) + .one(&self.db) + .await? + .ok_or(AppError::NotFound("Pull request not found".to_string()))?; + + // Permission: author OR admin/owner + let is_author = pr.author == user_uid; + let member = project_members::Entity::find() + .filter(project_members::Column::Project.eq(repo.project)) + .filter(project_members::Column::User.eq(user_uid)) + .one(&self.db) + .await? 
+ .ok_or(AppError::NoPower)?; + let role = member.scope_role().map_err(|_| AppError::RoleParseError)?; + let is_admin = role == MemberRole::Admin || role == MemberRole::Owner; + + if !is_author && !is_admin { + return Err(AppError::NoPower); + } + + // Cascade delete related records + models::pull_request::PullRequestCommit::delete_many() + .filter(models::pull_request::pull_request_commit::Column::Repo.eq(repo.id)) + .filter(models::pull_request::pull_request_commit::Column::Number.eq(number)) + .exec(&self.db) + .await?; + models::pull_request::PullRequestReview::delete_many() + .filter(models::pull_request::pull_request_review::Column::Repo.eq(repo.id)) + .filter(models::pull_request::pull_request_review::Column::Number.eq(number)) + .exec(&self.db) + .await?; + models::pull_request::PullRequestReviewComment::delete_many() + .filter(models::pull_request::pull_request_review_comment::Column::Repo.eq(repo.id)) + .filter(models::pull_request::pull_request_review_comment::Column::Number.eq(number)) + .exec(&self.db) + .await?; + + pull_request::Entity::delete_by_id((repo.id, number)) + .exec(&self.db) + .await?; + + super::invalidate_pr_cache(&self.cache, repo.id, number).await; + + let pr_title = pr.title.clone(); + let _ = self + .project_log_activity( + repo.project, + Some(repo.id), + user_uid, + ActivityLogParams { + event_type: "pr_delete".to_string(), + title: format!( + "{} deleted pull request #{}: {}", + user_uid, number, pr_title + ), + repo_id: Some(repo.id), + content: Some(pr_title.clone()), + event_id: None, + event_sub_id: Some(number), + metadata: Some(serde_json::json!({ + "pr_number": number, + "pr_title": pr_title, + "base": pr.base, + "head": pr.head, + })), + is_private: false, + }, + ) + .await; + + Ok(()) + } + + /// Get PR summary counts. 
    /// Get PR summary counts (total / open / merged / closed) for a repo.
    ///
    /// NOTE(review): issues four separate COUNT queries; a single grouped
    /// query would do — fine at current scale, flagging for awareness.
    pub async fn pull_request_summary(
        &self,
        namespace: String,
        repo_name: String,
        ctx: &Session,
    ) -> Result<PullRequestSummaryResponse, AppError> {
        let repo = self.utils_find_repo(namespace, repo_name, ctx).await?;

        let total: u64 = pull_request::Entity::find()
            .filter(pull_request::Column::Repo.eq(repo.id))
            .count(&self.db)
            .await?;
        let open: u64 = pull_request::Entity::find()
            .filter(pull_request::Column::Repo.eq(repo.id))
            .filter(pull_request::Column::Status.eq(PrStatus::Open.to_string()))
            .count(&self.db)
            .await?;
        let merged: u64 = pull_request::Entity::find()
            .filter(pull_request::Column::Repo.eq(repo.id))
            .filter(pull_request::Column::Status.eq(PrStatus::Merged.to_string()))
            .count(&self.db)
            .await?;
        let closed: u64 = pull_request::Entity::find()
            .filter(pull_request::Column::Repo.eq(repo.id))
            .filter(pull_request::Column::Status.eq(PrStatus::Closed.to_string()))
            .count(&self.db)
            .await?;

        Ok(PullRequestSummaryResponse {
            total,
            open,
            merged,
            closed,
        })
    }

    /// Shared close/reopen implementation.
    ///
    /// Permission: PR author OR project admin/owner (after the repo-level
    /// `utils_check_repo_admin` gate). Merged PRs cannot be modified. On
    /// success: invalidates the PR cache and logs `pr_close`/`pr_reopen`
    /// (best-effort) depending on the target status.
    async fn pr_set_status(
        &self,
        namespace: String,
        repo_name: String,
        number: i64,
        status: PrStatus,
        ctx: &Session,
    ) -> Result<PullRequestResponse, AppError> {
        let user_uid = ctx.user().ok_or(AppError::Unauthorized)?;
        let repo: repo::Model = self
            .utils_check_repo_admin(namespace.clone(), repo_name.clone(), ctx)
            .await?;

        let pr = pull_request::Entity::find()
            .filter(pull_request::Column::Repo.eq(repo.id))
            .filter(pull_request::Column::Number.eq(number))
            .one(&self.db)
            .await?
            .ok_or(AppError::NotFound("Pull request not found".to_string()))?;

        // Permission: author OR admin/owner
        let is_author = pr.author == user_uid;
        let member = project_members::Entity::find()
            .filter(project_members::Column::Project.eq(repo.project))
            .filter(project_members::Column::User.eq(user_uid))
            .one(&self.db)
            .await?
            .ok_or(AppError::NoPower)?;
        let role = member.scope_role().map_err(|_| AppError::RoleParseError)?;
        let is_admin = role == MemberRole::Admin || role == MemberRole::Owner;

        if !is_author && !is_admin {
            return Err(AppError::NoPower);
        }

        // Merged is a terminal state.
        if pr.status == PrStatus::Merged.to_string() {
            return Err(AppError::BadRequest(
                "Cannot modify a merged pull request".to_string(),
            ));
        }

        let mut active: pull_request::ActiveModel = pr.clone().into();
        active.status = Set(status.to_string());
        active.updated_at = Set(Utc::now());
        let model = active.update(&self.db).await?;

        super::invalidate_pr_cache(&self.cache, repo.id, number).await;

        // Best-effort activity log; the event type mirrors the new status.
        let actor_username = user::Entity::find_by_id(user_uid)
            .one(&self.db)
            .await
            .ok()
            .flatten()
            .map(|u| u.username)
            .unwrap_or_default();
        let event_type = if status == PrStatus::Closed {
            "pr_close"
        } else {
            "pr_reopen"
        };
        let _ = self
            .project_log_activity(
                repo.project,
                Some(repo.id),
                user_uid,
                super::super::project::activity::ActivityLogParams {
                    event_type: event_type.to_string(),
                    title: format!(
                        "{} {} pull request #{}",
                        actor_username,
                        if status == PrStatus::Closed {
                            "closed"
                        } else {
                            "reopened"
                        },
                        number
                    ),
                    repo_id: Some(repo.id),
                    content: Some(model.title.clone()),
                    event_id: None,
                    event_sub_id: Some(number),
                    metadata: None,
                    is_private: false,
                },
            )
            .await;

        Ok(PullRequestResponse::from(model))
    }

    /// List all commits in a pull request (from base..head).
    ///
    /// Resolves base/head to refs (accepting either bare branch names or
    /// fully-qualified `refs/…` names), then walks `base..head` on a blocking
    /// thread. The log is capped at 500 commits.
    pub async fn pr_commits_list(
        &self,
        namespace: String,
        repo_name: String,
        pr_number: i64,
        ctx: &Session,
    ) -> Result<PrCommitsListResponse, AppError> {
        let repo = self
            .utils_find_repo(namespace.clone(), repo_name.clone(), ctx)
            .await?;

        let pr = pull_request::Entity::find()
            .filter(pull_request::Column::Repo.eq(repo.id))
            .filter(pull_request::Column::Number.eq(pr_number))
            .one(&self.db)
            .await?
            .ok_or(AppError::NotFound("Pull request not found".to_string()))?;

        // Clone repo once so it can be used in both spawn_blocking calls
        let repo_for_base = repo.clone();
        let repo_for_commits = repo.clone();

        // git2 operations are synchronous — keep them off the async runtime.
        let (base_oid, head_oid) = tokio::task::spawn_blocking({
            let pr = pr.clone();
            move || {
                let domain = crate::git::GitDomain::from_model(repo_for_base)?;

                // Accept both bare branch names and fully-qualified refs.
                let base_ref = if pr.base.starts_with("refs/") {
                    pr.base.clone()
                } else {
                    format!("refs/heads/{}", pr.base)
                };
                let head_ref = if pr.head.starts_with("refs/") {
                    pr.head.clone()
                } else {
                    format!("refs/heads/{}", pr.head)
                };

                let base_oid = domain
                    .ref_target(&base_ref)
                    .map_err(|e| crate::git::GitError::Internal(e.to_string()))?
                    .ok_or_else(|| {
                        crate::git::GitError::NotFound(format!(
                            "Base branch '{}' not found",
                            pr.base
                        ))
                    })?;

                let head_oid = domain
                    .ref_target(&head_ref)
                    .map_err(|e| crate::git::GitError::Internal(e.to_string()))?
                    .ok_or_else(|| {
                        crate::git::GitError::NotFound(format!(
                            "Head branch '{}' not found",
                            pr.head
                        ))
                    })?;

                Ok::<_, crate::git::GitError>((base_oid, head_oid))
            }
        })
        .await
        .map_err(|e| AppError::InternalServerError(format!("Task join error: {e}")))?
        .map_err(AppError::from)?;

        // Second blocking hop: walk the base..head range (capped at 500).
        let commits = tokio::task::spawn_blocking(move || {
            let domain = crate::git::GitDomain::from_model(repo_for_commits)?;
            let range = format!("{}..{}", base_oid, head_oid);
            let metas = domain.commit_log(Some(&range), 0, 500)?;
            Ok::<_, crate::git::GitError>(metas)
        })
        .await
        .map_err(|e| AppError::InternalServerError(format!("Task join error: {e}")))?
        .map_err(AppError::from)?;

        let commits: Vec<PrCommitResponse> =
            commits.into_iter().map(PrCommitResponse::from).collect();

        Ok(PrCommitsListResponse { commits })
    }

    /// Get the side-by-side diff for a pull request.
    ///
    /// Resolves the PR's base and head branch refs to commit OIDs and
    /// generates a side-by-side diff suitable for UI rendering.
    /// Generate a side-by-side diff for a PR by resolving its base/head
    /// branches to OIDs and delegating to `git_diff_side_by_side`.
    ///
    /// NOTE(review): unlike `pr_commits_list`, this calls `branch_target`
    /// with the bare `pr.base`/`pr.head` and does not handle `refs/`-prefixed
    /// names — confirm whether PRs can store fully-qualified refs here.
    pub async fn pr_diff_side_by_side(
        &self,
        namespace: String,
        repo_name: String,
        pr_number: i64,
        query: SideBySideDiffQuery,
        ctx: &Session,
    ) -> Result<SideBySideDiffResponse, AppError> {
        let repo = self
            .utils_find_repo(namespace.clone(), repo_name.clone(), ctx)
            .await?;

        let pr = pull_request::Entity::find()
            .filter(pull_request::Column::Repo.eq(repo.id))
            .filter(pull_request::Column::Number.eq(pr_number))
            .one(&self.db)
            .await?
            .ok_or(AppError::NotFound("Pull request not found".to_string()))?;

        // Branch resolution is synchronous git work — run it off-runtime.
        let (base_oid, head_oid) = tokio::task::spawn_blocking(move || {
            let domain = crate::git::GitDomain::from_model(repo)?;

            let base_oid = domain
                .branch_target(&pr.base)
                .map_err(|e| crate::git::GitError::Internal(e.to_string()))?
                .ok_or_else(|| {
                    crate::git::GitError::NotFound(format!("Branch '{}' not found", pr.base))
                })?;

            let head_oid = domain
                .branch_target(&pr.head)
                .map_err(|e| crate::git::GitError::Internal(e.to_string()))?
                .ok_or_else(|| {
                    crate::git::GitError::NotFound(format!("Branch '{}' not found", pr.head))
                })?;

            Ok::<_, crate::git::GitError>((base_oid, head_oid))
        })
        .await
        .map_err(|e| AppError::InternalServerError(format!("Task join error: {e}")))?
        .map_err(AppError::from)?;

        // Rebuild the query with pinned OIDs; pathspec/context pass through.
        let diff_query = SideBySideDiffQuery {
            base: base_oid.to_string(),
            head: head_oid.to_string(),
            pathspec: query.pathspec,
            context_lines: query.context_lines,
        };

        self.git_diff_side_by_side(namespace, repo_name, diff_query, ctx)
            .await
    }
}
diff --git a/libs/service/pull_request/review.rs b/libs/service/pull_request/review.rs
new file mode 100644
index 0000000..558f871
--- /dev/null
+++ b/libs/service/pull_request/review.rs
@@ -0,0 +1,328 @@
use crate::AppService;
use crate::error::AppError;
use crate::project::activity::ActivityLogParams;
use chrono::Utc;
use models::projects::{MemberRole, project_members};
use models::pull_request::{ReviewState, pull_request, pull_request_review};
use models::repos::repo;
use sea_orm::*;
use serde::{Deserialize, Serialize};
use session::Session;
use utoipa::ToSchema;
use uuid::Uuid;

/// Request body for submitting (or re-submitting) a review.
#[derive(Debug, Clone, Deserialize, ToSchema)]
pub struct ReviewSubmitRequest {
    pub body: Option<String>,
    // Parsed into `ReviewState`: pending / approved / changes_requested / comment.
    pub state: String,
}

/// Request body for editing a review's text.
#[derive(Debug, Clone, Deserialize, ToSchema)]
pub struct ReviewUpdateRequest {
    pub body: Option<String>,
}

/// API representation of a PR review; `reviewer_username` is resolved
/// separately and defaults to `None` in the `From` conversion below.
#[derive(Debug, Clone, Serialize, ToSchema)]
pub struct ReviewResponse {
    pub repo: Uuid,
    pub number: i64,
    pub reviewer: Uuid,
    pub reviewer_username: Option<String>,
    pub state: String,
    pub body: Option<String>,
    // None while the review is still pending.
    pub submitted_at: Option<chrono::DateTime<Utc>>,
    pub created_at: chrono::DateTime<Utc>,
    pub updated_at: chrono::DateTime<Utc>,
}

impl From<pull_request_review::Model> for ReviewResponse {
    fn from(r: pull_request_review::Model) -> Self {
        Self {
            repo: r.repo,
            number: r.number,
            reviewer: r.reviewer,
            reviewer_username: None,
            state: r.state,
            body: r.body,
            submitted_at: r.submitted_at,
            created_at: r.created_at,
            updated_at: r.updated_at,
        }
    }
}

#[derive(Debug, Clone, Serialize, ToSchema)]
pub struct ReviewListResponse {
    pub reviews: Vec<ReviewResponse>,
}

impl AppService {
    /// List all reviews on a pull request.
    /// List all reviews on a pull request, ordered by submission time, with
    /// reviewer usernames resolved in one batched query.
    pub async fn review_list(
        &self,
        namespace: String,
        repo_name: String,
        pr_number: i64,
        ctx: &Session,
    ) -> Result<ReviewListResponse, AppError> {
        let repo = self.utils_find_repo(namespace, repo_name, ctx).await?;

        let reviews = pull_request_review::Entity::find()
            .filter(pull_request_review::Column::Repo.eq(repo.id))
            .filter(pull_request_review::Column::Number.eq(pr_number))
            .order_by_asc(pull_request_review::Column::SubmittedAt)
            .all(&self.db)
            .await?;

        // Batch-resolve reviewer usernames.
        let reviewer_ids: Vec<Uuid> = reviews.iter().map(|r| r.reviewer).collect();
        let reviewers = if reviewer_ids.is_empty() {
            vec![]
        } else {
            models::users::user::Entity::find()
                .filter(models::users::user::Column::Uid.is_in(reviewer_ids))
                .all(&self.db)
                .await?
        };

        let responses: Vec<ReviewResponse> = reviews
            .into_iter()
            .map(|r| {
                let username = reviewers
                    .iter()
                    .find(|u| u.uid == r.reviewer)
                    .map(|u| u.username.clone());
                ReviewResponse {
                    reviewer_username: username,
                    ..ReviewResponse::from(r)
                }
            })
            .collect();

        Ok(ReviewListResponse { reviews: responses })
    }

    /// Submit a review on a pull request.
    /// Any project member can submit a review.
    /// Updates existing pending review if found, otherwise creates new.
    ///
    /// Upsert keyed on (repo, number, reviewer): one review per reviewer per
    /// PR. A `pending` state leaves `submitted_at` unset; any other state
    /// stamps it with now. Merged PRs cannot be reviewed.
    pub async fn review_submit(
        &self,
        namespace: String,
        repo_name: String,
        pr_number: i64,
        request: ReviewSubmitRequest,
        ctx: &Session,
    ) -> Result<ReviewResponse, AppError> {
        let user_uid = ctx.user().ok_or(AppError::Unauthorized)?;
        let repo = self.utils_find_repo(namespace, repo_name, ctx).await?;

        // Verify PR exists
        let pr = pull_request::Entity::find()
            .filter(pull_request::Column::Repo.eq(repo.id))
            .filter(pull_request::Column::Number.eq(pr_number))
            .one(&self.db)
            .await?
            .ok_or(AppError::NotFound("Pull request not found".to_string()))?;

        // Cannot review merged PRs
        if pr.status == models::pull_request::PrStatus::Merged.to_string() {
            return Err(AppError::BadRequest(
                "Cannot review a merged pull request".to_string(),
            ));
        }

        // Parse and validate review state
        let state: ReviewState = request.state.parse().map_err(|_| {
            AppError::BadRequest(
                "Invalid review state, expected: pending, approved, changes_requested, comment"
                    .to_string(),
            )
        })?;

        let now = Utc::now();
        let submitted_at = if state == ReviewState::Pending {
            None
        } else {
            Some(now)
        };

        // Check if reviewer already has a review
        let existing = pull_request_review::Entity::find()
            .filter(pull_request_review::Column::Repo.eq(repo.id))
            .filter(pull_request_review::Column::Number.eq(pr_number))
            .filter(pull_request_review::Column::Reviewer.eq(user_uid))
            .one(&self.db)
            .await?;

        let model = if let Some(existing) = existing {
            // Update path: body is only overwritten when provided.
            let mut active: pull_request_review::ActiveModel = existing.into();
            active.state = Set(state.to_string());
            if let Some(body) = request.body {
                active.body = Set(Some(body));
            }
            active.submitted_at = Set(submitted_at);
            active.updated_at = Set(now);
            active.update(&self.db).await?
        } else {
            let active = pull_request_review::ActiveModel {
                repo: Set(repo.id),
                number: Set(pr_number),
                reviewer: Set(user_uid),
                state: Set(state.to_string()),
                body: Set(request.body),
                submitted_at: Set(submitted_at),
                created_at: Set(now),
                updated_at: Set(now),
                ..Default::default()
            };
            active.insert(&self.db).await?
        };

        super::invalidate_pr_cache(&self.cache, repo.id, pr_number).await;

        // Username lookup and activity log are best-effort.
        let username = models::users::user::Entity::find_by_id(user_uid)
            .one(&self.db)
            .await
            .ok()
            .flatten()
            .map(|u| u.username);

        let reviewer_name = username.clone().unwrap_or_else(|| user_uid.to_string());
        let _ = self
            .project_log_activity(
                repo.project,
                Some(repo.id),
                user_uid,
                ActivityLogParams {
                    event_type: "pr_review".to_string(),
                    title: format!("{} reviewed PR #{}: {}", reviewer_name, pr_number, state),
                    repo_id: Some(repo.id),
                    content: None,
                    event_id: None,
                    event_sub_id: Some(pr_number),
                    metadata: Some(serde_json::json!({
                        "pr_number": pr_number,
                        "review_state": state.to_string(),
                    })),
                    is_private: false,
                },
            )
            .await;
        Ok(ReviewResponse {
            reviewer_username: username.clone(),
            ..ReviewResponse::from(model.clone())
        })
    }

    /// Update a review body. Only the reviewer themselves OR admin/owner.
    ///
    /// NOTE(review): the lookup below filters by `Reviewer.eq(user_uid)`, so
    /// only the caller's OWN review can ever be found — `is_reviewer` is
    /// always true when a row exists, and the admin/owner branch can never
    /// apply to someone else's review. Confirm whether admins were meant to
    /// edit other reviewers' reviews.
    pub async fn review_update(
        &self,
        namespace: String,
        repo_name: String,
        pr_number: i64,
        request: ReviewUpdateRequest,
        ctx: &Session,
    ) -> Result<ReviewResponse, AppError> {
        let user_uid = ctx.user().ok_or(AppError::Unauthorized)?;
        let repo = self.utils_find_repo(namespace, repo_name, ctx).await?;

        let review = pull_request_review::Entity::find()
            .filter(pull_request_review::Column::Repo.eq(repo.id))
            .filter(pull_request_review::Column::Number.eq(pr_number))
            .filter(pull_request_review::Column::Reviewer.eq(user_uid))
            .one(&self.db)
            .await?
            .ok_or(AppError::NotFound("Review not found".to_string()))?;

        // Permission: reviewer OR admin/owner
        let member = project_members::Entity::find()
            .filter(project_members::Column::Project.eq(repo.project))
            .filter(project_members::Column::User.eq(user_uid))
            .one(&self.db)
            .await?
            .ok_or(AppError::NoPower)?;
        let role = member.scope_role().map_err(|_| AppError::RoleParseError)?;
        let is_reviewer = review.reviewer == user_uid;
        let is_admin = role == MemberRole::Admin || role == MemberRole::Owner;

        if !is_reviewer && !is_admin {
            return Err(AppError::NoPower);
        }

        let mut active: pull_request_review::ActiveModel = review.clone().into();
        if let Some(body) = request.body {
            active.body = Set(Some(body));
        }
        active.updated_at = Set(Utc::now());
        let model = active.update(&self.db).await?;

        super::invalidate_pr_cache(&self.cache, repo.id, pr_number).await;

        let username = models::users::user::Entity::find_by_id(model.reviewer)
            .one(&self.db)
            .await
            .ok()
            .flatten()
            .map(|u| u.username);

        Ok(ReviewResponse {
            reviewer_username: username,
            ..ReviewResponse::from(model)
        })
    }

    /// Delete a review. Only the reviewer themselves OR admin/owner.
    ///
    /// Takes the target `reviewer_id` explicitly, so admins CAN delete other
    /// reviewers' reviews (unlike `review_update`). Cascade-deletes the
    /// review's comments first, then the review row, then busts the PR cache.
    pub async fn review_delete(
        &self,
        namespace: String,
        repo_name: String,
        pr_number: i64,
        reviewer_id: Uuid,
        ctx: &Session,
    ) -> Result<(), AppError> {
        let user_uid = ctx.user().ok_or(AppError::Unauthorized)?;
        let repo: repo::Model = self
            .utils_check_repo_admin(namespace.clone(), repo_name.clone(), ctx)
            .await?;

        let review = pull_request_review::Entity::find()
            .filter(pull_request_review::Column::Repo.eq(repo.id))
            .filter(pull_request_review::Column::Number.eq(pr_number))
            .filter(pull_request_review::Column::Reviewer.eq(reviewer_id))
            .one(&self.db)
            .await?
            .ok_or(AppError::NotFound("Review not found".to_string()))?;

        // Permission: reviewer themselves OR admin/owner
        let is_reviewer = review.reviewer == user_uid;
        if !is_reviewer {
            let member = project_members::Entity::find()
                .filter(project_members::Column::Project.eq(repo.project))
                .filter(project_members::Column::User.eq(user_uid))
                .one(&self.db)
                .await?
                .ok_or(AppError::NoPower)?;
            let role = member.scope_role().map_err(|_| AppError::RoleParseError)?;
            if role != MemberRole::Admin && role != MemberRole::Owner {
                return Err(AppError::NoPower);
            }
        }

        // Cascade delete review comments
        // NOTE(review): assumes the comment `Review` column stores the
        // reviewer's UUID (matching the review's composite key) — confirm
        // against the `pull_request_review_comment` schema.
        models::pull_request::PullRequestReviewComment::delete_many()
            .filter(models::pull_request::pull_request_review_comment::Column::Repo.eq(repo.id))
            .filter(models::pull_request::pull_request_review_comment::Column::Number.eq(pr_number))
            .filter(
                models::pull_request::pull_request_review_comment::Column::Review.eq(reviewer_id),
            )
            .exec(&self.db)
            .await?;

        pull_request_review::Entity::delete_by_id((repo.id, pr_number, reviewer_id))
            .exec(&self.db)
            .await?;

        super::invalidate_pr_cache(&self.cache, repo.id, pr_number).await;

        Ok(())
    }
}
diff --git a/libs/service/pull_request/review_comment.rs b/libs/service/pull_request/review_comment.rs
new file mode 100644
index 0000000..276c9da
--- /dev/null
+++ b/libs/service/pull_request/review_comment.rs
@@ -0,0 +1,561 @@
use crate::AppService;
use crate::error::AppError;
use chrono::Utc;
use models::projects::{MemberRole, project_members};
use models::pull_request::{pull_request, pull_request_review_comment};
use sea_orm::*;
use serde::{Deserialize, Serialize};
use session::Session;
use utoipa::ToSchema;
use uuid::Uuid;

/// Request body for creating a review comment (inline when `path`/`line`
/// are set, general otherwise).
#[derive(Debug, Clone, Deserialize, ToSchema)]
pub struct ReviewCommentCreateRequest {
    pub body: String,
    pub review: Option<Uuid>,
    pub path: Option<String>,
    pub side: Option<String>,
    pub line: Option<i64>,
    pub old_line: Option<i64>,
    /// ID of the parent comment to reply to (null = root comment).
    pub in_reply_to: Option<i64>,
}

/// Request body for editing a review comment's text.
#[derive(Debug, Clone, Deserialize, ToSchema)]
pub struct ReviewCommentUpdateRequest {
    pub body: String,
}

/// Body for replying to an existing review comment thread.
+#[derive(Debug, Clone, Deserialize, ToSchema)] +pub struct ReviewCommentReplyRequest { + pub body: String, +} + +#[derive(Debug, Clone, Serialize, ToSchema)] +pub struct ReviewCommentResponse { + pub repo: Uuid, + pub number: i64, + pub id: i64, + pub review: Option, + pub path: Option, + pub side: Option, + pub line: Option, + pub old_line: Option, + pub body: String, + pub author: Uuid, + pub author_username: Option, + pub resolved: bool, + pub in_reply_to: Option, + pub created_at: chrono::DateTime, + pub updated_at: chrono::DateTime, +} + +impl From for ReviewCommentResponse { + fn from(c: pull_request_review_comment::Model) -> Self { + Self { + repo: c.repo, + number: c.number, + id: c.id, + review: c.review, + path: c.path, + side: c.side, + line: c.line, + old_line: c.old_line, + body: c.body, + author: c.author, + author_username: None, + resolved: c.resolved, + in_reply_to: c.in_reply_to, + created_at: c.created_at, + updated_at: c.updated_at, + } + } +} + +#[derive(Debug, Clone, Deserialize, ToSchema)] +pub struct ReviewCommentListQuery { + /// Filter comments by file path (e.g. "src/main.rs"). + pub path: Option, + /// Filter by resolved status. Omit to return all comments. + pub resolved: Option, + /// If true, only return inline comments (those with a `path` set). + /// If false, only return general comments (no path). + /// Omit to return all comments. + pub file_only: Option, +} + +/// A review comment thread: one root comment plus all its replies. +#[derive(Debug, Clone, Serialize, ToSchema)] +pub struct ReviewCommentThread { + pub root: ReviewCommentResponse, + pub replies: Vec, +} + +#[derive(Debug, Clone, Serialize, ToSchema)] +pub struct ReviewCommentListResponse { + /// Flat list of all comments (kept for backward compatibility). + pub comments: Vec, + /// Comments grouped into threads (root comments with their replies). 
+ pub threads: Vec, + pub total: i64, +} + +impl AppService { + /// List review comments on a pull request, optionally filtered and grouped into threads. + pub async fn review_comment_list( + &self, + namespace: String, + repo_name: String, + pr_number: i64, + query: ReviewCommentListQuery, + ctx: &Session, + ) -> Result { + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + + let mut stmt = pull_request_review_comment::Entity::find() + .filter(pull_request_review_comment::Column::Repo.eq(repo.id)) + .filter(pull_request_review_comment::Column::Number.eq(pr_number)) + .order_by_asc(pull_request_review_comment::Column::Path) + .order_by_asc(pull_request_review_comment::Column::Line) + .order_by_asc(pull_request_review_comment::Column::CreatedAt); + + if let Some(ref path) = query.path { + stmt = stmt.filter(pull_request_review_comment::Column::Path.eq(path.clone())); + } + if let Some(resolved) = query.resolved { + stmt = stmt.filter(pull_request_review_comment::Column::Resolved.eq(resolved)); + } + if query.file_only == Some(true) { + stmt = stmt.filter(pull_request_review_comment::Column::Path.is_not_null()); + } else if query.file_only == Some(false) { + stmt = stmt.filter(pull_request_review_comment::Column::Path.is_null()); + } + + let comments = stmt.all(&self.db).await?; + + let total = comments.len() as i64; + + let author_ids: Vec = comments.iter().map(|c| c.author).collect(); + let authors = if author_ids.is_empty() { + vec![] + } else { + models::users::user::Entity::find() + .filter(models::users::user::Column::Uid.is_in(author_ids)) + .all(&self.db) + .await? + }; + + let responses: Vec = comments + .iter() + .map(|c| { + let username = authors + .iter() + .find(|u| u.uid == c.author) + .map(|u| u.username.clone()); + ReviewCommentResponse { + author_username: username, + ..ReviewCommentResponse::from(c.clone()) + } + }) + .collect(); + + // Group into threads: root comments (in_reply_to IS NULL) with their replies. 
+ let mut threads: Vec = Vec::new(); + + // Build a map of parent_comment_id → list of reply responses. + let mut reply_map: std::collections::HashMap> = + std::collections::HashMap::new(); + + for comment in &responses { + if let Some(parent_id) = comment.in_reply_to { + reply_map + .entry(parent_id) + .or_default() + .push(comment.clone()); + } + } + + // Root comments are those with no parent. + for comment in &responses { + if comment.in_reply_to.is_none() { + let replies = reply_map.remove(&comment.id).unwrap_or_default(); + threads.push(ReviewCommentThread { + root: comment.clone(), + replies, + }); + } + } + + // Sort threads: by file path, then by line number of root comment. + threads.sort_by(|a, b| { + let path_cmp = a.root.path.cmp(&b.root.path); + if path_cmp != std::cmp::Ordering::Equal { + return path_cmp; + } + a.root.line.cmp(&b.root.line) + }); + + Ok(ReviewCommentListResponse { + comments: responses, + threads, + total, + }) + } + + pub async fn review_comment_create( + &self, + namespace: String, + repo_name: String, + pr_number: i64, + request: ReviewCommentCreateRequest, + ctx: &Session, + ) -> Result { + let user_uid = ctx.user().ok_or(AppError::Unauthorized)?; + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + + // Verify PR exists + let pr = pull_request::Entity::find() + .filter(pull_request::Column::Repo.eq(repo.id)) + .filter(pull_request::Column::Number.eq(pr_number)) + .one(&self.db) + .await? 
+ .ok_or(AppError::NotFound("Pull request not found".to_string()))?; + + if pr.status == models::pull_request::PrStatus::Merged.to_string() { + return Err(AppError::BadRequest( + "Cannot comment on a merged pull request".to_string(), + )); + } + + // Get next comment id for this PR + let max_id: Option> = pull_request_review_comment::Entity::find() + .filter(pull_request_review_comment::Column::Repo.eq(repo.id)) + .filter(pull_request_review_comment::Column::Number.eq(pr_number)) + .select_only() + .column_as(pull_request_review_comment::Column::Id.max(), "max_id") + .into_tuple::>() + .one(&self.db) + .await?; + let comment_id = max_id.flatten().unwrap_or(0) + 1; + + let now = Utc::now(); + let active = pull_request_review_comment::ActiveModel { + repo: Set(repo.id), + number: Set(pr_number), + id: Set(comment_id), + review: Set(request.review), + path: Set(request.path), + side: Set(request.side), + line: Set(request.line), + old_line: Set(request.old_line), + body: Set(request.body), + author: Set(user_uid), + resolved: Set(false), + in_reply_to: Set(request.in_reply_to), + created_at: Set(now), + updated_at: Set(now), + }; + let model = active.insert(&self.db).await?; + + super::invalidate_pr_cache(&self.cache, repo.id, pr_number).await; + + let username = models::users::user::Entity::find_by_id(user_uid) + .one(&self.db) + .await + .ok() + .flatten() + .map(|u| u.username); + + Ok(ReviewCommentResponse { + author_username: username, + ..ReviewCommentResponse::from(model) + }) + } + + pub async fn review_comment_update( + &self, + namespace: String, + repo_name: String, + pr_number: i64, + comment_id: i64, + request: ReviewCommentUpdateRequest, + ctx: &Session, + ) -> Result { + let user_uid = ctx.user().ok_or(AppError::Unauthorized)?; + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + + let comment = + pull_request_review_comment::Entity::find_by_id((repo.id, pr_number, comment_id)) + .one(&self.db) + .await? 
+ .ok_or(AppError::NotFound("Comment not found".to_string()))?; + + // Permission: author OR admin/owner + let is_author = comment.author == user_uid; + let member = project_members::Entity::find() + .filter(project_members::Column::Project.eq(repo.project)) + .filter(project_members::Column::User.eq(user_uid)) + .one(&self.db) + .await? + .ok_or(AppError::NoPower)?; + let role = member.scope_role().map_err(|_| AppError::RoleParseError)?; + let is_admin = role == MemberRole::Admin || role == MemberRole::Owner; + + if !is_author && !is_admin { + return Err(AppError::NoPower); + } + + let mut active: pull_request_review_comment::ActiveModel = comment.clone().into(); + active.body = Set(request.body); + active.updated_at = Set(Utc::now()); + let model = active.update(&self.db).await?; + + super::invalidate_pr_cache(&self.cache, repo.id, pr_number).await; + + let username = models::users::user::Entity::find_by_id(model.author) + .one(&self.db) + .await + .ok() + .flatten() + .map(|u| u.username); + + Ok(ReviewCommentResponse { + author_username: username, + ..ReviewCommentResponse::from(model) + }) + } + + pub async fn review_comment_delete( + &self, + namespace: String, + repo_name: String, + pr_number: i64, + comment_id: i64, + ctx: &Session, + ) -> Result<(), AppError> { + let user_uid = ctx.user().ok_or(AppError::Unauthorized)?; + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + + let comment = + pull_request_review_comment::Entity::find_by_id((repo.id, pr_number, comment_id)) + .one(&self.db) + .await? + .ok_or(AppError::NotFound("Comment not found".to_string()))?; + + // Permission: author OR admin/owner + let is_author = comment.author == user_uid; + let member = project_members::Entity::find() + .filter(project_members::Column::Project.eq(repo.project)) + .filter(project_members::Column::User.eq(user_uid)) + .one(&self.db) + .await? 
+ .ok_or(AppError::NoPower)?; + let role = member.scope_role().map_err(|_| AppError::RoleParseError)?; + let is_admin = role == MemberRole::Admin || role == MemberRole::Owner; + + if !is_author && !is_admin { + return Err(AppError::NoPower); + } + + pull_request_review_comment::Entity::delete_by_id((repo.id, pr_number, comment_id)) + .exec(&self.db) + .await?; + + super::invalidate_pr_cache(&self.cache, repo.id, pr_number).await; + + Ok(()) + } + + /// Mark a review comment (root of a thread) as resolved. + pub async fn review_comment_resolve( + &self, + namespace: String, + repo_name: String, + pr_number: i64, + comment_id: i64, + ctx: &Session, + ) -> Result { + let user_uid = ctx.user().ok_or(AppError::Unauthorized)?; + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + + let comment = + pull_request_review_comment::Entity::find_by_id((repo.id, pr_number, comment_id)) + .one(&self.db) + .await? + .ok_or(AppError::NotFound("Comment not found".to_string()))?; + + self.check_comment_permission(&repo, &comment, user_uid) + .await?; + + let mut active: pull_request_review_comment::ActiveModel = comment.clone().into(); + active.resolved = Set(true); + active.updated_at = Set(Utc::now()); + let model = active.update(&self.db).await?; + + super::invalidate_pr_cache(&self.cache, repo.id, pr_number).await; + + let username = models::users::user::Entity::find_by_id(model.author) + .one(&self.db) + .await + .ok() + .flatten() + .map(|u| u.username); + + Ok(ReviewCommentResponse { + author_username: username, + ..ReviewCommentResponse::from(model) + }) + } + + /// Mark a review comment thread as unresolved. 
+ pub async fn review_comment_unresolve( + &self, + namespace: String, + repo_name: String, + pr_number: i64, + comment_id: i64, + ctx: &Session, + ) -> Result { + let user_uid = ctx.user().ok_or(AppError::Unauthorized)?; + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + + let comment = + pull_request_review_comment::Entity::find_by_id((repo.id, pr_number, comment_id)) + .one(&self.db) + .await? + .ok_or(AppError::NotFound("Comment not found".to_string()))?; + + self.check_comment_permission(&repo, &comment, user_uid) + .await?; + + let mut active: pull_request_review_comment::ActiveModel = comment.clone().into(); + active.resolved = Set(false); + active.updated_at = Set(Utc::now()); + let model = active.update(&self.db).await?; + + super::invalidate_pr_cache(&self.cache, repo.id, pr_number).await; + + let username = models::users::user::Entity::find_by_id(model.author) + .one(&self.db) + .await + .ok() + .flatten() + .map(|u| u.username); + + Ok(ReviewCommentResponse { + author_username: username, + ..ReviewCommentResponse::from(model) + }) + } + + /// Reply to an existing review comment, creating a threaded reply. + pub async fn review_comment_reply( + &self, + namespace: String, + repo_name: String, + pr_number: i64, + parent_comment_id: i64, + request: ReviewCommentReplyRequest, + ctx: &Session, + ) -> Result { + let user_uid = ctx.user().ok_or(AppError::Unauthorized)?; + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + + // Verify PR exists and is not merged + let pr = pull_request::Entity::find() + .filter(pull_request::Column::Repo.eq(repo.id)) + .filter(pull_request::Column::Number.eq(pr_number)) + .one(&self.db) + .await? 
+ .ok_or(AppError::NotFound("Pull request not found".to_string()))?; + + if pr.status == models::pull_request::PrStatus::Merged.to_string() { + return Err(AppError::BadRequest( + "Cannot comment on a merged pull request".to_string(), + )); + } + + // Verify parent comment exists + let parent = pull_request_review_comment::Entity::find_by_id(( + repo.id, + pr_number, + parent_comment_id, + )) + .one(&self.db) + .await? + .ok_or(AppError::NotFound("Parent comment not found".to_string()))?; + + // Get next comment id + let max_id: Option> = pull_request_review_comment::Entity::find() + .filter(pull_request_review_comment::Column::Repo.eq(repo.id)) + .filter(pull_request_review_comment::Column::Number.eq(pr_number)) + .select_only() + .column_as(pull_request_review_comment::Column::Id.max(), "max_id") + .into_tuple::>() + .one(&self.db) + .await?; + let comment_id = max_id.flatten().unwrap_or(0) + 1; + + let now = Utc::now(); + let active = pull_request_review_comment::ActiveModel { + repo: Set(repo.id), + number: Set(pr_number), + id: Set(comment_id), + review: Set(None), + path: Set(parent.path.clone()), + side: Set(parent.side.clone()), + line: Set(parent.line), + old_line: Set(parent.old_line), + body: Set(request.body), + author: Set(user_uid), + resolved: Set(false), + in_reply_to: Set(Some(parent.id)), + created_at: Set(now), + updated_at: Set(now), + }; + let model = active.insert(&self.db).await?; + + super::invalidate_pr_cache(&self.cache, repo.id, pr_number).await; + + let username = models::users::user::Entity::find_by_id(user_uid) + .one(&self.db) + .await + .ok() + .flatten() + .map(|u| u.username); + + Ok(ReviewCommentResponse { + author_username: username, + ..ReviewCommentResponse::from(model) + }) + } + + async fn check_comment_permission( + &self, + repo: &models::repos::repo::Model, + comment: &pull_request_review_comment::Model, + user_uid: Uuid, + ) -> Result<(), AppError> { + // Authors can always modify their own comments + if comment.author == 
user_uid { + return Ok(()); + } + + // Admins/owners can modify any comment + let member = project_members::Entity::find() + .filter(project_members::Column::Project.eq(repo.project)) + .filter(project_members::Column::User.eq(user_uid)) + .one(&self.db) + .await? + .ok_or(AppError::NoPower)?; + let role = member.scope_role().map_err(|_| AppError::RoleParseError)?; + let is_admin = role == MemberRole::Admin || role == MemberRole::Owner; + + if is_admin { + Ok(()) + } else { + Err(AppError::NoPower) + } + } +} diff --git a/libs/service/pull_request/review_request.rs b/libs/service/pull_request/review_request.rs new file mode 100644 index 0000000..db99188 --- /dev/null +++ b/libs/service/pull_request/review_request.rs @@ -0,0 +1,325 @@ +use crate::AppService; +use crate::error::AppError; +use chrono::Utc; +use models::projects::{MemberRole, project_members}; +use models::pull_request::{PrStatus, pull_request, pull_request_review_request}; +use sea_orm::*; +use serde::{Deserialize, Serialize}; +use session::Session; +use utoipa::ToSchema; +use uuid::Uuid; + +#[derive(Debug, Clone, Deserialize, ToSchema)] +pub struct ReviewRequestCreateRequest { + /// User ID of the reviewer to request. 
+ pub reviewer: Uuid, +} + +#[derive(Debug, Clone, Serialize, ToSchema)] +pub struct ReviewRequestResponse { + pub repo: Uuid, + pub number: i64, + pub reviewer: Uuid, + pub reviewer_username: Option, + pub requested_by: Uuid, + pub requested_by_username: Option, + pub requested_at: chrono::DateTime, + pub dismissed_at: Option>, + pub dismissed_by: Option, + pub dismissed_by_username: Option, +} + +impl From for ReviewRequestResponse { + fn from(m: pull_request_review_request::Model) -> Self { + Self { + repo: m.repo, + number: m.number, + reviewer: m.reviewer, + reviewer_username: None, + requested_by: m.requested_by, + requested_by_username: None, + requested_at: m.requested_at, + dismissed_at: m.dismissed_at, + dismissed_by: m.dismissed_by, + dismissed_by_username: None, + } + } +} + +#[derive(Debug, Clone, Serialize, ToSchema)] +pub struct ReviewRequestListResponse { + pub requests: Vec, + pub total: i64, +} + +impl AppService { + /// Request a review from a specific user for a pull request. + /// Any PR collaborator can request a review. + pub async fn review_request_create( + &self, + namespace: String, + repo_name: String, + pr_number: i64, + request: ReviewRequestCreateRequest, + ctx: &Session, + ) -> Result { + let user_uid = ctx.user().ok_or(AppError::Unauthorized)?; + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + + // Verify PR exists + let pr = pull_request::Entity::find() + .filter(pull_request::Column::Repo.eq(repo.id)) + .filter(pull_request::Column::Number.eq(pr_number)) + .one(&self.db) + .await? 
+ .ok_or(AppError::NotFound("Pull request not found".to_string()))?; + + if pr.status == PrStatus::Merged.to_string() { + return Err(AppError::BadRequest( + "Cannot request review on a merged pull request".to_string(), + )); + } + + // Check: reviewer must be a project member or collaborator + let is_member = project_members::Entity::find() + .filter(project_members::Column::Project.eq(repo.project)) + .filter(project_members::Column::User.eq(request.reviewer)) + .one(&self.db) + .await? + .is_some(); + + let is_collaborator = models::repos::repo_collaborator::Entity::find() + .filter(models::repos::repo_collaborator::Column::Repo.eq(repo.id)) + .filter(models::repos::repo_collaborator::Column::User.eq(request.reviewer)) + .one(&self.db) + .await? + .is_some(); + + if !is_member && !is_collaborator { + return Err(AppError::NoPower); + } + + let now = Utc::now(); + + // Upsert: update requested_at if request already exists + let existing = pull_request_review_request::Entity::find() + .filter(pull_request_review_request::Column::Repo.eq(repo.id)) + .filter(pull_request_review_request::Column::Number.eq(pr_number)) + .filter(pull_request_review_request::Column::Reviewer.eq(request.reviewer)) + .one(&self.db) + .await?; + + let model = if let Some(existing) = existing { + let mut active: pull_request_review_request::ActiveModel = existing.into(); + active.requested_by = Set(user_uid); + active.requested_at = Set(now); + active.dismissed_at = Set(None); + active.dismissed_by = Set(None); + active.update(&self.db).await? + } else { + let active = pull_request_review_request::ActiveModel { + repo: Set(repo.id), + number: Set(pr_number), + reviewer: Set(request.reviewer), + requested_by: Set(user_uid), + requested_at: Set(now), + dismissed_at: Set(None), + dismissed_by: Set(None), + }; + active.insert(&self.db).await? 
+ }; + + super::invalidate_pr_cache(&self.cache, repo.id, pr_number).await; + + // Load usernames + let reviewer_username = models::users::user::Entity::find_by_id(model.reviewer) + .one(&self.db) + .await + .ok() + .flatten() + .map(|u| u.username); + let requested_by_username = models::users::user::Entity::find_by_id(model.requested_by) + .one(&self.db) + .await + .ok() + .flatten() + .map(|u| u.username); + + Ok(ReviewRequestResponse { + reviewer_username, + requested_by_username, + ..ReviewRequestResponse::from(model) + }) + } + + /// List all review requests for a pull request. + pub async fn review_request_list( + &self, + namespace: String, + repo_name: String, + pr_number: i64, + ctx: &Session, + ) -> Result { + let _user_uid = ctx.user().ok_or(AppError::Unauthorized)?; + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + + let requests = pull_request_review_request::Entity::find() + .filter(pull_request_review_request::Column::Repo.eq(repo.id)) + .filter(pull_request_review_request::Column::Number.eq(pr_number)) + .order_by_asc(pull_request_review_request::Column::RequestedAt) + .all(&self.db) + .await?; + + let total = requests.len() as i64; + + // Batch load usernames + let all_ids: Vec = requests + .iter() + .flat_map(|r| { + std::iter::once(r.reviewer) + .chain(std::iter::once(r.requested_by)) + .chain(r.dismissed_by) + }) + .collect(); + let users = if all_ids.is_empty() { + vec![] + } else { + models::users::user::Entity::find() + .filter(models::users::user::Column::Uid.is_in(all_ids)) + .all(&self.db) + .await? 
+ }; + + let username_map: std::collections::HashMap = + users.into_iter().map(|u| (u.uid, u.username)).collect(); + + let responses: Vec = requests + .into_iter() + .map(|r| ReviewRequestResponse { + reviewer_username: username_map.get(&r.reviewer).cloned(), + requested_by_username: username_map.get(&r.requested_by).cloned(), + dismissed_by_username: r.dismissed_by.and_then(|id| username_map.get(&id).cloned()), + ..ReviewRequestResponse::from(r) + }) + .collect(); + + Ok(ReviewRequestListResponse { + requests: responses, + total, + }) + } + + pub async fn review_request_delete( + &self, + namespace: String, + repo_name: String, + pr_number: i64, + reviewer: Uuid, + ctx: &Session, + ) -> Result<(), AppError> { + let user_uid = ctx.user().ok_or(AppError::Unauthorized)?; + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + + let existing = + pull_request_review_request::Entity::find_by_id((repo.id, pr_number, reviewer)) + .one(&self.db) + .await? + .ok_or(AppError::NotFound("Review request not found".to_string()))?; + + // Permission: requested_by user OR admin/owner + let is_requester = existing.requested_by == user_uid; + let member = project_members::Entity::find() + .filter(project_members::Column::Project.eq(repo.project)) + .filter(project_members::Column::User.eq(user_uid)) + .one(&self.db) + .await? + .ok_or(AppError::NoPower)?; + let role = member.scope_role().map_err(|_| AppError::RoleParseError)?; + let is_admin = role == MemberRole::Admin || role == MemberRole::Owner; + + if !is_requester && !is_admin { + return Err(AppError::NoPower); + } + + pull_request_review_request::Entity::delete_by_id((repo.id, pr_number, reviewer)) + .exec(&self.db) + .await?; + + super::invalidate_pr_cache(&self.cache, repo.id, pr_number).await; + + Ok(()) + } + + /// Dismiss (mark as no longer needed) a pending review request. + /// Unlike delete, this records who dismissed it and when. 
+ pub async fn review_request_dismiss( + &self, + namespace: String, + repo_name: String, + pr_number: i64, + reviewer: Uuid, + ctx: &Session, + ) -> Result { + let user_uid = ctx.user().ok_or(AppError::Unauthorized)?; + let repo = self.utils_find_repo(namespace, repo_name, ctx).await?; + + let existing = + pull_request_review_request::Entity::find_by_id((repo.id, pr_number, reviewer)) + .one(&self.db) + .await? + .ok_or(AppError::NotFound("Review request not found".to_string()))?; + + // Permission: requested_by user OR admin/owner + let is_requester = existing.requested_by == user_uid; + let member = project_members::Entity::find() + .filter(project_members::Column::Project.eq(repo.project)) + .filter(project_members::Column::User.eq(user_uid)) + .one(&self.db) + .await? + .ok_or(AppError::NoPower)?; + let role = member.scope_role().map_err(|_| AppError::RoleParseError)?; + let is_admin = role == MemberRole::Admin || role == MemberRole::Owner; + + if !is_requester && !is_admin { + return Err(AppError::NoPower); + } + + let now = Utc::now(); + let mut active: pull_request_review_request::ActiveModel = existing.into(); + active.dismissed_at = Set(Some(now)); + active.dismissed_by = Set(Some(user_uid)); + let model = active.update(&self.db).await?; + + super::invalidate_pr_cache(&self.cache, repo.id, pr_number).await; + + let reviewer_username = models::users::user::Entity::find_by_id(model.reviewer) + .one(&self.db) + .await + .ok() + .flatten() + .map(|u| u.username); + let requested_by_username = models::users::user::Entity::find_by_id(model.requested_by) + .one(&self.db) + .await + .ok() + .flatten() + .map(|u| u.username); + let dismissed_by_username = if let Some(id) = model.dismissed_by { + models::users::user::Entity::find_by_id(id) + .one(&self.db) + .await + .ok() + .flatten() + .map(|u| u.username) + } else { + None + }; + + Ok(ReviewRequestResponse { + reviewer_username, + requested_by_username, + dismissed_by_username, + 
..ReviewRequestResponse::from(model) + }) + } +} diff --git a/libs/service/search/mod.rs b/libs/service/search/mod.rs new file mode 100644 index 0000000..2435394 --- /dev/null +++ b/libs/service/search/mod.rs @@ -0,0 +1,3 @@ +pub mod service; + +pub use service::*; diff --git a/libs/service/search/service.rs b/libs/service/search/service.rs new file mode 100644 index 0000000..f51d66a --- /dev/null +++ b/libs/service/search/service.rs @@ -0,0 +1,468 @@ +use crate::AppService; +use crate::error::AppError; +use chrono::{DateTime, Utc}; +use db::database::AppDatabase; +use models::issues::issue; +use models::projects::{project, project_members}; +use models::repos::repo; +use models::users::user; +use sea_orm::*; +use sea_query::{Expr as SqExpr, extension::postgres::PgExpr}; +use serde::{Deserialize, Serialize}; +use session::Session; +use utoipa::ToSchema; +use uuid::Uuid; + +// ─── Request / Response types ──────────────────────────────────────────────── + +#[derive(Debug, Clone, Deserialize, utoipa::IntoParams)] +pub struct SearchQuery { + /// Search keyword (matches against name, title, description, etc.) + #[param(min_length = 1, max_length = 200)] + pub q: String, + /// Comma-separated list of entity types to search. + /// Supported: projects, repos, issues, users. + /// Default: all types. + pub r#type: Option, + /// Page number (1-indexed). Default: 1. + pub page: Option, + /// Results per page per type. Default: 20, max: 100. 
+ pub per_page: Option, +} + +fn parse_types(types: Option) -> Vec { + match types { + None => vec![ + "projects".into(), + "repos".into(), + "issues".into(), + "users".into(), + ], + Some(s) => { + let s = s.to_lowercase(); + let mut out = vec![]; + for t in s.split(',') { + let t = t.trim(); + if t == "projects" || t == "repos" || t == "issues" || t == "users" { + out.push(t.into()); + } + } + if out.is_empty() { + vec![ + "projects".into(), + "repos".into(), + "issues".into(), + "users".into(), + ] + } else { + out + } + } + } +} + +fn build_like_pattern(q: &str) -> String { + format!("%{}%", q.trim()) +} + +// ─── Per-type result items ─────────────────────────────────────────────────── + +#[derive(Debug, Clone, Serialize, ToSchema)] +pub struct ProjectSearchItem { + pub uid: Uuid, + pub name: String, + pub display_name: String, + pub description: Option, + pub avatar_url: Option, + pub is_public: bool, + pub created_at: DateTime, + pub updated_at: DateTime, +} + +#[derive(Debug, Clone, Serialize, ToSchema)] +pub struct RepoSearchItem { + pub uid: Uuid, + pub name: String, + pub description: Option, + pub project_uid: Uuid, + pub project_name: String, + pub is_private: bool, + pub created_at: DateTime, +} + +#[derive(Debug, Clone, Serialize, ToSchema)] +pub struct IssueSearchItem { + pub uid: Uuid, + pub number: i64, + pub title: String, + pub body: Option, + pub state: String, + pub project_uid: Uuid, + pub project_name: String, + pub created_at: DateTime, + pub updated_at: DateTime, +} + +#[derive(Debug, Clone, Serialize, ToSchema)] +pub struct UserSearchItem { + pub uid: Uuid, + pub username: String, + pub display_name: Option, + pub avatar_url: Option, + pub organization: Option, + pub created_at: DateTime, +} + +// ─── Per-type result set ───────────────────────────────────────────────────── + +#[derive(Debug, Clone, Serialize, ToSchema)] +pub struct SearchResultSet { + pub items: Vec, + pub total: i64, + pub page: u32, + pub per_page: u32, +} + +impl 
SearchResultSet { + fn new(items: Vec, total: i64, page: u32, per_page: u32) -> Self { + Self { + items, + total, + page, + per_page, + } + } +} + +// ─── Aggregated response ────────────────────────────────────────────────────── + +#[derive(Debug, Clone, Serialize, ToSchema)] +pub struct SearchResponse { + pub query: String, + #[serde(skip_serializing_if = "Option::is_none")] + pub projects: Option>, + #[serde(skip_serializing_if = "Option::is_none")] + pub repos: Option>, + #[serde(skip_serializing_if = "Option::is_none")] + pub issues: Option>, + #[serde(skip_serializing_if = "Option::is_none")] + pub users: Option>, +} + +// ─── Permission helpers ─────────────────────────────────────────────────────── + +/// Returns the set of project IDs the current user can access (for filtering). +/// If user is None (anonymous), returns only public project IDs. +async fn accessible_project_ids( + db: &AppDatabase, + user_id: Option, +) -> Result, AppError> { + let public_projects: Vec = project::Entity::find() + .filter(project::Column::IsPublic.eq(true)) + .select_only() + .column(project::Column::Id) + .into_tuple::() + .all(db) + .await + .map_err(|_| AppError::InternalError)?; + + let Some(user_id) = user_id else { + // Anonymous: only public projects + return Ok(public_projects); + }; + + let memberships: Vec = project_members::Entity::find() + .filter(project_members::Column::User.eq(user_id)) + .select_only() + .column(project_members::Column::Project) + .into_tuple::() + .all(db) + .await + .map_err(|_| AppError::InternalError)?; + + let mut all: Vec = public_projects; + for id in memberships { + if !all.iter().any(|x| x == &id) { + all.push(id); + } + } + Ok(all) +} + +impl AppService { + pub async fn search( + &self, + ctx: &Session, + params: SearchQuery, + ) -> Result { + let page = Ord::max(params.page.unwrap_or(1), 1u32); + let per_page = Ord::min(params.per_page.unwrap_or(20), 100u32); + let query = params.q.trim(); + if query.is_empty() { + return 
Err(AppError::BadRequest("q is required".to_string())); + } + + let types = parse_types(params.r#type); + let user_id = ctx.user(); + let accessible = accessible_project_ids(&self.db, user_id).await?; + + let mut resp = SearchResponse { + query: query.to_string(), + projects: None, + repos: None, + issues: None, + users: None, + }; + + if types.iter().any(|t| t == "projects") { + match self + .search_projects(query, &accessible, page, per_page) + .await + { + Ok(set) => resp.projects = Some(set), + Err(_) => {} + } + } + if types.iter().any(|t| t == "repos") { + if let Ok(set) = self.search_repos(query, &accessible, page, per_page).await { + resp.repos = Some(set); + } + } + if types.iter().any(|t| t == "issues") { + if let Ok(set) = self.search_issues(query, &accessible, page, per_page).await { + resp.issues = Some(set); + } + } + if types.iter().any(|t| t == "users") { + if let Ok(users) = self.search_users(query, page, per_page).await { + resp.users = Some(users); + } + } + + Ok(resp) + } + + async fn search_projects( + &self, + query: &str, + accessible: &[Uuid], + page: u32, + per_page: u32, + ) -> Result, AppError> { + let offset = (page - 1) * per_page; + let pattern = build_like_pattern(query); + + // OR filter: Name ILIKE q OR DisplayName ILIKE q OR Description ILIKE q + let or_filter = SqExpr::col(project::Column::Name) + .ilike(&pattern) + .or(SqExpr::col(project::Column::DisplayName).ilike(&pattern)) + .or(SqExpr::col(project::Column::Description).ilike(&pattern)); + + // Count + let total: i64 = project::Entity::find() + .filter(project::Column::Id.is_in(accessible.iter().cloned().collect::>())) + .filter(or_filter.clone()) + .count(&self.db) + .await + .map_err(|_| AppError::InternalError)? 
as i64; + + // Fetch + let items: Vec = project::Entity::find() + .filter(project::Column::Id.is_in(accessible.iter().cloned().collect::>())) + .filter(or_filter) + .order_by_desc(project::Column::UpdatedAt) + .offset(Some(offset as u64)) + .limit(Some(per_page as u64)) + .all(&self.db) + .await + .map_err(|_| AppError::InternalError)? + .into_iter() + .map(|p| ProjectSearchItem { + uid: p.id, + name: p.name, + display_name: p.display_name, + description: p.description, + avatar_url: p.avatar_url, + is_public: p.is_public, + created_at: p.created_at, + updated_at: p.updated_at, + }) + .collect(); + + Ok(SearchResultSet::new(items, total, page, per_page)) + } + + async fn search_repos( + &self, + query: &str, + accessible: &[Uuid], + page: u32, + per_page: u32, + ) -> Result, AppError> { + let offset = (page - 1) * per_page; + let pattern = build_like_pattern(query); + + // OR filter: RepoName ILIKE q OR Description ILIKE q + let repo_or = SqExpr::col(repo::Column::RepoName) + .ilike(&pattern) + .or(SqExpr::col(repo::Column::Description).ilike(&pattern)); + + // Count + let total: i64 = repo::Entity::find() + .filter(repo::Column::Project.is_in(accessible.iter().cloned().collect::>())) + .filter(repo_or.clone()) + .count(&self.db) + .await + .map_err(|_| AppError::InternalError)? as i64; + + // Fetch + let repos: Vec = repo::Entity::find() + .filter(repo::Column::Project.is_in(accessible.iter().cloned().collect::>())) + .filter(repo_or) + .order_by_desc(repo::Column::UpdatedAt) + .offset(Some(offset as u64)) + .limit(Some(per_page as u64)) + .all(&self.db) + .await + .map_err(|_| AppError::InternalError)?; + + // Batch-fetch project names + let project_ids: Vec = repos.iter().map(|r| r.project).collect(); + let project_names: std::collections::HashMap = project::Entity::find() + .filter(project::Column::Id.is_in(project_ids.clone())) + .into_tuple::<(Uuid, String)>() + .all(&self.db) + .await + .map_err(|_| AppError::InternalError)? 
+ .into_iter() + .collect(); + + let items: Vec = repos + .into_iter() + .map(|r| RepoSearchItem { + uid: r.id, + name: r.repo_name, + description: r.description, + project_uid: r.project, + project_name: project_names + .get(&r.project) + .cloned() + .unwrap_or_else(|| r.project.to_string()), + is_private: r.is_private, + created_at: r.created_at, + }) + .collect(); + + Ok(SearchResultSet::new(items, total, page, per_page)) + } + + async fn search_issues( + &self, + query: &str, + accessible: &[Uuid], + page: u32, + per_page: u32, + ) -> Result, AppError> { + let offset = (page - 1) * per_page; + let pattern = build_like_pattern(query); + + // OR filter: Title ILIKE q OR Body ILIKE q + let issue_or = SqExpr::col(issue::Column::Title) + .ilike(&pattern) + .or(SqExpr::col(issue::Column::Body).ilike(&pattern)); + + // Count + let total: i64 = issue::Entity::find() + .filter(issue::Column::Project.is_in(accessible.iter().cloned().collect::>())) + .filter(issue_or.clone()) + .count(&self.db) + .await + .map_err(|_| AppError::InternalError)? as i64; + + // Fetch + let issues: Vec = issue::Entity::find() + .filter(issue::Column::Project.is_in(accessible.iter().cloned().collect::>())) + .filter(issue_or) + .order_by_desc(issue::Column::UpdatedAt) + .offset(Some(offset as u64)) + .limit(Some(per_page as u64)) + .all(&self.db) + .await + .map_err(|_| AppError::InternalError)?; + + // Batch-fetch project names + let project_ids: Vec = issues.iter().map(|i| i.project).collect(); + let project_names: std::collections::HashMap = project::Entity::find() + .filter(project::Column::Id.is_in(project_ids.clone())) + .into_tuple::<(Uuid, String)>() + .all(&self.db) + .await + .map_err(|_| AppError::InternalError)? 
+ .into_iter() + .collect(); + + let items: Vec = issues + .into_iter() + .map(|i| IssueSearchItem { + uid: i.id, + number: i.number, + title: i.title, + body: i.body, + state: i.state, + project_uid: i.project, + project_name: project_names + .get(&i.project) + .cloned() + .unwrap_or_else(|| i.project.to_string()), + created_at: i.created_at, + updated_at: i.updated_at, + }) + .collect(); + + Ok(SearchResultSet::new(items, total, page, per_page)) + } + + async fn search_users( + &self, + query: &str, + page: u32, + per_page: u32, + ) -> Result, AppError> { + let offset = (page - 1) * per_page; + let pattern = build_like_pattern(query); + + // OR filter: Username ILIKE q OR DisplayName ILIKE q + let user_or = SqExpr::col(user::Column::Username) + .ilike(&pattern) + .or(SqExpr::col(user::Column::DisplayName).ilike(&pattern)); + + // Count + let total: i64 = user::Entity::find() + .filter(user_or.clone()) + .count(&self.db) + .await + .map_err(|_| AppError::InternalError)? as i64; + + // Fetch + let items: Vec = user::Entity::find() + .filter(user_or) + .order_by_desc(user::Column::LastSignInAt) + .offset(Some(offset as u64)) + .limit(Some(per_page as u64)) + .all(&self.db) + .await + .map_err(|_| AppError::InternalError)? + .into_iter() + .map(|u| UserSearchItem { + uid: u.uid, + username: u.username, + display_name: u.display_name, + avatar_url: u.avatar_url, + organization: u.organization, + created_at: u.created_at, + }) + .collect(); + + Ok(SearchResultSet::new(items, total, page, per_page)) + } +} diff --git a/libs/service/skill/info.rs b/libs/service/skill/info.rs new file mode 100644 index 0000000..a4b0b1b --- /dev/null +++ b/libs/service/skill/info.rs @@ -0,0 +1,104 @@ +//! List and retrieve project skills. 
+ +use crate::AppService; +use crate::error::AppError; +use sea_orm::{ColumnTrait, EntityTrait, QueryFilter, QueryOrder}; +use serde::{Deserialize, Serialize}; +use session::Session; +use utoipa::ToSchema; + +#[derive(Debug, Clone, Serialize, ToSchema)] +pub struct SkillResponse { + pub id: i64, + pub project_uuid: String, + pub slug: String, + pub name: String, + pub description: Option, + pub source: String, + pub repo_id: Option, + pub commit_sha: Option, + pub blob_hash: Option, + pub content: String, + pub metadata: serde_json::Value, + pub enabled: bool, + pub created_by: Option, + pub created_at: chrono::DateTime, + pub updated_at: chrono::DateTime, +} + +impl From for SkillResponse { + fn from(s: models::projects::project_skill::Model) -> Self { + Self { + id: s.id, + project_uuid: s.project_uuid.to_string(), + slug: s.slug, + name: s.name, + description: s.description, + source: s.source, + repo_id: s.repo_id.map(|id| id.to_string()), + commit_sha: s.commit_sha, + blob_hash: s.blob_hash, + content: s.content, + metadata: s.metadata, + enabled: s.enabled, + created_by: s.created_by.map(|id| id.to_string()), + created_at: s.created_at, + updated_at: s.updated_at, + } + } +} + +#[derive(Debug, Clone, Deserialize, utoipa::IntoParams)] +pub struct SkillListQuery { + pub source: Option, + pub enabled: Option, +} + +impl AppService { + /// List all skills registered to a project. 
+ pub async fn skill_list( + &self, + project_uuid: String, + query: SkillListQuery, + _ctx: &Session, + ) -> Result, AppError> { + let project_id = uuid::Uuid::parse_str(&project_uuid) + .map_err(|_| AppError::BadRequest("Invalid project UUID".to_string()))?; + + use models::projects::project_skill::Column as C; + let mut q = models::projects::project_skill::Entity::find() + .filter(C::ProjectUuid.eq(project_id)) + .order_by_asc(C::Name); + + if let Some(source) = &query.source { + q = q.filter(C::Source.eq(source.clone())); + } + if let Some(enabled) = query.enabled { + q = q.filter(C::Enabled.eq(enabled)); + } + + let skills = q.all(&self.db).await?; + Ok(skills.into_iter().map(SkillResponse::from).collect()) + } + + /// Get a single skill by slug within a project. + pub async fn skill_get( + &self, + project_uuid: String, + slug: String, + _ctx: &Session, + ) -> Result { + let project_id = uuid::Uuid::parse_str(&project_uuid) + .map_err(|_| AppError::BadRequest("Invalid project UUID".to_string()))?; + + use models::projects::project_skill::Column as C; + let skill = models::projects::project_skill::Entity::find() + .filter(C::ProjectUuid.eq(project_id)) + .filter(C::Slug.eq(slug)) + .one(&self.db) + .await? + .ok_or_else(|| AppError::NotFound("Skill not found".to_string()))?; + + Ok(SkillResponse::from(skill)) + } +} diff --git a/libs/service/skill/manage.rs b/libs/service/skill/manage.rs new file mode 100644 index 0000000..079be2c --- /dev/null +++ b/libs/service/skill/manage.rs @@ -0,0 +1,174 @@ +//! Create, update, delete project skills. 
+ +use crate::AppService; +use crate::error::AppError; +use super::info::SkillResponse; +use chrono::Utc; +use models::projects::project_skill::{Column as C, Entity as SkillEntity}; +use models::ActiveModelTrait; +use sea_orm::{ColumnTrait, EntityTrait, QueryFilter, Set}; +use serde::{Deserialize, Serialize}; +use session::Session; +use utoipa::ToSchema; +use uuid::Uuid; + +#[derive(Debug, Clone, Deserialize, ToSchema)] +pub struct CreateSkillRequest { + pub slug: String, + pub name: Option, + pub description: Option, + pub content: String, + pub metadata: Option, +} + +#[derive(Debug, Clone, Deserialize, ToSchema)] +pub struct UpdateSkillRequest { + pub name: Option, + pub description: Option, + pub content: Option, + pub metadata: Option, + pub enabled: Option, +} + +#[derive(Debug, Clone, Serialize, ToSchema)] +pub struct DeleteSkillResponse { + pub deleted: bool, + pub slug: String, +} + +fn validate_slug(slug: &str) -> Result<(), AppError> { + if slug.is_empty() || slug.len() > 255 { + return Err(AppError::BadRequest("Invalid slug".to_string())); + } + if !slug + .chars() + .all(|c| c.is_ascii_alphanumeric() || c == '-' || c == '_') + { + return Err(AppError::BadRequest( + "Slug must contain only ASCII letters, numbers, hyphens, and underscores".to_string(), + )); + } + Ok(()) +} + +impl AppService { + /// Add a skill manually to a project. 
+ pub async fn skill_create( + &self, + project_uuid: String, + request: CreateSkillRequest, + ctx: &Session, + ) -> Result { + validate_slug(&request.slug)?; + + let project_id = Uuid::parse_str(&project_uuid) + .map_err(|_| AppError::BadRequest("Invalid project UUID".to_string()))?; + + let user_id = ctx + .user() + .ok_or_else(|| AppError::Unauthorized)?; + + // Check for duplicate slug within project + let exists = SkillEntity::find() + .filter(C::ProjectUuid.eq(project_id)) + .filter(C::Slug.eq(&request.slug)) + .one(&self.db) + .await?; + if exists.is_some() { + return Err(AppError::Conflict(format!( + "Skill '{}' already exists in this project", + request.slug + ))); + } + + let now = Utc::now(); + let metadata = request.metadata.unwrap_or(serde_json::Value::Object(Default::default())); + let name = request.name.unwrap_or_else(|| request.slug.clone()); + + let active = models::projects::project_skill::ActiveModel { + id: Set(0), // auto-increment + project_uuid: Set(project_id), + slug: Set(request.slug), + name: Set(name), + description: Set(request.description), + source: Set("manual".to_string()), + repo_id: Set(None), + commit_sha: Set(None), + blob_hash: Set(None), + content: Set(request.content), + metadata: Set(metadata), + enabled: Set(true), + created_by: Set(Some(user_id)), + created_at: Set(now), + updated_at: Set(now), + }; + + let inserted = active.insert(&self.db).await?; + Ok(SkillResponse::from(inserted)) + } + + /// Update an existing skill. + pub async fn skill_update( + &self, + project_uuid: String, + slug: String, + request: UpdateSkillRequest, + _ctx: &Session, + ) -> Result { + let project_id = Uuid::parse_str(&project_uuid) + .map_err(|_| AppError::BadRequest("Invalid project UUID".to_string()))?; + + let skill = SkillEntity::find() + .filter(C::ProjectUuid.eq(project_id)) + .filter(C::Slug.eq(&slug)) + .one(&self.db) + .await? 
+ .ok_or_else(|| AppError::NotFound("Skill not found".to_string()))?; + + let mut active: models::projects::project_skill::ActiveModel = skill.into(); + if let Some(name) = request.name { + active.name = Set(name); + } + if let Some(description) = request.description { + active.description = Set(Some(description)); + } + if let Some(content) = request.content { + active.content = Set(content); + } + if let Some(metadata) = request.metadata { + active.metadata = Set(metadata); + } + if let Some(enabled) = request.enabled { + active.enabled = Set(enabled); + } + active.updated_at = Set(Utc::now()); + + let updated = active.update(&self.db).await?; + Ok(SkillResponse::from(updated)) + } + + /// Delete a skill from a project. + pub async fn skill_delete( + &self, + project_uuid: String, + slug: String, + _ctx: &Session, + ) -> Result { + let project_id = Uuid::parse_str(&project_uuid) + .map_err(|_| AppError::BadRequest("Invalid project UUID".to_string()))?; + + let skill = SkillEntity::find() + .filter(C::ProjectUuid.eq(project_id)) + .filter(C::Slug.eq(&slug)) + .one(&self.db) + .await? + .ok_or_else(|| AppError::NotFound("Skill not found".to_string()))?; + + let deleted = SkillEntity::delete_by_id(skill.id).exec(&self.db).await?; + + Ok(DeleteSkillResponse { + deleted: deleted.rows_affected > 0, + slug, + }) + } +} diff --git a/libs/service/skill/mod.rs b/libs/service/skill/mod.rs new file mode 100644 index 0000000..6ac8334 --- /dev/null +++ b/libs/service/skill/mod.rs @@ -0,0 +1,9 @@ +//! Skill management service. +//! +//! Handles listing, creating, updating, deleting, and auto-discovering +//! skills registered to a project. + +pub mod info; +pub mod manage; +pub mod scan; +pub mod scanner; diff --git a/libs/service/skill/scan.rs b/libs/service/skill/scan.rs new file mode 100644 index 0000000..37d901c --- /dev/null +++ b/libs/service/skill/scan.rs @@ -0,0 +1,53 @@ +//! Scan all repos in a project for skills. 
+ +use crate::AppService; +use crate::error::AppError; +use models::repos::repo::Entity as RepoEntity; +use models::repos::repo::Column as RCol; +use sea_orm::{ColumnTrait, EntityTrait, QueryFilter}; +use uuid::Uuid; + +use super::scanner; + +impl AppService { + /// Scan all repositories in a project and sync skills from `.claude/skills/` directories. + /// + /// This is called automatically during `git hook sync` and can also be triggered + /// manually via `POST /api/projects/{name}/skills/scan`. + /// + /// - Discovers skills in each repo's `.claude/skills/` folder + /// - Upserts repo-sourced skills (source = "repo") + /// - Removes skills whose source repo no longer has the file + /// - Manual skills (source = "manual") are never affected + pub async fn skill_scan_repos( + &self, + project_uid: Uuid, + _caller_uid: Uuid, + ) -> Result { + // Collect all repo IDs for this project + let repos: Vec<_> = RepoEntity::find() + .filter(RCol::Project.eq(project_uid)) + .all(&self.db) + .await?; + + let mut total_created = 0i64; + let mut total_updated = 0i64; + let mut total_removed = 0i64; + let mut total_discovered = 0i64; + + for repo in repos { + let result = scanner::scan_and_sync_skills(&self.db, project_uid, &repo).await?; + total_created += result.created; + total_updated += result.updated; + total_removed += result.removed; + total_discovered += result.discovered; + } + + Ok(scanner::ScanSyncResult { + discovered: total_discovered, + created: total_created, + updated: total_updated, + removed: total_removed, + }) + } +} diff --git a/libs/service/skill/scanner.rs b/libs/service/skill/scanner.rs new file mode 100644 index 0000000..a4ce7f3 --- /dev/null +++ b/libs/service/skill/scanner.rs @@ -0,0 +1,238 @@ +//! Repository skill scanner. +//! +//! Scans repositories for SKILL.md files and upserts skill records. 
+ +use crate::error::AppError; +use chrono::Utc; +use models::ActiveModelTrait; +use models::projects::project_skill::ActiveModel as SkillActiveModel; +use models::projects::project_skill::Column as C; +use models::projects::project_skill::Entity as SkillEntity; +use models::repos::repo::Model as RepoModel; +use sea_orm::{ColumnTrait, EntityTrait, QueryFilter, Set}; +use sha1::Digest; +use std::path::Path; +use uuid::Uuid; + +/// Skill discovery result from a single repository. +#[derive(Debug)] +pub struct DiscoveredSkill { + /// URL-safe slug derived from the directory name. + pub slug: String, + /// Human-readable name (from frontmatter or slug). + pub name: String, + /// Short description (from frontmatter). + pub description: Option, + /// Raw markdown body after the frontmatter. + pub content: String, + /// Parsed frontmatter as JSON. + pub metadata: serde_json::Value, + /// Git commit SHA where this skill was found (git hook path only). + pub commit_sha: Option, + /// Git blob SHA-1 of the SKILL.md file. + pub blob_hash: Option, +} + +/// Compute the git blob SHA-1 hash of `content`. +/// Format: "blob {len}\0{data}" +fn git_blob_hash(content: &[u8]) -> String { + let size = content.len(); + let header = format!("blob {}\0", size); + let mut hasher = sha1::Sha1::new(); + hasher.update(header.as_bytes()); + hasher.update(content); + hex::encode(hasher.finalize()) +} + +/// Parse a SKILL.md file and extract metadata + content. 
+fn parse_skill_file(slug: &str, raw: &str) -> DiscoveredSkill { + let (frontmatter, content) = extract_frontmatter(raw); + + let metadata: serde_json::Value = frontmatter + .map(|fm| serde_json::from_str(fm).unwrap_or_default()) + .unwrap_or_default(); + + let name = metadata + .get("name") + .and_then(|v| v.as_str()) + .map(String::from) + .unwrap_or_else(|| slug.replace('-', " ").replace('_', " ")); + + let description = metadata + .get("description") + .and_then(|v| v.as_str()) + .map(String::from); + + DiscoveredSkill { + slug: slug.to_string(), + name, + description, + content: content.trim().to_string(), + metadata, + commit_sha: None, + blob_hash: None, + } +} + +/// Split frontmatter (--- ... ---) from markdown content. +fn extract_frontmatter(raw: &str) -> (Option<&str>, &str) { + let trimmed = raw.trim_start(); + if !trimmed.starts_with("---") { + return (None, trimmed); + } + if let Some(end) = trimmed[3..].find("---") { + let fm = &trimmed[3..end + 3]; + let rest = trimmed[3 + end + 3..].trim_start(); + (Some(fm), rest) + } else { + (None, trimmed) + } +} + +/// Recursively scan `repo_path` for `SKILL.md` files. +/// The skill slug is `{short_repo_id}/{parent_dir_name}` to ensure uniqueness across repos. 
+pub fn scan_repo_for_skills( + repo_path: &Path, + repo_id: Uuid, +) -> Result, AppError> { + let repo_id_prefix = &repo_id.to_string()[..8]; + let mut discovered = Vec::new(); + let mut stack = vec![repo_path.to_path_buf()]; + + while let Some(dir) = stack.pop() { + let entries = match std::fs::read_dir(&dir) { + Ok(e) => e, + Err(_) => continue, + }; + for entry in entries.flatten() { + let path = entry.path(); + if path.is_dir() { + stack.push(path); + } else if path.file_name().and_then(|n| n.to_str()) == Some("SKILL.md") { + if let Some(dir_name) = path.parent() + .and_then(|p| p.file_name()) + .and_then(|n| n.to_str()) + .filter(|s| !s.starts_with('.')) + { + let slug = format!("{}/{}", repo_id_prefix, dir_name); + if let Ok(raw) = std::fs::read(&path) { + let blob_hash = git_blob_hash(&raw); + let mut skill = parse_skill_file(&slug, &String::from_utf8_lossy(&raw)); + skill.blob_hash = Some(blob_hash); + discovered.push(skill); + } + } + } + } + } + + Ok(discovered) +} + +/// Scan a git2::Repository for skills and upsert them into the database. +/// Called from the git hook sync path. 
+pub async fn scan_and_sync_skills( + db: &db::database::AppDatabase, + project_uuid: Uuid, + repo: &RepoModel, +) -> Result { + // Resolve the repo path + let storage_path = Path::new(&repo.storage_path); + let discovered = scan_repo_for_skills(storage_path, repo.id)?; + + if discovered.is_empty() { + return Ok(ScanSyncResult { + discovered: 0, + created: 0, + updated: 0, + removed: 0, + }); + } + + let now = Utc::now(); + let mut created = 0i64; + let mut updated = 0i64; + + // Collect all repo-sourced skills in this repo for this project + let existing: Vec<_> = SkillEntity::find() + .filter(C::ProjectUuid.eq(project_uuid)) + .filter(C::Source.eq("repo")) + .filter(C::RepoId.eq(repo.id)) + .all(db) + .await?; + + let existing_by_slug: std::collections::HashMap<_, _> = existing + .into_iter() + .map(|s| (s.slug.clone(), s)) + .collect(); + + let mut seen_slugs = std::collections::HashSet::new(); + + let discovered_count = discovered.len() as i64; + for skill in discovered { + seen_slugs.insert(skill.slug.clone()); + + let json_meta = serde_json::to_value(&skill.metadata).unwrap_or_default(); + + if let Some(existing_skill) = existing_by_slug.get(&skill.slug) { + if existing_skill.content != skill.content + || existing_skill.metadata != json_meta + || existing_skill.blob_hash != skill.blob_hash + { + let mut active: SkillActiveModel = existing_skill.clone().into(); + active.content = Set(skill.content); + active.metadata = Set(json_meta); + active.commit_sha = Set(skill.commit_sha.clone()); + active.blob_hash = Set(skill.blob_hash.clone()); + active.updated_at = Set(now); + active.update(db).await?; + updated += 1; + } + } else { + let active = SkillActiveModel { + id: Set(0), + project_uuid: Set(project_uuid), + slug: Set(skill.slug.clone()), + name: Set(skill.name), + description: Set(skill.description), + source: Set("repo".to_string()), + repo_id: Set(Some(repo.id)), + commit_sha: Set(skill.commit_sha.clone()), + blob_hash: Set(skill.blob_hash.clone()), + 
content: Set(skill.content), + metadata: Set(json_meta), + enabled: Set(true), + created_by: Set(None), + created_at: Set(now), + updated_at: Set(now), + }; + active.insert(db).await?; + created += 1; + } + } + + // Remove skills that no longer exist in the repo + let mut removed = 0i64; + for (slug, old_skill) in existing_by_slug { + if !seen_slugs.contains(&slug) { + SkillEntity::delete_by_id(old_skill.id).exec(db).await?; + removed += 1; + } + } + + Ok(ScanSyncResult { + discovered: discovered_count, + created, + updated, + removed, + }) +} + +/// Result of a scan + sync operation. +#[derive(Debug)] +pub struct ScanSyncResult { + pub discovered: i64, + pub created: i64, + pub updated: i64, + pub removed: i64, +} diff --git a/libs/service/user/access_key.rs b/libs/service/user/access_key.rs new file mode 100644 index 0000000..3627d05 --- /dev/null +++ b/libs/service/user/access_key.rs @@ -0,0 +1,241 @@ +use crate::AppService; +use crate::error::AppError; +use base64::Engine; +use chrono::Utc; +use models::users::{user_activity_log, user_token}; +use sea_orm::*; +use serde::{Deserialize, Serialize}; +use session::Session; +use uuid::Uuid; + +#[derive(Deserialize, Serialize, Clone, Debug, utoipa::ToSchema)] +pub struct CreateAccessKeyParams { + pub name: String, + pub scopes: Vec, + pub expires_at: Option>, +} + +#[derive(Deserialize, Serialize, Clone, Debug, utoipa::ToSchema)] +pub struct AccessKeyResponse { + pub id: i64, + pub name: String, + pub access_key: Option, + pub scopes: Vec, + pub expires_at: Option>, + pub is_revoked: bool, + pub created_at: chrono::DateTime, +} + +#[derive(Deserialize, Serialize, Clone, Debug, utoipa::ToSchema)] +pub struct AccessKeyListResponse { + pub access_keys: Vec, + pub total: usize, +} + +impl AppService { + pub async fn user_create_access_key( + &self, + context: &Session, + params: CreateAccessKeyParams, + ) -> Result { + let user_uid = context.user().ok_or(AppError::Unauthorized)?; + + let access_key = 
self.user_generate_access_key(); + let access_key_hash = self.user_hash_access_key(&access_key); + + let access_key_model = user_token::ActiveModel { + user: Set(user_uid), + name: Set(params.name.clone()), + token_hash: Set(access_key_hash), + scopes: Set(serde_json::to_value(params.scopes.clone()).unwrap()), + expires_at: Set(params.expires_at), + is_revoked: Set(false), + created_at: Set(Utc::now()), + updated_at: Set(Utc::now()), + ..Default::default() + }; + + let created_access_key = access_key_model.insert(&self.db).await?; + + let _ = user_activity_log::ActiveModel { + user_uid: Set(Some(user_uid)), + action: Set("access_key_create".to_string()), + ip_address: Set(context.ip_address()), + user_agent: Set(context.user_agent()), + details: Set(serde_json::json!({ + "access_key_name": params.name.clone(), + "access_key_id": created_access_key.id, + "scopes": params.scopes.clone() + })), + created_at: Set(Utc::now()), + ..Default::default() + } + .insert(&self.db) + .await; + + let scopes: Vec = + serde_json::from_value(created_access_key.scopes).unwrap_or_default(); + + Ok(AccessKeyResponse { + id: created_access_key.id, + name: created_access_key.name, + access_key: Some(access_key), + scopes, + expires_at: created_access_key.expires_at, + is_revoked: created_access_key.is_revoked, + created_at: created_access_key.created_at, + }) + } + + pub async fn user_list_access_keys( + &self, + context: &Session, + ) -> Result { + let user_uid = context.user().ok_or(AppError::Unauthorized)?; + + let access_keys = user_token::Entity::find() + .filter(user_token::Column::User.eq(user_uid)) + .order_by_desc(user_token::Column::CreatedAt) + .all(&self.db) + .await?; + + let total = access_keys.len(); + let access_keys = access_keys + .into_iter() + .map(|ak| { + let scopes: Vec = serde_json::from_value(ak.scopes).unwrap_or_default(); + AccessKeyResponse { + id: ak.id, + name: ak.name, + access_key: None, + scopes, + expires_at: ak.expires_at, + is_revoked: ak.is_revoked, + 
created_at: ak.created_at, + } + }) + .collect(); + + Ok(AccessKeyListResponse { access_keys, total }) + } + + pub async fn user_revoke_access_key( + &self, + context: &Session, + access_key_id: i64, + ) -> Result<(), AppError> { + let user_uid = context.user().ok_or(AppError::Unauthorized)?; + + let access_key = user_token::Entity::find_by_id(access_key_id) + .filter(user_token::Column::User.eq(user_uid)) + .one(&self.db) + .await? + .ok_or(AppError::NotFound("Access key not found".to_string()))?; + + let mut active_access_key: user_token::ActiveModel = access_key.clone().into(); + active_access_key.is_revoked = Set(true); + active_access_key.updated_at = Set(Utc::now()); + + active_access_key.update(&self.db).await?; + + let _ = user_activity_log::ActiveModel { + user_uid: Set(Some(user_uid)), + action: Set("access_key_revoke".to_string()), + ip_address: Set(context.ip_address()), + user_agent: Set(context.user_agent()), + details: Set(serde_json::json!({ + "access_key_name": access_key.name, + "access_key_id": access_key_id + })), + created_at: Set(Utc::now()), + ..Default::default() + } + .insert(&self.db) + .await; + + Ok(()) + } + + pub async fn user_delete_access_key( + &self, + context: &Session, + access_key_id: i64, + ) -> Result<(), AppError> { + let user_uid = context.user().ok_or(AppError::Unauthorized)?; + + let access_key = user_token::Entity::find_by_id(access_key_id) + .filter(user_token::Column::User.eq(user_uid)) + .one(&self.db) + .await? 
+ .ok_or(AppError::NotFound("Access key not found".to_string()))?; + + let _ = user_activity_log::ActiveModel { + user_uid: Set(Some(user_uid)), + action: Set("access_key_delete".to_string()), + ip_address: Set(context.ip_address()), + user_agent: Set(context.user_agent()), + details: Set(serde_json::json!({ + "access_key_name": access_key.name.clone(), + "access_key_id": access_key_id + })), + created_at: Set(Utc::now()), + ..Default::default() + } + .insert(&self.db) + .await; + + user_token::Entity::delete(access_key.into_active_model()) + .exec(&self.db) + .await?; + + Ok(()) + } + + pub async fn user_verify_access_key(&self, access_key: String) -> Result { + let access_key_hash = self.user_hash_access_key(&access_key); + + let access_key_model = user_token::Entity::find() + .filter(user_token::Column::TokenHash.eq(access_key_hash)) + .filter(user_token::Column::IsRevoked.eq(false)) + .one(&self.db) + .await? + .ok_or(AppError::Unauthorized)?; + + if let Some(expires_at) = access_key_model.expires_at { + if expires_at < Utc::now() { + return Err(AppError::Unauthorized); + } + } + + Ok(access_key_model.user) + } + + fn user_generate_access_key(&self) -> String { + use std::time::{SystemTime, UNIX_EPOCH}; + let now = SystemTime::now() + .duration_since(UNIX_EPOCH) + .unwrap() + .as_nanos(); + let chars: Vec = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789" + .chars() + .collect(); + let mut access_key = String::with_capacity(68); + access_key.push_str("gda_"); + let mut state = now as u64; + for _ in 0..64 { + state = state.wrapping_mul(1103515245).wrapping_add(12345); + access_key.push(chars[(state as usize) % chars.len()]); + } + access_key + } + + fn user_hash_access_key(&self, access_key: &str) -> String { + use sha2::{Digest, Sha256}; + let mut hasher = Sha256::new(); + hasher.update(access_key.as_bytes()); + format!( + "{}", + base64::prelude::BASE64_STANDARD.encode(&hasher.finalize().to_vec()) + ) + } +} diff --git 
a/libs/service/user/avatar.rs b/libs/service/user/avatar.rs new file mode 100644 index 0000000..dcc29d7 --- /dev/null +++ b/libs/service/user/avatar.rs @@ -0,0 +1,59 @@ +use crate::AppService; +use crate::error::AppError; +use models::users::{user, user_activity_log}; +use sea_orm::prelude::Expr; +use sea_orm::*; +use session::Session; +use std::io; + +impl AppService { + pub async fn user_avatar_upload( + &self, + context: Session, + file: Vec, + file_ext: &str, + ) -> Result { + let user_id = context.user().ok_or(AppError::Unauthorized)?; + let time = chrono::Utc::now().timestamp(); + let file_name = format!("{}-{}", user_id, time); + self.avatar + .upload(file, file_name.clone(), file_ext) + .await + .map_err(|e| { + AppError::Io(io::Error::new( + io::ErrorKind::Other, + format!("Failed to upload avatar: {}", e), + )) + })?; + let static_url = self + .config + .static_domain() + .unwrap_or_else(|_| "/static".to_string()); + let file_url = format!( + "{}/{}", + static_url.trim_end_matches('/'), + format!("{}.{}", file_name, file_ext) + ); + user::Entity::update_many() + .filter(user::Column::Uid.eq(user_id)) + .col_expr(user::Column::AvatarUrl, Expr::value(file_url.clone())) + .exec(&self.db) + .await?; + + let _ = user_activity_log::ActiveModel { + user_uid: Set(Some(user_id)), + action: Set("avatar_upload".to_string()), + ip_address: Set(context.ip_address()), + user_agent: Set(context.user_agent()), + details: Set(serde_json::json!({ + "file_url": file_url + })), + created_at: Set(chrono::Utc::now()), + ..Default::default() + } + .insert(&self.db) + .await; + + Ok(file_url) + } +} diff --git a/libs/service/user/chpc.rs b/libs/service/user/chpc.rs new file mode 100644 index 0000000..7eb4afb --- /dev/null +++ b/libs/service/user/chpc.rs @@ -0,0 +1,223 @@ +use crate::AppService; +use crate::error::AppError; +use chrono::{Duration, Local, NaiveDate}; +use models::repos::repo_commit; +use models::users::{user, user_email}; +use redis::AsyncCommands; +use 
sea_orm::*;
use serde::{Deserialize, Serialize};
use session::Session;
use utoipa::{IntoParams, ToSchema};

/// Cache key prefix for contribution heatmap
const HEATMAP_CACHE_PREFIX: &str = "user:heatmap";
/// Default cache TTL in seconds (5 minutes)
const HEATMAP_CACHE_TTL: u64 = 300;

/// One calendar day's contribution count.
#[derive(Debug, Clone, Serialize, Deserialize, ToSchema)]
pub struct ContributionHeatmapItem {
    pub date: String,
    pub count: i32,
}

#[derive(Debug, Clone, Serialize, Deserialize, ToSchema)]
pub struct ContributionHeatmapResponse {
    pub username: String,
    pub total_contributions: i64,
    pub heatmap: Vec<ContributionHeatmapItem>,
    pub start_date: String,
    pub end_date: String,
}

#[derive(Debug, Clone, Serialize, Deserialize, ToSchema, IntoParams)]
pub struct ContributionHeatmapQuery {
    pub start_date: Option<String>,
    pub end_date: Option<String>,
}

impl AppService {
    /// Build a per-day commit-count heatmap for `username` over the requested
    /// date range (default: trailing year), served from Redis when possible.
    pub async fn get_user_contribution_heatmap(
        &self,
        _context: Session,
        username: String,
        query: ContributionHeatmapQuery,
    ) -> Result<ContributionHeatmapResponse, AppError> {
        let user = user::Entity::find()
            .filter(user::Column::Username.eq(&username))
            .one(&self.db)
            .await?
            .ok_or(AppError::UserNotFound)?;

        let (start_date, end_date) = self.parse_date_range(query.start_date, query.end_date)?;

        // Serve from cache when a fresh entry exists; any cache error just
        // falls through to a database rebuild.
        let cache_key = self.build_heatmap_cache_key(&user.uid, start_date, end_date);
        if let Ok(mut conn) = self.cache.conn().await {
            if let Ok(cached) = conn.get::<_, String>(cache_key.clone()).await {
                if let Ok(cached) = serde_json::from_str::<ContributionHeatmapResponse>(&cached) {
                    return Ok(cached);
                }
            }
        }

        // Commits are attributed through any of the user's registered emails.
        let emails: Vec<String> = user_email::Entity::find()
            .filter(user_email::Column::User.eq(user.uid))
            .select_only()
            .column(user_email::Column::Email)
            .into_tuple::<String>()
            .all(&self.db)
            .await?;

        if emails.is_empty() {
            // No emails means no attributable commits: return an empty grid.
            return Ok(ContributionHeatmapResponse {
                username: user.username.clone(),
                total_contributions: 0,
                heatmap: vec![],
                start_date: start_date.format("%Y-%m-%d").to_string(),
                end_date: end_date.format("%Y-%m-%d").to_string(),
            });
        }

        let start_dt = start_date.and_hms_opt(0, 0, 0).unwrap();
        let end_dt = end_date.and_hms_opt(23, 59, 59).unwrap();

        let commits: Vec<repo_commit::Model> = repo_commit::Entity::find()
            .filter(repo_commit::Column::AuthorEmail.is_in(emails))
            .filter(repo_commit::Column::CreatedAt.gte(start_dt))
            .filter(repo_commit::Column::CreatedAt.lte(end_dt))
            .all(&self.db)
            .await?;

        // Bucket commits by calendar day.
        let mut daily_counts: std::collections::HashMap<String, i64> =
            std::collections::HashMap::new();
        for commit in &commits {
            *daily_counts
                .entry(commit.created_at.format("%Y-%m-%d").to_string())
                .or_insert(0) += 1;
        }

        let total_contributions = commits.len() as i64;

        // Emit one entry per day so the client can render a dense grid.
        let mut heatmap: Vec<ContributionHeatmapItem> = Vec::new();
        let mut cursor = start_date;
        while cursor <= end_date {
            let date_str = cursor.format("%Y-%m-%d").to_string();
            let count = *daily_counts.get(&date_str).unwrap_or(&0);
            heatmap.push(ContributionHeatmapItem {
                date: date_str,
                count: count as i32,
            });
            cursor += Duration::days(1);
        }

        let response = ContributionHeatmapResponse {
            username: user.username,
total_contributions, + heatmap, + start_date: start_date.format("%Y-%m-%d").to_string(), + end_date: end_date.format("%Y-%m-%d").to_string(), + }; + + if let Ok(mut conn) = self.cache.conn().await { + let _: Option<()> = conn + .set_ex::( + cache_key, + serde_json::to_string(&response).unwrap_or_default(), + HEATMAP_CACHE_TTL, + ) + .await + .ok(); + } + + Ok(response) + } + + pub async fn invalidate_user_heatmap_cache( + &self, + user_uid: uuid::Uuid, + ) -> Result<(), AppError> { + // Invalidate all heatmap cache entries for a user + // Delete known date range keys (last 2 years) + if let Ok(mut conn) = self.cache.conn().await { + let today = Local::now().date_naive(); + let two_years_ago = today - Duration::days(730); + let mut current = two_years_ago; + while current <= today { + let key = self.build_heatmap_cache_key(&user_uid, current, current); + let _: Option<()> = conn.del::<_, ()>(key).await.ok(); + current += Duration::days(1); + } + } + Ok(()) + } + + fn build_heatmap_cache_key( + &self, + user_uid: &uuid::Uuid, + start_date: NaiveDate, + end_date: NaiveDate, + ) -> String { + format!( + "{}:{}:{}:{}", + HEATMAP_CACHE_PREFIX, + user_uid, + start_date.format("%Y-%m-%d"), + end_date.format("%Y-%m-%d"), + ) + } + + fn parse_date_range( + &self, + start_date_str: Option, + end_date_str: Option, + ) -> Result<(NaiveDate, NaiveDate), AppError> { + let today = Local::now().date_naive(); + let one_year_ago = today - Duration::days(365); + + let start_date = if let Some(date_str) = start_date_str { + NaiveDate::parse_from_str(&date_str, "%Y-%m-%d").map_err(|_| { + AppError::NotFound("Invalid start_date format, expected YYYY-MM-DD".to_string()) + })? + } else { + one_year_ago + }; + + let end_date = if let Some(date_str) = end_date_str { + NaiveDate::parse_from_str(&date_str, "%Y-%m-%d").map_err(|_| { + AppError::NotFound("Invalid end_date format, expected YYYY-MM-DD".to_string()) + })? 
+ } else { + today + }; + + if start_date > end_date { + return Err(AppError::NotFound( + "start_date cannot be later than end_date".to_string(), + )); + } + + if (end_date - start_date).num_days() > 730 { + return Err(AppError::NotFound( + "Date range cannot exceed 2 years".to_string(), + )); + } + + Ok((start_date, end_date)) + } + + pub async fn get_current_user_contribution_heatmap( + &self, + context: Session, + query: ContributionHeatmapQuery, + ) -> Result { + let user_uid = context.user().ok_or(AppError::Unauthorized)?; + + let user = user::Entity::find() + .filter(user::Column::Uid.eq(user_uid)) + .one(&self.db) + .await? + .ok_or(AppError::UserNotFound)?; + + self.get_user_contribution_heatmap(context, user.username, query) + .await + } +} diff --git a/libs/service/user/mod.rs b/libs/service/user/mod.rs new file mode 100644 index 0000000..ecc4bb0 --- /dev/null +++ b/libs/service/user/mod.rs @@ -0,0 +1,12 @@ +pub mod access_key; +pub mod avatar; +pub mod chpc; +pub mod notification; +pub mod notify; +pub mod preferences; +pub mod profile; +pub mod projects; +pub mod repository; +pub mod ssh_key; +pub mod subscribe; +pub mod user_info; diff --git a/libs/service/user/notification.rs b/libs/service/user/notification.rs new file mode 100644 index 0000000..09830ca --- /dev/null +++ b/libs/service/user/notification.rs @@ -0,0 +1,200 @@ +use crate::AppService; +use crate::error::AppError; +use chrono::Utc; +use models::users::{user_activity_log, user_notification}; +use sea_orm::*; +use serde::{Deserialize, Serialize}; +use session::Session; +use uuid::Uuid; + +#[derive(Deserialize, Serialize, Clone, Debug, utoipa::ToSchema)] +pub struct NotificationPreferencesParams { + pub email_enabled: Option, + pub in_app_enabled: Option, + pub push_enabled: Option, + pub digest_mode: Option, + pub dnd_enabled: Option, + pub dnd_start_minute: Option, + pub dnd_end_minute: Option, + pub marketing_enabled: Option, + pub security_enabled: Option, + pub product_enabled: Option, 
+} + +#[derive(Deserialize, Serialize, Clone, Debug, utoipa::ToSchema)] +pub struct NotificationPreferencesResponse { + pub user_id: Uuid, + pub email_enabled: bool, + pub in_app_enabled: bool, + pub push_enabled: bool, + pub digest_mode: String, + pub dnd_enabled: bool, + pub dnd_start_minute: Option, + pub dnd_end_minute: Option, + pub marketing_enabled: bool, + pub security_enabled: bool, + pub product_enabled: bool, + pub created_at: chrono::DateTime, + pub updated_at: chrono::DateTime, +} + +impl From for NotificationPreferencesResponse { + fn from(prefs: user_notification::Model) -> Self { + NotificationPreferencesResponse { + user_id: prefs.user, + email_enabled: prefs.email_enabled, + in_app_enabled: prefs.in_app_enabled, + push_enabled: prefs.push_enabled, + digest_mode: prefs.digest_mode, + dnd_enabled: prefs.dnd_enabled, + dnd_start_minute: prefs.dnd_start_minute, + dnd_end_minute: prefs.dnd_end_minute, + marketing_enabled: prefs.marketing_enabled, + security_enabled: prefs.security_enabled, + product_enabled: prefs.product_enabled, + created_at: prefs.created_at, + updated_at: prefs.updated_at, + } + } +} + +impl AppService { + pub async fn user_get_notification_preferences( + &self, + context: &Session, + ) -> Result { + let user_uid = context.user().ok_or(AppError::Unauthorized)?; + + let prefs = user_notification::Entity::find_by_id(user_uid) + .one(&self.db) + .await?; + + if let Some(prefs) = prefs { + Ok(NotificationPreferencesResponse::from(prefs)) + } else { + self.user_create_default_notification_preferences(user_uid) + .await + } + } + + pub async fn user_update_notification_preferences( + &self, + context: &Session, + params: NotificationPreferencesParams, + ) -> Result { + let user_uid = context.user().ok_or(AppError::Unauthorized)?; + + let prefs = user_notification::Entity::find_by_id(user_uid) + .one(&self.db) + .await?; + + let updated_prefs = if let Some(prefs) = prefs { + let mut active_prefs: user_notification::ActiveModel = 
prefs.into(); + + if let Some(email_enabled) = params.email_enabled { + active_prefs.email_enabled = Set(email_enabled); + } + if let Some(in_app_enabled) = params.in_app_enabled { + active_prefs.in_app_enabled = Set(in_app_enabled); + } + if let Some(push_enabled) = params.push_enabled { + active_prefs.push_enabled = Set(push_enabled); + } + if let Some(digest_mode) = params.digest_mode.clone() { + active_prefs.digest_mode = Set(digest_mode); + } + if let Some(dnd_enabled) = params.dnd_enabled { + active_prefs.dnd_enabled = Set(dnd_enabled); + } + if let Some(dnd_start_minute) = params.dnd_start_minute { + active_prefs.dnd_start_minute = Set(Some(dnd_start_minute)); + } + if let Some(dnd_end_minute) = params.dnd_end_minute { + active_prefs.dnd_end_minute = Set(Some(dnd_end_minute)); + } + if let Some(marketing_enabled) = params.marketing_enabled { + active_prefs.marketing_enabled = Set(marketing_enabled); + } + if let Some(security_enabled) = params.security_enabled { + active_prefs.security_enabled = Set(security_enabled); + } + if let Some(product_enabled) = params.product_enabled { + active_prefs.product_enabled = Set(product_enabled); + } + active_prefs.updated_at = Set(Utc::now()); + + active_prefs.update(&self.db).await? 
+ } else { + let new_prefs = user_notification::ActiveModel { + user: Set(user_uid), + email_enabled: Set(params.email_enabled.unwrap_or(true)), + in_app_enabled: Set(params.in_app_enabled.unwrap_or(true)), + push_enabled: Set(params.push_enabled.unwrap_or(false)), + digest_mode: Set(params + .digest_mode + .clone() + .unwrap_or_else(|| "daily".to_string())), + dnd_enabled: Set(params.dnd_enabled.unwrap_or(false)), + dnd_start_minute: Set(params.dnd_start_minute), + dnd_end_minute: Set(params.dnd_end_minute), + marketing_enabled: Set(params.marketing_enabled.unwrap_or(false)), + security_enabled: Set(params.security_enabled.unwrap_or(true)), + product_enabled: Set(params.product_enabled.unwrap_or(false)), + created_at: Set(Utc::now()), + updated_at: Set(Utc::now()), + }; + + new_prefs.insert(&self.db).await? + }; + + let _ = user_activity_log::ActiveModel { + user_uid: Set(Some(user_uid)), + action: Set("notification_preferences_update".to_string()), + ip_address: Set(context.ip_address()), + user_agent: Set(context.user_agent()), + details: Set(serde_json::json!({ + "updated_fields": { + "email_enabled": params.email_enabled.is_some(), + "in_app_enabled": params.in_app_enabled.is_some(), + "push_enabled": params.push_enabled.is_some(), + "digest_mode": params.digest_mode.is_some(), + "dnd_enabled": params.dnd_enabled.is_some(), + "marketing_enabled": params.marketing_enabled.is_some(), + "security_enabled": params.security_enabled.is_some(), + "product_enabled": params.product_enabled.is_some(), + } + })), + created_at: Set(Utc::now()), + ..Default::default() + } + .insert(&self.db) + .await; + + Ok(NotificationPreferencesResponse::from(updated_prefs)) + } + + async fn user_create_default_notification_preferences( + &self, + user_uid: Uuid, + ) -> Result { + let prefs = user_notification::ActiveModel { + user: Set(user_uid), + email_enabled: Set(true), + in_app_enabled: Set(true), + push_enabled: Set(false), + digest_mode: Set("daily".to_string()), + dnd_enabled: 
Set(false), + dnd_start_minute: Set(None), + dnd_end_minute: Set(None), + marketing_enabled: Set(false), + security_enabled: Set(true), + product_enabled: Set(false), + created_at: Set(Utc::now()), + updated_at: Set(Utc::now()), + }; + + let created_prefs = prefs.insert(&self.db).await?; + + Ok(NotificationPreferencesResponse::from(created_prefs)) + } +} diff --git a/libs/service/user/notify.rs b/libs/service/user/notify.rs new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/libs/service/user/notify.rs @@ -0,0 +1 @@ + diff --git a/libs/service/user/preferences.rs b/libs/service/user/preferences.rs new file mode 100644 index 0000000..c228802 --- /dev/null +++ b/libs/service/user/preferences.rs @@ -0,0 +1,151 @@ +use crate::AppService; +use crate::error::AppError; +use chrono::Utc; +use models::users::{user_activity_log, user_preferences}; +use sea_orm::*; +use serde::{Deserialize, Serialize}; +use session::Session; +use uuid::Uuid; + +#[derive(Deserialize, Serialize, Clone, Debug, utoipa::ToSchema)] +pub struct PreferencesParams { + pub language: Option, + pub theme: Option, + pub timezone: Option, + pub email_notifications: Option, + pub in_app_notifications: Option, +} + +#[derive(Deserialize, Serialize, Clone, Debug, utoipa::ToSchema)] +pub struct PreferencesResponse { + pub language: String, + pub theme: String, + pub timezone: String, + pub email_notifications: bool, + pub in_app_notifications: bool, + pub created_at: chrono::DateTime, + pub updated_at: chrono::DateTime, +} + +impl From for PreferencesResponse { + fn from(prefs: user_preferences::Model) -> Self { + PreferencesResponse { + language: prefs.language, + theme: prefs.theme, + timezone: prefs.timezone, + email_notifications: prefs.email_notifications, + in_app_notifications: prefs.in_app_notifications, + created_at: prefs.created_at, + updated_at: prefs.updated_at, + } + } +} + +impl AppService { + pub async fn user_get_preferences( + &self, + context: &Session, + ) -> Result { + let 
user_uid = context.user().ok_or(AppError::Unauthorized)?; + + let prefs = user_preferences::Entity::find_by_id(user_uid) + .one(&self.db) + .await?; + + if let Some(prefs) = prefs { + Ok(PreferencesResponse::from(prefs)) + } else { + self.user_create_default_preferences(user_uid).await + } + } + + pub async fn user_update_preferences( + &self, + context: &Session, + params: PreferencesParams, + ) -> Result { + let user_uid = context.user().ok_or(AppError::Unauthorized)?; + + let prefs = user_preferences::Entity::find_by_id(user_uid) + .one(&self.db) + .await?; + + let updated_prefs = if let Some(prefs) = prefs { + let mut active_prefs: user_preferences::ActiveModel = prefs.into(); + + if let Some(language) = params.language.clone() { + active_prefs.language = Set(language); + } + if let Some(theme) = params.theme.clone() { + active_prefs.theme = Set(theme); + } + if let Some(timezone) = params.timezone.clone() { + active_prefs.timezone = Set(timezone); + } + if let Some(email_notifications) = params.email_notifications { + active_prefs.email_notifications = Set(email_notifications); + } + if let Some(in_app_notifications) = params.in_app_notifications { + active_prefs.in_app_notifications = Set(in_app_notifications); + } + active_prefs.updated_at = Set(Utc::now()); + + active_prefs.update(&self.db).await? + } else { + let new_prefs = user_preferences::ActiveModel { + user: Set(user_uid), + language: Set(params.language.clone().unwrap_or_else(|| "en".to_string())), + theme: Set(params.theme.clone().unwrap_or_else(|| "light".to_string())), + timezone: Set(params.timezone.clone().unwrap_or_else(|| "UTC".to_string())), + email_notifications: Set(params.email_notifications.unwrap_or(true)), + in_app_notifications: Set(params.in_app_notifications.unwrap_or(true)), + created_at: Set(Utc::now()), + updated_at: Set(Utc::now()), + }; + + new_prefs.insert(&self.db).await? 
+ }; + + let _ = user_activity_log::ActiveModel { + user_uid: Set(Some(user_uid)), + action: Set("preferences_update".to_string()), + ip_address: Set(context.ip_address()), + user_agent: Set(context.user_agent()), + details: Set(serde_json::json!({ + "updated_fields": { + "language": params.language.is_some(), + "theme": params.theme.is_some(), + "timezone": params.timezone.is_some(), + "email_notifications": params.email_notifications.is_some(), + "in_app_notifications": params.in_app_notifications.is_some(), + } + })), + created_at: Set(Utc::now()), + ..Default::default() + } + .insert(&self.db) + .await; + + Ok(PreferencesResponse::from(updated_prefs)) + } + + async fn user_create_default_preferences( + &self, + user_uid: Uuid, + ) -> Result { + let prefs = user_preferences::ActiveModel { + user: Set(user_uid), + language: Set("en".to_string()), + theme: Set("light".to_string()), + timezone: Set("UTC".to_string()), + email_notifications: Set(true), + in_app_notifications: Set(true), + created_at: Set(Utc::now()), + updated_at: Set(Utc::now()), + }; + + let created_prefs = prefs.insert(&self.db).await?; + + Ok(PreferencesResponse::from(created_prefs)) + } +} diff --git a/libs/service/user/profile.rs b/libs/service/user/profile.rs new file mode 100644 index 0000000..d18d6b9 --- /dev/null +++ b/libs/service/user/profile.rs @@ -0,0 +1,118 @@ +use crate::AppService; +use crate::error::AppError; +use chrono::Utc; +use models::users::{user, user_activity_log}; +use sea_orm::*; +use serde::{Deserialize, Serialize}; +use session::Session; +use uuid::Uuid; + +#[derive(Deserialize, Serialize, Clone, Debug, utoipa::ToSchema)] +pub struct UpdateProfileParams { + pub avatar_url: Option, + pub website_url: Option, + pub organization: Option, +} + +#[derive(Deserialize, Serialize, Clone, Debug, utoipa::ToSchema)] +pub struct ProfileResponse { + pub uid: Uuid, + pub username: String, + pub display_name: Option, + pub avatar_url: Option, + pub website_url: Option, + pub 
organization: Option, + pub created_at: chrono::DateTime, + pub updated_at: chrono::DateTime, + pub last_sign_in_at: Option>, +} + +impl From for ProfileResponse { + fn from(user: user::Model) -> Self { + ProfileResponse { + uid: user.uid, + username: user.username, + display_name: user.display_name, + avatar_url: user.avatar_url, + website_url: user.website_url, + organization: user.organization, + created_at: user.created_at, + updated_at: user.updated_at, + last_sign_in_at: user.last_sign_in_at, + } + } +} + +impl AppService { + pub async fn user_get_profile(&self, user_uid: Uuid) -> Result { + let user = self.utils_find_user_by_uid(user_uid).await?; + Ok(ProfileResponse::from(user)) + } + pub async fn user_get_profile_by_username( + &self, + user_name: String, + ) -> Result { + let user = self.utils_find_user_by_username(user_name).await?; + Ok(ProfileResponse::from(user)) + } + pub async fn user_get_profile_by_context( + &self, + context: &Session, + ) -> Result { + let user_uid = context.user().ok_or(AppError::Unauthorized)?; + let user = self.utils_find_user_by_uid(user_uid).await?; + Ok(ProfileResponse::from(user)) + } + + pub async fn user_get_current_profile( + &self, + context: &Session, + ) -> Result { + let user_uid = context.user().ok_or(AppError::Unauthorized)?; + self.user_get_profile(user_uid).await + } + + pub async fn user_update_profile( + &self, + context: &Session, + params: UpdateProfileParams, + ) -> Result { + let user_uid = context.user().ok_or(AppError::Unauthorized)?; + let user: user::Model = self.utils_find_user_by_uid(user_uid).await?; + + let mut active_user: user::ActiveModel = user.into(); + + if params.avatar_url.is_some() { + active_user.avatar_url = Set(params.avatar_url.clone()); + } + if params.website_url.is_some() { + active_user.website_url = Set(params.website_url.clone()); + } + if params.organization.is_some() { + active_user.organization = Set(params.organization.clone()); + } + active_user.updated_at = Set(Utc::now()); 
+ + let updated_user = active_user.update(&self.db).await?; + + let _ = user_activity_log::ActiveModel { + user_uid: Set(Some(user_uid)), + action: Set("profile_update".to_string()), + ip_address: Set(context.ip_address()), + user_agent: Set(context.user_agent()), + details: Set(serde_json::json!({ + "updated_fields": { + "avatar_url": params.avatar_url.is_some(), + "website_url": params.website_url.is_some(), + "organization": params.organization.is_some(), + } + })), + created_at: Set(Utc::now()), + ..Default::default() + } + .insert(&self.db) + .await; + + Ok(ProfileResponse::from(updated_user)) + } +} diff --git a/libs/service/user/projects.rs b/libs/service/user/projects.rs new file mode 100644 index 0000000..14f17d9 --- /dev/null +++ b/libs/service/user/projects.rs @@ -0,0 +1,142 @@ +use crate::AppService; +use crate::error::AppError; +use chrono::Utc; +use models::projects::{project, project_members}; +use models::users::user; +use sea_orm::prelude::*; +use sea_orm::*; +use serde::{Deserialize, Serialize}; +use session::Session; +use utoipa::{IntoParams, ToSchema}; +use uuid::Uuid; + +#[derive(Debug, Clone, Serialize, Deserialize, ToSchema)] +pub struct UserProjectInfo { + pub uid: Uuid, + pub name: String, + pub display_name: String, + pub avatar_url: Option, + pub description: Option, + pub is_public: bool, + pub created_at: chrono::DateTime, + pub updated_at: chrono::DateTime, + pub member_count: i64, + pub is_member: bool, +} +#[derive(Debug, Clone, Serialize, Deserialize, ToSchema)] +pub struct UserProjectsResponse { + pub username: String, + pub projects: Vec, + pub total_count: u64, +} + +#[derive(Debug, Clone, Serialize, Deserialize, ToSchema, IntoParams)] +pub struct UserProjectsQuery { + pub page: Option, + pub per_page: Option, +} + +impl AppService { + pub async fn get_user_projects( + &self, + context: Session, + username: String, + query: UserProjectsQuery, + ) -> Result { + let target_user = user::Entity::find() + 
.filter(user::Column::Username.eq(&username)) + .one(&self.db) + .await? + .ok_or(AppError::UserNotFound)?; + + let current_user_uid = context.user(); + + let is_owner = current_user_uid + .map(|uid| uid == target_user.uid) + .unwrap_or(false); + let has_admin_privilege = false; + + let page = std::cmp::Ord::max(query.page.unwrap_or(1), 1); + let per_page = std::cmp::Ord::min(std::cmp::Ord::max(query.per_page.unwrap_or(20), 1), 100); + let offset = (page - 1) * per_page; + + let mut condition = Condition::all().add(project::Column::CreatedBy.eq(target_user.uid)); + + if !is_owner && !has_admin_privilege { + condition = condition.add(project::Column::IsPublic.eq(true)); + } + + let total_count = project::Entity::find() + .filter(condition.clone()) + .count(&self.db) + .await?; + + let project_list = project::Entity::find() + .filter(condition) + .order_by_desc(project::Column::CreatedAt) + .limit(per_page) + .offset(offset) + .all(&self.db) + .await?; + + let user_project_memberships: std::collections::HashSet = + if let Some(uid) = current_user_uid { + project_members::Entity::find() + .filter(project_members::Column::User.eq(uid)) + .select_only() + .column(project_members::Column::Project) + .into_tuple::() + .all(&self.db) + .await? 
+ .into_iter() + .collect() + } else { + std::collections::HashSet::new() + }; + + let mut project_infos: Vec = Vec::new(); + for project in project_list { + let member_count = project_members::Entity::find() + .filter(project_members::Column::Project.eq(project.id)) + .count(&self.db) + .await?; + + let is_member = user_project_memberships.contains(&project.id); + + project_infos.push(UserProjectInfo { + uid: project.id, + name: project.name, + display_name: project.display_name, + avatar_url: project.avatar_url, + description: project.description, + is_public: project.is_public, + created_at: project.created_at, + updated_at: project.updated_at, + member_count: member_count as i64, + is_member, + }); + } + + Ok(UserProjectsResponse { + username: target_user.username, + projects: project_infos, + total_count, + }) + } + + pub async fn get_current_user_projects( + &self, + context: Session, + query: UserProjectsQuery, + ) -> Result { + let user_uid = context.user().ok_or(AppError::Unauthorized)?; + + let user = user::Entity::find() + .filter(user::Column::Uid.eq(user_uid)) + .one(&self.db) + .await? 
+ .ok_or(AppError::UserNotFound)?; + + self.get_user_projects(context, user.username, query).await + } +} diff --git a/libs/service/user/repository.rs b/libs/service/user/repository.rs new file mode 100644 index 0000000..89718ce --- /dev/null +++ b/libs/service/user/repository.rs @@ -0,0 +1,117 @@ +use crate::AppService; +use crate::error::AppError; +use chrono::Utc; +use models::repos::repo; +use models::users::user; +use sea_orm::prelude::*; +use sea_orm::*; +use serde::{Deserialize, Serialize}; +use session::Session; +use utoipa::{IntoParams, ToSchema}; +use uuid::Uuid; + +#[derive(Debug, Clone, Serialize, Deserialize, ToSchema)] +pub struct UserRepoInfo { + pub uid: Uuid, + pub repo_name: String, + pub description: Option, + pub default_branch: String, + pub is_private: bool, + pub storage_path: String, + pub created_at: chrono::DateTime, + pub updated_at: chrono::DateTime, +} + +#[derive(Debug, Clone, Serialize, Deserialize, ToSchema)] +pub struct UserReposResponse { + pub username: String, + pub repos: Vec, + pub total_count: u64, +} + +#[derive(Debug, Clone, Serialize, Deserialize, ToSchema, IntoParams)] +pub struct UserReposQuery { + pub page: Option, + pub per_page: Option, +} + +impl AppService { + pub async fn get_user_repos( + &self, + context: Session, + username: String, + query: UserReposQuery, + ) -> Result { + let target_user = user::Entity::find() + .filter(user::Column::Username.eq(&username)) + .one(&self.db) + .await? 
+ .ok_or(AppError::UserNotFound)?; + + let current_user_uid = context.user(); + + let is_owner = current_user_uid + .map(|uid| uid == target_user.uid) + .unwrap_or(false); + let has_admin_privilege = false; + + let page = std::cmp::Ord::max(query.page.unwrap_or(1), 1); + let per_page = std::cmp::Ord::min(std::cmp::Ord::max(query.per_page.unwrap_or(20), 1), 100); + let offset = (page - 1) * per_page; + + let mut condition = Condition::all().add(repo::Column::CreatedBy.eq(target_user.uid)); + + if !is_owner && !has_admin_privilege { + condition = condition.add(repo::Column::IsPrivate.eq(false)); + } + + let total_count = repo::Entity::find() + .filter(condition.clone()) + .count(&self.db) + .await?; + + let repos = repo::Entity::find() + .filter(condition) + .order_by_desc(repo::Column::CreatedAt) + .limit(per_page) + .offset(offset) + .all(&self.db) + .await?; + + let repo_infos: Vec = repos + .into_iter() + .map(|r| UserRepoInfo { + uid: r.id, + repo_name: r.repo_name, + description: r.description, + default_branch: r.default_branch, + is_private: r.is_private, + storage_path: r.storage_path, + created_at: r.created_at, + updated_at: r.updated_at, + }) + .collect(); + + Ok(UserReposResponse { + username: target_user.username, + repos: repo_infos, + total_count, + }) + } + + pub async fn get_current_user_repos( + &self, + context: Session, + query: UserReposQuery, + ) -> Result { + let user_uid = context.user().ok_or(AppError::Unauthorized)?; + + let user = user::Entity::find() + .filter(user::Column::Uid.eq(user_uid)) + .one(&self.db) + .await? 
+ .ok_or(AppError::UserNotFound)?; + + self.get_user_repos(context, user.username, query).await + } +} diff --git a/libs/service/user/ssh_key.rs b/libs/service/user/ssh_key.rs new file mode 100644 index 0000000..b9f836e --- /dev/null +++ b/libs/service/user/ssh_key.rs @@ -0,0 +1,396 @@ +use crate::AppService; +use crate::error::AppError; +use base64::Engine; +use base64::engine::general_purpose; +use chrono::Utc; +use models::users::{user_activity_log, user_ssh_key}; +use sea_orm::*; +use serde::{Deserialize, Serialize}; +use session::Session; +use sha2::{Digest, Sha256}; +use uuid::Uuid; + +#[derive(Deserialize, Serialize, Clone, Debug, utoipa::ToSchema)] +pub struct AddSshKeyParams { + pub title: String, + + pub public_key: String, +} + +#[derive(Deserialize, Serialize, Clone, Debug, utoipa::ToSchema)] +pub struct UpdateSshKeyParams { + pub title: Option, + + pub expires_at: Option>, +} + +#[derive(Deserialize, Serialize, Clone, Debug, utoipa::ToSchema)] +pub struct SshKeyResponse { + pub id: i64, + pub user_uid: Uuid, + pub title: String, + pub fingerprint: String, + pub key_type: String, + pub key_bits: Option, + pub is_verified: bool, + pub last_used_at: Option>, + pub expires_at: Option>, + pub is_revoked: bool, + pub created_at: chrono::DateTime, + pub updated_at: chrono::DateTime, +} + +#[derive(Deserialize, Serialize, Clone, Debug, utoipa::ToSchema)] +pub struct SshKeyListResponse { + pub keys: Vec, + pub total: usize, +} + +#[derive(Debug)] +pub struct ParsedSshKey { + pub key_type: String, + pub key_data: Vec, + pub key_bits: Option, + pub comment: Option, +} + +impl AppService { + pub async fn user_add_ssh_key( + &self, + context: &Session, + params: AddSshKeyParams, + ) -> Result { + let user_uid = context.user().ok_or(AppError::Unauthorized)?; + + let public_key = params.public_key.trim().to_string(); + + let parsed = self.user_parse_ssh_public_key(&public_key)?; + + let fingerprint = self.user_generate_ssh_fingerprint(&parsed.key_data)?; + + let 
existing: Option = user_ssh_key::Entity::find() + .filter(user_ssh_key::Column::Fingerprint.eq(&fingerprint)) + .one(&self.db) + .await?; + + if existing.is_some() { + return Err(AppError::BadRequest("SSH key already exists".to_string())); + } + + let count = user_ssh_key::Entity::find() + .filter(user_ssh_key::Column::User.eq(user_uid)) + .filter(user_ssh_key::Column::IsRevoked.eq(false)) + .count(&self.db) + .await?; + + if count >= 50 { + return Err(AppError::BadRequest("Too many SSH keys".to_string())); + } + + let now = Utc::now(); + let ssh_key_model = user_ssh_key::ActiveModel { + user: Set(user_uid), + title: Set(params.title.clone()), + public_key: Set(public_key), + fingerprint: Set(fingerprint), + key_type: Set(parsed.key_type.clone()), + key_bits: Set(parsed.key_bits), + is_verified: Set(false), + last_used_at: Set(None), + expires_at: Set(None), + is_revoked: Set(false), + created_at: Set(now), + updated_at: Set(now), + ..Default::default() + }; + + let created_key = ssh_key_model.insert(&self.db).await?; + + let _ = user_activity_log::ActiveModel { + user_uid: Set(Some(user_uid)), + action: Set("ssh_key_add".to_string()), + ip_address: Set(context.ip_address()), + user_agent: Set(context.user_agent()), + details: Set(serde_json::json!({ + "key_id": created_key.id, + "key_title": params.title, + "key_type": parsed.key_type, + "fingerprint": created_key.fingerprint + })), + created_at: Set(now), + ..Default::default() + } + .insert(&self.db) + .await; + + Ok(self.user_model_to_response(created_key)) + } + + pub async fn user_list_ssh_keys( + &self, + context: &Session, + ) -> Result { + let user_uid = context.user().ok_or(AppError::Unauthorized)?; + + let keys: Vec = user_ssh_key::Entity::find() + .filter(user_ssh_key::Column::User.eq(user_uid)) + .order_by_desc(user_ssh_key::Column::CreatedAt) + .all(&self.db) + .await?; + + let total = keys.len(); + let keys = keys + .into_iter() + .map(|k| self.user_model_to_response(k)) + .collect(); + + 
Ok(SshKeyListResponse { keys, total }) + } + + pub async fn user_get_ssh_key( + &self, + context: &Session, + key_id: i64, + ) -> Result { + let user_uid = context.user().ok_or(AppError::Unauthorized)?; + + let key: user_ssh_key::Model = user_ssh_key::Entity::find_by_id(key_id) + .filter(user_ssh_key::Column::User.eq(user_uid)) + .one(&self.db) + .await? + .ok_or(AppError::NotFound("SSH key not found".to_string()))?; + + Ok(self.user_model_to_response(key)) + } + + pub async fn user_update_ssh_key( + &self, + context: &Session, + key_id: i64, + params: UpdateSshKeyParams, + ) -> Result { + let user_uid = context.user().ok_or(AppError::Unauthorized)?; + + let key: user_ssh_key::Model = user_ssh_key::Entity::find_by_id(key_id) + .filter(user_ssh_key::Column::User.eq(user_uid)) + .one(&self.db) + .await? + .ok_or(AppError::NotFound("SSH key not found".to_string()))?; + + let mut active_key: user_ssh_key::ActiveModel = key.into(); + + let updated_title = params.title.clone(); + let updated_expires_at = params.expires_at; + + if let Some(title) = params.title { + active_key.title = Set(title); + } + + if let Some(expires_at) = params.expires_at { + active_key.expires_at = Set(Some(expires_at)); + } + + active_key.updated_at = Set(Utc::now()); + + let updated_key = active_key.update(&self.db).await?; + + let _ = user_activity_log::ActiveModel { + user_uid: Set(Some(user_uid)), + action: Set("ssh_key_update".to_string()), + ip_address: Set(context.ip_address()), + user_agent: Set(context.user_agent()), + details: Set(serde_json::json!({ + "key_id": key_id, + "updated_fields": { + "title": updated_title, + "expires_at": updated_expires_at + } + })), + created_at: Set(Utc::now()), + ..Default::default() + } + .insert(&self.db) + .await; + + Ok(self.user_model_to_response(updated_key)) + } + + pub async fn user_delete_ssh_key( + &self, + context: &Session, + key_id: i64, + ) -> Result<(), AppError> { + let user_uid = context.user().ok_or(AppError::Unauthorized)?; + + let 
key: user_ssh_key::Model = user_ssh_key::Entity::find_by_id(key_id) + .filter(user_ssh_key::Column::User.eq(user_uid)) + .one(&self.db) + .await? + .ok_or(AppError::NotFound("SSH key not found".to_string()))?; + let _ = user_activity_log::ActiveModel { + user_uid: Set(Some(user_uid)), + action: Set("ssh_key_delete".to_string()), + ip_address: Set(context.ip_address()), + user_agent: Set(context.user_agent()), + details: Set(serde_json::json!({ + "key_id": key_id, + "key_title": key.title.clone(), + "fingerprint": key.fingerprint.clone() + })), + created_at: Set(Utc::now()), + ..Default::default() + } + .insert(&self.db) + .await; + + user_ssh_key::Entity::delete(key.into_active_model()) + .exec(&self.db) + .await?; + + Ok(()) + } + + pub async fn user_revoke_ssh_key( + &self, + context: &Session, + key_id: i64, + ) -> Result<(), AppError> { + let user_uid = context.user().ok_or(AppError::Unauthorized)?; + + let key: user_ssh_key::Model = user_ssh_key::Entity::find_by_id(key_id) + .filter(user_ssh_key::Column::User.eq(user_uid)) + .one(&self.db) + .await? 
+ .ok_or(AppError::NotFound("SSH key not found".to_string()))?; + + let mut active_key: user_ssh_key::ActiveModel = key.clone().into(); + active_key.is_revoked = Set(true); + active_key.updated_at = Set(Utc::now()); + + active_key.update(&self.db).await?; + + let _ = user_activity_log::ActiveModel { + user_uid: Set(Some(user_uid)), + action: Set("ssh_key_revoke".to_string()), + ip_address: Set(context.ip_address()), + user_agent: Set(context.user_agent()), + details: Set(serde_json::json!({ + "key_id": key_id, + "key_title": key.title, + "fingerprint": key.fingerprint + })), + created_at: Set(Utc::now()), + ..Default::default() + } + .insert(&self.db) + .await; + + Ok(()) + } + + pub async fn user_verify_ssh_key_by_fingerprint( + &self, + fingerprint: &str, + ) -> Result { + let key: user_ssh_key::Model = user_ssh_key::Entity::find() + .filter(user_ssh_key::Column::Fingerprint.eq(fingerprint)) + .filter(user_ssh_key::Column::IsRevoked.eq(false)) + .one(&self.db) + .await? + .ok_or(AppError::NotFound("SSH key not found".to_string()))?; + + if let Some(expires_at) = key.expires_at { + let now = Utc::now(); + if now > expires_at { + return Err(AppError::Unauthorized); // AppError::SshKeyExpired replaced with generic Unauthorized + } + } + + Ok(key) + } + + pub async fn user_touch_ssh_key_last_used(&self, key_id: i64) -> Result<(), AppError> { + let key: user_ssh_key::Model = user_ssh_key::Entity::find_by_id(key_id) + .one(&self.db) + .await? 
+ .ok_or(AppError::NotFound("SSH key not found".to_string()))?; + + let mut active_key: user_ssh_key::ActiveModel = key.into(); + active_key.last_used_at = Set(Some(Utc::now())); + active_key.updated_at = Set(Utc::now()); + + active_key.update(&self.db).await?; + + Ok(()) + } + + fn user_parse_ssh_public_key(&self, public_key: &str) -> Result { + let parts: Vec<&str> = public_key.split_whitespace().collect(); + if parts.len() < 2 { + return Err(AppError::BadRequest("Invalid SSH key format".to_string())); + } + let key_type = parts[0]; + let key_data_base64 = parts[1]; + let comment: Option = parts.get(2).map(|s| (*s).to_string()); + let normalized_key_type = match key_type { + "ssh-rsa" => "rsa", + "ssh-ed25519" => "ed25519", + "ecdsa-sha2-nistp256" | "ecdsa-sha2-nistp384" | "ecdsa-sha2-nistp521" => "ecdsa", + "ssh-dss" => "dsa", + _ => return Err(AppError::BadRequest("Unsupported SSH key type".to_string())), + }; + let key_data = general_purpose::STANDARD + .decode(key_data_base64) + .map_err(|_| AppError::BadRequest("Invalid SSH key format".to_string()))?; + let key_bits = if normalized_key_type == "rsa" { + self.user_calculate_rsa_key_bits(&key_data) + } else { + None + }; + + Ok(ParsedSshKey { + key_type: normalized_key_type.to_string(), + key_data, + key_bits, + comment, + }) + } + + fn user_generate_ssh_fingerprint(&self, key_data: &[u8]) -> Result { + let mut hasher = Sha256::new(); + hasher.update(key_data); + let hash = hasher.finalize(); + let fingerprint = format!("SHA256:{}", general_purpose::STANDARD_NO_PAD.encode(&hash)); + + Ok(fingerprint) + } + + fn user_calculate_rsa_key_bits(&self, key_data: &[u8]) -> Option { + if key_data.len() < 256 { + Some(1024) + } else if key_data.len() < 512 { + Some(2048) + } else if key_data.len() < 1024 { + Some(4096) + } else { + Some(8192) + } + } + + fn user_model_to_response(&self, model: user_ssh_key::Model) -> SshKeyResponse { + SshKeyResponse { + id: model.id, + user_uid: model.user, + title: model.title, + 
fingerprint: model.fingerprint, + key_type: model.key_type, + key_bits: model.key_bits, + is_verified: model.is_verified, + last_used_at: model.last_used_at, + expires_at: model.expires_at, + is_revoked: model.is_revoked, + created_at: model.created_at, + updated_at: model.updated_at, + } + } +} diff --git a/libs/service/user/subscribe.rs b/libs/service/user/subscribe.rs new file mode 100644 index 0000000..c538b88 --- /dev/null +++ b/libs/service/user/subscribe.rs @@ -0,0 +1,157 @@ +use crate::{AppService, error::AppError}; +use chrono::Utc; +use models::users::user_relation; +use sea_orm::*; +use serde::{Deserialize, Serialize}; +use session::Session; +use uuid::Uuid; + +#[derive(Deserialize, Serialize, Clone, Debug, utoipa::ToSchema)] +pub struct SubscriptionInfo { + pub id: i64, + pub user_uid: Uuid, + pub target_uid: Uuid, + pub subscribed_at: chrono::DateTime, + pub is_active: bool, +} + +impl From for SubscriptionInfo { + fn from(sub: user_relation::Model) -> Self { + SubscriptionInfo { + id: sub.id, + user_uid: sub.user, + target_uid: sub.target, + subscribed_at: sub.created_at, + is_active: true, // user_relation doesn't have is_active, we treat follow as active + } + } +} + +impl AppService { + pub async fn user_subscribe_target( + &self, + context: Session, + target: String, + ) -> Result<(), AppError> { + let user_uid = context.user().ok_or(AppError::Unauthorized)?; + + let target_user = self.utils_find_user_by_username(target).await?; + let target_uid = target_user.uid; + + let existing = user_relation::Entity::find() + .filter(user_relation::Column::User.eq(user_uid)) + .filter(user_relation::Column::Target.eq(target_uid)) + .filter(user_relation::Column::RelationType.eq("follow")) + .one(&self.db) + .await?; + + if existing.is_some() { + return Err(AppError::NotFound("Already subscribed".to_string())); + } + + let subscription = user_relation::ActiveModel { + user: Set(user_uid), + target: Set(target_uid), + relation_type: Set("follow".to_string()), + 
created_at: Set(Utc::now()), + ..Default::default() + }; + + subscription.insert(&self.db).await?; + + Ok(()) + } + + pub async fn user_unsubscribe_target( + &self, + context: Session, + target: String, + ) -> Result<(), AppError> { + let user_uid = context.user().ok_or(AppError::Unauthorized)?; + + let target_user = self.utils_find_user_by_username(target).await?; + let target_uid = target_user.uid; + + user_relation::Entity::delete_many() + .filter(user_relation::Column::User.eq(user_uid)) + .filter(user_relation::Column::Target.eq(target_uid)) + .filter(user_relation::Column::RelationType.eq("follow")) + .exec(&self.db) + .await?; + + Ok(()) + } + + pub async fn user_is_subscribed_to_target( + &self, + context: Session, + target: String, + ) -> Result { + let user_uid = context.user().ok_or(AppError::Unauthorized)?; + + let target_user = self.utils_find_user_by_username(target).await?; + let target_uid = target_user.uid; + + let subscription = user_relation::Entity::find() + .filter(user_relation::Column::User.eq(user_uid)) + .filter(user_relation::Column::Target.eq(target_uid)) + .filter(user_relation::Column::RelationType.eq("follow")) + .one(&self.db) + .await?; + + Ok(subscription.is_some()) + } + + pub async fn user_get_subscribers( + &self, + _context: Session, + target: String, + ) -> Result, AppError> { + let target_user = self.utils_find_user_by_username(target).await?; + let target_uid = target_user.uid; + + let subscribers = user_relation::Entity::find() + .filter(user_relation::Column::Target.eq(target_uid)) + .filter(user_relation::Column::RelationType.eq("follow")) + .order_by_desc(user_relation::Column::CreatedAt) + .all(&self.db) + .await?; + + Ok(subscribers + .into_iter() + .map(SubscriptionInfo::from) + .collect()) + } + + pub async fn user_get_subscription_count( + &self, + _context: Session, + username: String, + ) -> Result { + let user_uid = self.utils_find_user_by_username(username).await?.uid; + let count = user_relation::Entity::find() 
+ .filter(user_relation::Column::User.eq(user_uid)) + .filter(user_relation::Column::RelationType.eq("follow")) + .count(&self.db) + .await?; + + Ok(count) + } + + pub async fn user_get_subscriber_count( + &self, + _context: Session, + target: String, + ) -> Result { + let target_user = self.utils_find_user_by_username(target).await?; + let target_uid = target_user.uid; + + let count = user_relation::Entity::find() + .filter(user_relation::Column::Target.eq(target_uid)) + .filter(user_relation::Column::RelationType.eq("follow")) + .count(&self.db) + .await?; + + Ok(count) + } +} diff --git a/libs/service/user/user_info.rs b/libs/service/user/user_info.rs new file mode 100644 index 0000000..0f6e2d6 --- /dev/null +++ b/libs/service/user/user_info.rs @@ -0,0 +1,109 @@ +use crate::AppService; +use crate::error::AppError; +use chrono::{DateTime, Utc}; +use sea_orm::*; +use serde::{Deserialize, Serialize}; +use session::Session; +use utoipa::ToSchema; +use uuid::Uuid; + +#[derive(Clone, Debug, Serialize, Deserialize, ToSchema)] +pub struct UserInfoExternal { + pub user_uid: Uuid, + pub username: String, + pub display_name: String, + pub avatar_url: Option, + pub master_email: Option, + pub timezone: String, + pub language: String, + pub website_url: Option, + pub organization: Option, + pub last_sign_in_at: Option>, + + pub is_owner: bool, + pub is_subscribe: bool, + + pub total_projects: u64, + pub total_repos: u64, +} + +impl AppService { + pub async fn user_info( + &self, + context: Session, + username: String, + ) -> Result { + let user = models::users::user::Entity::find() + .filter(models::users::user::Column::Username.eq(&username)) + .one(&self.db) + .await? 
+ .ok_or(AppError::UserNotFound)?; + + let preferences = models::users::user_preferences::Entity::find() + .filter(models::users::user_preferences::Column::User.eq(user.uid)) + .one(&self.db) + .await?; + + let master_email = models::users::user_email::Entity::find() + .filter(models::users::user_email::Column::User.eq(user.uid)) + .one(&self.db) + .await? + .map(|email| email.email); + + let is_subscribe = if let Some(current_uid) = context.user() { + let relation = models::users::user_relation::Entity::find() + .filter(models::users::user_relation::Column::User.eq(current_uid)) + .filter(models::users::user_relation::Column::Target.eq(user.uid)) + .filter(models::users::user_relation::Column::RelationType.eq("follow")) + .one(&self.db) + .await?; + relation.is_some() + } else { + false + }; + + // Get user's project memberships to find repos + let user_project_ids: Vec = models::projects::project_members::Entity::find() + .filter(models::projects::project_members::Column::User.eq(user.uid)) + .select_only() + .column(models::projects::project_members::Column::Project) + .into_tuple::() + .all(&self.db) + .await?; + + // Count projects created by user + let total_projects = models::projects::project::Entity::find() + .filter(models::projects::project::Column::CreatedBy.eq(user.uid)) + .count(&self.db) + .await?; + + // Count repos in user's projects + let total_repos = models::repos::repo::Entity::find() + .filter(models::repos::repo::Column::Project.is_in(user_project_ids)) + .count(&self.db) + .await?; + + Ok(UserInfoExternal { + user_uid: user.uid, + username: user.username, + display_name: user.display_name.unwrap_or_default(), + avatar_url: user.avatar_url, + master_email, + timezone: preferences + .as_ref() + .map(|p| p.timezone.clone()) + .unwrap_or_else(|| "UTC".to_string()), + language: preferences + .as_ref() + .map(|p| p.language.clone()) + .unwrap_or_else(|| "en".to_string()), + website_url: user.website_url, + organization: user.organization, + 
last_sign_in_at: user.last_sign_in_at, + is_owner: context.user().map(|u| u == user.uid).unwrap_or(false), + is_subscribe, + total_projects, + total_repos, + }) + } +} diff --git a/libs/service/utils/mod.rs b/libs/service/utils/mod.rs new file mode 100644 index 0000000..eeefba3 --- /dev/null +++ b/libs/service/utils/mod.rs @@ -0,0 +1,4 @@ +pub mod project; +pub mod repo; +pub mod user; +pub mod workspace; diff --git a/libs/service/utils/project.rs b/libs/service/utils/project.rs new file mode 100644 index 0000000..9cf422d --- /dev/null +++ b/libs/service/utils/project.rs @@ -0,0 +1,131 @@ +use crate::AppService; +use crate::error::AppError; +use models::projects::{MemberRole, project, project_history_name, project_members}; +use models::repos::repo; +use sea_orm::*; +use session::Session; +use std::str::FromStr; +use uuid::Uuid; + +impl AppService { + pub async fn utils_find_project_by_name( + &self, + name: String, + ) -> Result { + match project::Entity::find() + .filter(project::Column::Name.eq(name.clone())) + .one(&self.db) + .await + .ok() + .flatten() + { + Some(project) => Ok(project), + None => match project_history_name::Entity::find() + .filter(project_history_name::Column::HistoryName.eq(name)) + .one(&self.db) + .await + .ok() + .flatten() + { + Some(project) => self.utils_find_project_by_uid(project.project_uid).await, + None => Err(AppError::ProjectNotFound), + }, + } + } + + pub async fn utils_find_project_by_uid(&self, uid: Uuid) -> Result { + project::Entity::find_by_id(uid) + .one(&self.db) + .await + .ok() + .flatten() + .ok_or(AppError::ProjectNotFound) + } + + pub async fn utils_check_project_permission( + &self, + project_id: &Uuid, + user_id: Uuid, + required_scopes: &[MemberRole], + ) -> Result<(), AppError> { + let member = project_members::Entity::find() + .filter(project_members::Column::Project.eq(*project_id)) + .filter(project_members::Column::User.eq(user_id)) + .one(&self.db) + .await?; + + if let Some(member) = member { + for scope 
in required_scopes { + if member.scope_role().ok() == Some(scope.clone()) { + return Ok(()); + } + } + } + + Err(AppError::NoPower) + } + pub async fn utils_project_context_role( + &self, + context: &Session, + project_name: String, + ) -> Result { + let user_uid = context.user().ok_or(AppError::Unauthorized)?; + let project = self.utils_find_project_by_name(project_name).await?; + let members = project_members::Entity::find() + .filter(project_members::Column::Project.eq(project.id)) + .filter(project_members::Column::User.eq(user_uid)) + .one(&self.db) + .await + .map_err(|_| AppError::InternalError)? + .map(|m| m.scope); + MemberRole::from_str(&members.ok_or(AppError::Unauthorized)?) + .map_err(|_| AppError::RoleParseError) + } + + pub async fn utils_find_repo_by_name( + &self, + project_uid: Uuid, + repo_name: &str, + ) -> Result { + repo::Entity::find() + .filter(repo::Column::Project.eq(project_uid)) + .filter(repo::Column::RepoName.eq(repo_name)) + .one(&self.db) + .await? + .ok_or(AppError::NotFound(format!( + "Repository '{}' not found", + repo_name + ))) + } + + pub async fn check_project_access( + &self, + project_uid: Uuid, + user_uid: Uuid, + ) -> Result<(), AppError> { + let project = project::Entity::find_by_id(project_uid) + .one(&self.db) + .await + .ok() + .flatten() + .ok_or(AppError::ProjectNotFound)?; + + // Public project - allow access + if project.is_public { + return Ok(()); + } + + // Private project - check membership + let member = project_members::Entity::find() + .filter(project_members::Column::Project.eq(project_uid)) + .filter(project_members::Column::User.eq(user_uid)) + .one(&self.db) + .await?; + + if member.is_some() { + Ok(()) + } else { + Err(AppError::NoPower) + } + } +} diff --git a/libs/service/utils/repo.rs b/libs/service/utils/repo.rs new file mode 100644 index 0000000..849630d --- /dev/null +++ b/libs/service/utils/repo.rs @@ -0,0 +1,83 @@ +use crate::AppService; +use crate::error::AppError; +use 
models::projects::{MemberRole, project_members}; +use models::repos::{repo, repo_history_name}; +use sea_orm::*; +use session::Session; + +impl AppService { + pub async fn utils_find_repo( + &self, + namespace: String, + repo_name: String, + context: &Session, + ) -> Result { + let project = self.utils_find_project_by_name(namespace).await?; + // Propagate DB errors — silently treating them as "not found" would mask infrastructure issues. + let repo = repo::Entity::find() + .filter(repo::Column::RepoName.eq(repo_name.clone())) + .filter(repo::Column::Project.eq(project.id)) + .one(&self.db) + .await?; + + let repo = match repo { + Some(r) => r, + // Fall back to historical repo names (renamed repos). + None => { + let hist = repo_history_name::Entity::find() + .filter(repo_history_name::Column::Name.eq(repo_name)) + .filter(repo_history_name::Column::Project.eq(project.id)) + .one(&self.db) + .await? + .ok_or(AppError::RepoNotFound)?; + repo::Entity::find() + .filter(repo::Column::Id.eq(hist.repo)) + .filter(repo::Column::Project.eq(project.id)) + .one(&self.db) + .await? + .ok_or(AppError::RepoNotFound)? 
+ } + }; + + if repo.is_private { + if let Some(user_uid) = context.user() { + let project_member = project_members::Entity::find() + .filter(project_members::Column::Project.eq(repo.project)) + .filter(project_members::Column::User.eq(user_uid)) + .one(&self.db) + .await?; + if project_member.is_none() { + return Err(AppError::RepoForBidAccess); + } + } else { + return Err(AppError::RepoForBidAccess); + } + } + + Ok(repo) + } + + pub async fn utils_check_repo_admin( + &self, + namespace: String, + repo_name: String, + context: &Session, + ) -> Result { + let user_uid = context.user().ok_or(AppError::Unauthorized)?; + let repo = self.utils_find_repo(namespace, repo_name, context).await?; + + let member = project_members::Entity::find() + .filter(project_members::Column::Project.eq(repo.project)) + .filter(project_members::Column::User.eq(user_uid)) + .one(&self.db) + .await? + .ok_or(AppError::NoPower)?; + + let role = member.scope_role().map_err(|_| AppError::RoleParseError)?; + if role == MemberRole::Admin || role == MemberRole::Owner { + Ok(repo) + } else { + Err(AppError::NoPower) + } + } +} diff --git a/libs/service/utils/user.rs b/libs/service/utils/user.rs new file mode 100644 index 0000000..0eefff2 --- /dev/null +++ b/libs/service/utils/user.rs @@ -0,0 +1,44 @@ +use models::users::*; + +use crate::AppService; +use crate::error::AppError; +use sea_orm::*; +use uuid::Uuid; +impl AppService { + pub async fn utils_find_user_by_username( + &self, + username: String, + ) -> Result { + user::Entity::find() + .filter(user::Column::Username.eq(username)) + .one(&self.db) + .await + .ok() + .flatten() + .ok_or(AppError::UserNotFound) + } + pub async fn utils_find_user_by_uid(&self, uid: Uuid) -> Result { + user::Entity::find_by_id(uid) + .one(&self.db) + .await + .ok() + .flatten() + .ok_or(AppError::UserNotFound) + } + pub async fn utils_find_user_by_email(&self, email: String) -> Result { + let user_email = user_email::Entity::find() + 
.filter(user_email::Column::Email.eq(email)) + .one(&self.db) + .await + .ok() + .flatten() + .ok_or(AppError::UserNotFound)?; + let user = user::Entity::find_by_id(user_email.user) + .one(&self.db) + .await + .ok() + .flatten() + .ok_or(AppError::UserNotFound)?; + Ok(user) + } +} diff --git a/libs/service/utils/workspace.rs b/libs/service/utils/workspace.rs new file mode 100644 index 0000000..b40300f --- /dev/null +++ b/libs/service/utils/workspace.rs @@ -0,0 +1,74 @@ +use crate::AppService; +use crate::error::AppError; +use models::WorkspaceRole; +use models::workspaces::workspace; +use models::workspaces::workspace_membership; + +use sea_orm::*; +use session::Session; +use uuid::Uuid; + +impl AppService { + pub async fn utils_find_workspace_by_slug( + &self, + slug: String, + ) -> Result { + workspace::Entity::find() + .filter(workspace::Column::Slug.eq(slug)) + .filter(workspace::Column::DeletedAt.is_null()) + .one(&self.db) + .await? + .ok_or(AppError::WorkspaceNotFound) + } + + pub async fn utils_find_workspace_by_id(&self, id: Uuid) -> Result { + workspace::Entity::find_by_id(id) + .filter(workspace::Column::DeletedAt.is_null()) + .one(&self.db) + .await? 
+ .ok_or(AppError::WorkspaceNotFound) + } + + pub async fn utils_workspace_context_role( + &self, + ctx: &Session, + workspace_slug: String, + ) -> Result { + let user_uid = ctx.user().ok_or(AppError::Unauthorized)?; + let ws = self.utils_find_workspace_by_slug(workspace_slug).await?; + let membership = workspace_membership::Entity::find() + .filter(workspace_membership::Column::WorkspaceId.eq(ws.id)) + .filter(workspace_membership::Column::UserId.eq(user_uid)) + .filter(workspace_membership::Column::Status.eq("active")) + .one(&self.db) + .await?; + match membership { + Some(m) => m.role.parse().map_err(|_| AppError::RoleParseError), + None => Err(AppError::NotWorkspaceMember), + } + } + + pub async fn utils_check_workspace_permission( + &self, + workspace_id: Uuid, + user_id: Uuid, + required_roles: &[WorkspaceRole], + ) -> Result<(), AppError> { + let membership = workspace_membership::Entity::find() + .filter(workspace_membership::Column::WorkspaceId.eq(workspace_id)) + .filter(workspace_membership::Column::UserId.eq(user_id)) + .filter(workspace_membership::Column::Status.eq("active")) + .one(&self.db) + .await?; + + if let Some(member) = membership { + for role in required_roles { + if member.role.parse::() == Ok(role.clone()) { + return Ok(()); + } + } + } + + Err(AppError::PermissionDenied) + } +} diff --git a/libs/service/webhook_dispatch.rs b/libs/service/webhook_dispatch.rs new file mode 100644 index 0000000..808d881 --- /dev/null +++ b/libs/service/webhook_dispatch.rs @@ -0,0 +1,140 @@ +use hmac::{Hmac, Mac}; +use sha2::Sha256; +use std::time::Duration; + +type HmacSha256 = Hmac; + +/// Signs a payload body using HMAC-SHA256 with the given secret. +/// Returns the "X-Hub-Signature-256" header value. 
+pub fn sign_payload(body: &[u8], secret: &str) -> Option { + if secret.is_empty() { + return None; + } + let mut mac = HmacSha256::new_from_slice(secret.as_bytes()).ok()?; + mac.update(body); + let bytes = mac.finalize().into_bytes(); + Some(format!( + "sha256={}", + bytes.iter().map(|b| format!("{:02x}", b)).collect::() + )) +} + +/// Payload sent for a push event webhook. +#[derive(Debug, serde::Serialize)] +pub struct PushPayload<'a> { + #[serde(rename = "ref")] + pub r#ref: &'a str, + pub before: &'a str, + pub after: &'a str, + pub repository: RepositoryPayload<'a>, + pub pusher: PusherPayload<'a>, + #[serde(skip_serializing_if = "Vec::is_empty")] + pub commits: Vec>, +} + +/// Payload sent for a tag push event webhook. +#[derive(Debug, serde::Serialize)] +pub struct TagPushPayload<'a> { + #[serde(rename = "ref")] + pub r#ref: &'a str, + pub before: &'a str, + pub after: &'a str, + pub repository: RepositoryPayload<'a>, + pub pusher: PusherPayload<'a>, +} + +#[derive(Debug, serde::Serialize)] +pub struct RepositoryPayload<'a> { + pub id: i64, + pub name: &'a str, + pub full_name: &'a str, + pub namespace: &'a str, + pub default_branch: &'a str, +} + +#[derive(Debug, serde::Serialize)] +pub struct PusherPayload<'a> { + pub name: &'a str, + pub email: &'a str, +} + +#[derive(Debug, serde::Serialize)] +pub struct CommitPayload<'a> { + pub id: &'a str, + pub message: &'a str, + pub author: AuthorPayload<'a>, +} + +#[derive(Debug, serde::Serialize)] +pub struct AuthorPayload<'a> { + pub name: &'a str, + pub email: &'a str, +} + +/// A configured webhook destination. 
+#[derive(Debug, Clone)] +pub struct WebhookTarget { + pub id: i64, + pub url: String, + pub secret: Option, + pub content_type: String, + pub events: WebhookEvents, + pub active: bool, +} + +#[derive(Debug, Default, Clone)] +pub struct WebhookEvents { + pub push: bool, + pub tag_push: bool, + pub pull_request: bool, + pub issue_comment: bool, + pub release: bool, +} + +/// Dispatches a webhook HTTP POST request. +pub async fn deliver( + client: &reqwest::Client, + url: &str, + secret: Option<&str>, + content_type: &str, + body: &[u8], +) -> Result<(), DispatchError> { + let mut req = client + .post(url) + .header("Content-Type", content_type) + .header("User-Agent", "Code-Git-Hook/1.0") + .header("X-Webhook-Event", "push") + .timeout(Duration::from_secs(10)) + .body(body.to_vec()); + + if let Some(secret) = secret { + if let Some(sig) = sign_payload(body, secret) { + req = req.header("X-Hub-Signature-256", sig); + } + } + + let resp = req.send().await.map_err(|e| { + if e.is_timeout() { + DispatchError::Timeout + } else if e.is_connect() { + DispatchError::ConnectionFailed + } else { + DispatchError::RequestFailed(e.to_string()) + } + })?; + + let status = resp.status(); + if status.is_success() { + Ok(()) + } else { + Err(DispatchError::HttpError(status.as_u16())) + } +} + +#[derive(Debug)] +pub enum DispatchError { + Timeout, + ConnectionFailed, + RequestFailed(String), + HttpError(u16), +} diff --git a/libs/service/workspace/billing.rs b/libs/service/workspace/billing.rs new file mode 100644 index 0000000..8bd5178 --- /dev/null +++ b/libs/service/workspace/billing.rs @@ -0,0 +1,265 @@ +use crate::AppService; +use crate::error::AppError; +use chrono::{DateTime, Datelike, NaiveDate, Utc}; +use models::Decimal; +use models::workspaces::{workspace_billing, workspace_billing_history, workspace_membership}; +use sea_orm::sea_query::prelude::rust_decimal::prelude::ToPrimitive; +use sea_orm::*; +use serde::{Deserialize, Serialize}; +use session::Session; +use 
utoipa::{IntoParams, ToSchema}; +use uuid::Uuid; + +/// Default monthly AI quota for workspace (shared across all its projects). +const DEFAULT_MONTHLY_QUOTA: f64 = 100.0; + +#[derive(Debug, Clone, Serialize, Deserialize, ToSchema)] +pub struct WorkspaceBillingCurrentResponse { + pub workspace_id: Uuid, + pub currency: String, + pub monthly_quota: f64, + pub balance: f64, + pub total_spent: f64, + pub month_used: f64, + pub cycle_start_utc: DateTime, + pub cycle_end_utc: DateTime, + pub updated_at: DateTime, + pub created_at: DateTime, +} + +#[derive(Debug, Clone, Serialize, Deserialize, ToSchema, IntoParams)] +pub struct WorkspaceBillingHistoryQuery { + pub page: Option, + pub per_page: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize, ToSchema)] +pub struct WorkspaceBillingHistoryItem { + pub uid: Uuid, + pub workspace_id: Uuid, + pub user_id: Option, + pub amount: f64, + pub currency: String, + pub reason: String, + pub extra: Option, + pub created_at: DateTime, +} + +#[derive(Debug, Clone, Serialize, Deserialize, ToSchema)] +pub struct WorkspaceBillingHistoryResponse { + pub page: u64, + pub per_page: u64, + pub total: u64, + pub list: Vec, +} + +#[derive(Debug, Clone, Serialize, Deserialize, ToSchema)] +pub struct WorkspaceBillingAddCreditParams { + pub amount: f64, + pub reason: Option, +} + +impl AppService { + /// Get current workspace billing info. + pub async fn workspace_billing_current( + &self, + ctx: &Session, + workspace_slug: String, + ) -> Result { + let user_uid = ctx.user().ok_or(AppError::Unauthorized)?; + let ws = self + .utils_find_workspace_by_slug(workspace_slug.clone()) + .await?; + let _ = workspace_membership::Entity::find() + .filter(workspace_membership::Column::WorkspaceId.eq(ws.id)) + .filter(workspace_membership::Column::UserId.eq(user_uid)) + .filter(workspace_membership::Column::Status.eq("active")) + .one(&self.db) + .await? 
+ .ok_or(AppError::NotWorkspaceMember)?; + + let billing = self.ensure_workspace_billing(ws.id).await?; + let now_utc = Utc::now(); + let (month_start, next_month_start) = utc_month_bounds(now_utc)?; + + let month_used = workspace_billing_history::Entity::find() + .filter(workspace_billing_history::Column::WorkspaceId.eq(ws.id)) + .filter(workspace_billing_history::Column::Reason.like("ai_usage%")) + .filter(workspace_billing_history::Column::CreatedAt.gte(month_start)) + .filter(workspace_billing_history::Column::CreatedAt.lt(next_month_start)) + .all(&self.db) + .await? + .into_iter() + .map(|m| m.amount.to_f64().unwrap_or_default()) + .sum::(); + + Ok(WorkspaceBillingCurrentResponse { + workspace_id: ws.id, + currency: billing.currency.clone(), + monthly_quota: billing + .monthly_quota + .to_f64() + .unwrap_or(DEFAULT_MONTHLY_QUOTA), + balance: billing.balance.to_f64().unwrap_or_default(), + total_spent: billing.total_spent.to_f64().unwrap_or_default(), + month_used, + cycle_start_utc: month_start, + cycle_end_utc: next_month_start, + updated_at: billing.updated_at, + created_at: billing.created_at, + }) + } + + /// Get workspace billing history. + pub async fn workspace_billing_history( + &self, + ctx: &Session, + workspace_slug: String, + query: WorkspaceBillingHistoryQuery, + ) -> Result { + let user_uid = ctx.user().ok_or(AppError::Unauthorized)?; + let ws = self + .utils_find_workspace_by_slug(workspace_slug.clone()) + .await?; + let _ = workspace_membership::Entity::find() + .filter(workspace_membership::Column::WorkspaceId.eq(ws.id)) + .filter(workspace_membership::Column::UserId.eq(user_uid)) + .filter(workspace_membership::Column::Status.eq("active")) + .one(&self.db) + .await? 
+ .ok_or(AppError::NotWorkspaceMember)?; + + let page = std::cmp::max(query.page.unwrap_or(1), 1); + let per_page = query.per_page.unwrap_or(20).clamp(1, 200); + + self.ensure_workspace_billing(ws.id).await?; + + let paginator = workspace_billing_history::Entity::find() + .filter(workspace_billing_history::Column::WorkspaceId.eq(ws.id)) + .order_by_desc(workspace_billing_history::Column::CreatedAt) + .paginate(&self.db, per_page); + let total = paginator.num_items().await?; + let rows = paginator.fetch_page(page - 1).await?; + + let list = rows + .into_iter() + .map(|x| WorkspaceBillingHistoryItem { + uid: x.uid, + workspace_id: x.workspace_id, + user_id: x.user_id, + amount: x.amount.to_f64().unwrap_or_default(), + currency: x.currency, + reason: x.reason, + extra: x.extra.map(|v| v.into()), + created_at: x.created_at, + }) + .collect(); + + Ok(WorkspaceBillingHistoryResponse { + page, + per_page, + total, + list, + }) + } + + /// Add credit to workspace billing (admin action). + pub async fn workspace_billing_add_credit( + &self, + ctx: &Session, + workspace_slug: String, + params: WorkspaceBillingAddCreditParams, + ) -> Result { + let user_uid = ctx.user().ok_or(AppError::Unauthorized)?; + let ws = self + .utils_find_workspace_by_slug(workspace_slug.clone()) + .await?; + let _ = workspace_membership::Entity::find() + .filter(workspace_membership::Column::WorkspaceId.eq(ws.id)) + .filter(workspace_membership::Column::UserId.eq(user_uid)) + .filter(workspace_membership::Column::Status.eq("active")) + .one(&self.db) + .await? 
+ .ok_or(AppError::NotWorkspaceMember)?; + + if params.amount <= 0.0 { + return Err(AppError::BadRequest("Amount must be positive".to_string())); + } + + let billing = self.ensure_workspace_billing(ws.id).await?; + let now_utc = Utc::now(); + let new_balance = + Decimal::from_f64_retain(billing.balance.to_f64().unwrap_or_default() + params.amount) + .unwrap_or(Decimal::ZERO); + + let _ = workspace_billing::ActiveModel { + workspace_id: Unchanged(ws.id), + balance: Set(new_balance), + updated_at: Set(now_utc), + ..Default::default() + } + .update(&self.db) + .await; + + let _ = workspace_billing_history::ActiveModel { + uid: Set(Uuid::now_v7()), + workspace_id: Set(ws.id), + user_id: Set(Some(user_uid)), + amount: Set(Decimal::from_f64_retain(params.amount).unwrap_or(Decimal::ZERO)), + currency: Set(billing.currency.clone()), + reason: Set(params.reason.unwrap_or_else(|| "credit_added".to_string())), + extra: Set(None), + created_at: Set(now_utc), + } + .insert(&self.db) + .await; + + self.workspace_billing_current(ctx, workspace_slug).await + } + + /// Ensure workspace billing record exists (create with defaults if not). + pub async fn ensure_workspace_billing( + &self, + workspace_id: Uuid, + ) -> Result { + if let Some(billing) = workspace_billing::Entity::find_by_id(workspace_id) + .one(&self.db) + .await? + { + return Ok(billing); + } + + let now_utc = Utc::now(); + let created = workspace_billing::ActiveModel { + workspace_id: Set(workspace_id), + balance: Set(Decimal::ZERO), + currency: Set("USD".to_string()), + monthly_quota: Set( + Decimal::from_f64_retain(DEFAULT_MONTHLY_QUOTA).unwrap_or(Decimal::ZERO) + ), + total_spent: Set(Decimal::ZERO), + updated_at: Set(now_utc), + created_at: Set(now_utc), + }; + Ok(created.insert(&self.db).await?) 
+    }
+}
+
+fn utc_month_bounds(now_utc: DateTime<Utc>) -> Result<(DateTime<Utc>, DateTime<Utc>), AppError> {
+    let year = now_utc.year();
+    let month = now_utc.month();
+    let month_start = NaiveDate::from_ymd_opt(year, month, 1)
+        .and_then(|d| d.and_hms_opt(0, 0, 0))
+        .map(|d| chrono::TimeZone::from_utc_datetime(&Utc, &d))
+        .ok_or_else(|| AppError::InternalServerError("Invalid UTC month start".to_string()))?;
+    let (next_year, next_month) = if month == 12 {
+        (year + 1, 1)
+    } else {
+        (year, month + 1)
+    };
+    let next_month_start = NaiveDate::from_ymd_opt(next_year, next_month, 1)
+        .and_then(|d| d.and_hms_opt(0, 0, 0))
+        .map(|d| chrono::TimeZone::from_utc_datetime(&Utc, &d))
+        .ok_or_else(|| AppError::InternalServerError("Invalid UTC next month start".to_string()))?;
+    Ok((month_start, next_month_start))
+}
diff --git a/libs/service/workspace/info.rs b/libs/service/workspace/info.rs
new file mode 100644
index 0000000..9b8de19
--- /dev/null
+++ b/libs/service/workspace/info.rs
@@ -0,0 +1,329 @@
+use crate::AppService;
+use crate::error::AppError;
+use chrono::{DateTime, Utc};
+use models::projects::project;
+use models::projects::project_activity;
+use models::users::user;
+use models::workspaces::workspace;
+use models::workspaces::workspace_membership;
+use sea_orm::*;
+use serde::{Deserialize, Serialize};
+use session::Session;
+use utoipa::IntoParams;
+
+#[derive(Debug, Clone, Serialize, Deserialize, utoipa::ToSchema)]
+pub struct WorkspaceInfoResponse {
+    pub id: Uuid,
+    pub slug: String,
+    pub name: String,
+    pub description: Option<String>,
+    pub avatar_url: Option<String>,
+    pub plan: String,
+    pub billing_email: Option<String>,
+    pub member_count: i64,
+    pub my_role: Option<String>,
+    pub created_at: chrono::DateTime<Utc>,
+    pub updated_at: chrono::DateTime<Utc>,
+}
+
+#[derive(Debug, Clone, Serialize, Deserialize, utoipa::ToSchema)]
+pub struct WorkspaceListItem {
+    pub id: Uuid,
+    pub slug: String,
+    pub name: String,
+    pub description: Option<String>,
+    pub avatar_url: Option<String>,
+    pub plan: String,
+    pub my_role: String,
+    pub created_at: chrono::DateTime<Utc>,
+}
+
+#[derive(Debug, Clone, Serialize, Deserialize, utoipa::ToSchema)]
+pub struct WorkspaceListResponse {
+    pub workspaces: Vec<WorkspaceListItem>,
+    pub total: u64,
+}
+
+#[derive(Debug, Clone, Serialize, Deserialize, utoipa::ToSchema, IntoParams)]
+pub struct WorkspaceProjectsQuery {
+    pub page: Option<u64>,
+    pub per_page: Option<u64>,
+}
+
+#[derive(Debug, Clone, Serialize, Deserialize, utoipa::ToSchema)]
+pub struct WorkspaceProjectItem {
+    pub uid: Uuid,
+    pub name: String,
+    pub display_name: String,
+    pub avatar_url: Option<String>,
+    pub description: Option<String>,
+    pub is_public: bool,
+    pub created_at: DateTime<Utc>,
+    pub updated_at: DateTime<Utc>,
+}
+
+#[derive(Debug, Clone, Serialize, Deserialize, utoipa::ToSchema)]
+pub struct WorkspaceProjectsResponse {
+    pub projects: Vec<WorkspaceProjectItem>,
+    pub total: u64,
+    pub page: u64,
+    pub per_page: u64,
+}
+
+#[derive(Debug, Clone, Serialize, Deserialize, utoipa::ToSchema)]
+pub struct WorkspaceActivityItem {
+    pub id: i64,
+    pub project_name: String,
+    pub event_type: String,
+    pub title: String,
+    pub content: Option<String>,
+    pub actor_name: String,
+    pub actor_avatar: Option<String>,
+    pub created_at: DateTime<Utc>,
+}
+
+#[derive(Debug, Clone, Serialize, Deserialize, utoipa::ToSchema)]
+pub struct WorkspaceStatsResponse {
+    pub project_count: i64,
+    pub member_count: i64,
+    pub my_role: Option<String>,
+    pub recent_activities: Vec<WorkspaceActivityItem>,
+}
+
+impl AppService {
+    /// Get workspace info by slug. Returns error if user is not a member.
+    pub async fn workspace_info(
+        &self,
+        ctx: &Session,
+        slug: String,
+    ) -> Result<WorkspaceInfoResponse, AppError> {
+        let user_uid = ctx.user();
+        let ws = self.utils_find_workspace_by_slug(slug.clone()).await?;
+
+        // Resolve the caller's role only when a user is logged in; anonymous
+        // callers get `my_role: None`.
+        let my_role = if let Some(uid) = user_uid {
+            workspace_membership::Entity::find()
+                .filter(workspace_membership::Column::WorkspaceId.eq(ws.id))
+                .filter(workspace_membership::Column::UserId.eq(uid))
+                .filter(workspace_membership::Column::Status.eq("active"))
+                .one(&self.db)
+                .await?
+                .map(|m| m.role)
+        } else {
+            None
+        };
+
+        let member_count = workspace_membership::Entity::find()
+            .filter(workspace_membership::Column::WorkspaceId.eq(ws.id))
+            .filter(workspace_membership::Column::Status.eq("active"))
+            .count(&self.db)
+            .await?;
+
+        Ok(WorkspaceInfoResponse {
+            id: ws.id,
+            slug: ws.slug,
+            name: ws.name,
+            description: ws.description,
+            avatar_url: ws.avatar_url,
+            plan: ws.plan,
+            billing_email: ws.billing_email,
+            member_count: member_count as i64,
+            my_role,
+            created_at: ws.created_at,
+            updated_at: ws.updated_at,
+        })
+    }
+
+    /// List all workspaces the current user is a member of.
+    pub async fn workspace_list(&self, ctx: &Session) -> Result<WorkspaceListResponse, AppError> {
+        let user_uid = ctx.user().ok_or(AppError::Unauthorized)?;
+
+        let memberships = workspace_membership::Entity::find()
+            .filter(workspace_membership::Column::UserId.eq(user_uid))
+            .filter(workspace_membership::Column::Status.eq("active"))
+            .all(&self.db)
+            .await?;
+
+        let workspace_ids: Vec<Uuid> = memberships.iter().map(|m| m.workspace_id).collect();
+
+        let workspaces = workspace::Entity::find()
+            .filter(workspace::Column::Id.is_in(workspace_ids))
+            .filter(workspace::Column::DeletedAt.is_null())
+            .all(&self.db)
+            .await?;
+
+        let items: Vec<WorkspaceListItem> = workspaces
+            .into_iter()
+            .map(|ws| {
+                // Safe: every workspace row was selected via these memberships.
+                let membership = memberships
+                    .iter()
+                    .find(|m| m.workspace_id == ws.id)
+                    .cloned()
+                    .unwrap();
+                WorkspaceListItem {
+                    id: ws.id,
+                    slug: ws.slug,
+                    name: ws.name,
+                    description: ws.description,
+                    avatar_url: ws.avatar_url,
+                    plan: ws.plan,
+                    my_role: membership.role,
+                    created_at: ws.created_at,
+                }
+            })
+            .collect();
+
+        // BUGFIX: count the workspaces actually returned. The previous
+        // `total = workspace_ids.len()` counted raw memberships, overstating
+        // the total whenever a workspace was soft-deleted (filtered out above).
+        let total = items.len() as u64;
+
+        Ok(WorkspaceListResponse {
+            workspaces: items,
+            total,
+        })
+    }
+
+    /// List projects belonging to a workspace.
+    pub async fn workspace_projects(
+        &self,
+        ctx: &Session,
+        workspace_slug: String,
+        query: WorkspaceProjectsQuery,
+    ) -> Result<WorkspaceProjectsResponse, AppError> {
+        let user_uid = ctx.user().ok_or(AppError::Unauthorized)?;
+        let ws = self.utils_find_workspace_by_slug(workspace_slug).await?;
+        // Membership gate: any active member may list projects.
+        let _ = workspace_membership::Entity::find()
+            .filter(workspace_membership::Column::WorkspaceId.eq(ws.id))
+            .filter(workspace_membership::Column::UserId.eq(user_uid))
+            .filter(workspace_membership::Column::Status.eq("active"))
+            .one(&self.db)
+            .await?
+            .ok_or(AppError::NotWorkspaceMember)?;
+
+        let page = std::cmp::max(query.page.unwrap_or(1), 1); // 1-based; guards against page = 0
+        let per_page = query.per_page.unwrap_or(20).clamp(1, 200); // bound the page size
+
+        let paginator = project::Entity::find()
+            .filter(project::Column::WorkspaceId.eq(ws.id))
+            .order_by_desc(project::Column::CreatedAt)
+            .paginate(&self.db, per_page);
+        let total = paginator.num_items().await?;
+        let rows = paginator.fetch_page(page - 1).await?; // sea-orm pages are 0-based
+
+        let projects = rows
+            .into_iter()
+            .map(|p| WorkspaceProjectItem {
+                uid: p.id,
+                name: p.name,
+                display_name: p.display_name,
+                avatar_url: p.avatar_url,
+                description: p.description,
+                is_public: p.is_public,
+                created_at: p.created_at,
+                updated_at: p.updated_at,
+            })
+            .collect();
+
+        Ok(WorkspaceProjectsResponse {
+            projects,
+            total,
+            page,
+            per_page,
+        })
+    }
+
+    /// Get workspace stats: project count, member count, recent activities.
+    pub async fn workspace_stats(
+        &self,
+        ctx: &Session,
+        workspace_slug: String,
+    ) -> Result<WorkspaceStatsResponse, AppError> {
+        let user_uid = ctx.user().ok_or(AppError::Unauthorized)?;
+        let ws = self.utils_find_workspace_by_slug(workspace_slug).await?;
+
+        let membership = workspace_membership::Entity::find()
+            .filter(workspace_membership::Column::WorkspaceId.eq(ws.id))
+            .filter(workspace_membership::Column::UserId.eq(user_uid))
+            .filter(workspace_membership::Column::Status.eq("active"))
+            .one(&self.db)
+            .await?;
+
+        let member_count = workspace_membership::Entity::find()
+            .filter(workspace_membership::Column::WorkspaceId.eq(ws.id))
+            .filter(workspace_membership::Column::Status.eq("active"))
+            .count(&self.db)
+            .await?;
+
+        let project_count = project::Entity::find()
+            .filter(project::Column::WorkspaceId.eq(ws.id))
+            .count(&self.db)
+            .await?;
+
+        // Get recent activities across all workspace projects
+        let workspace_projects = project::Entity::find()
+            .filter(project::Column::WorkspaceId.eq(ws.id))
+            .all(&self.db)
+            .await?;
+        let project_ids: Vec<Uuid> = workspace_projects.iter().map(|p| p.id).collect();
+        let project_names: std::collections::HashMap<Uuid, String> = workspace_projects
+            .into_iter()
+            .map(|p| (p.id, p.name))
+            .collect();
+
+        let recent_activities = if project_ids.is_empty() {
+            Vec::new()
+        } else {
+            // Only public activities are surfaced in workspace stats.
+            let activities = project_activity::Entity::find()
+                .filter(project_activity::Column::Project.is_in(project_ids.clone()))
+                .filter(project_activity::Column::IsPrivate.eq(false))
+                .order_by_desc(project_activity::Column::CreatedAt)
+                .limit(10)
+                .all(&self.db)
+                .await?;
+
+            // Collect actor IDs
+            let actor_ids: Vec<Uuid> = activities.iter().map(|a| a.actor).collect();
+            let actors = user::Entity::find()
+                .filter(user::Column::Uid.is_in(actor_ids))
+                .all(&self.db)
+                .await?;
+            let actor_map: std::collections::HashMap<Uuid, (String, Option<String>)> = actors
+                .into_iter()
+                .map(|u| {
+                    // Prefer display_name, falling back to the login name
+                    // (idiomatic form of `.or(Some(username)).unwrap_or_default()`).
+                    (u.uid, (u.display_name.unwrap_or(u.username), u.avatar_url))
+                })
+                .collect();
+
+            activities
+                .into_iter()
+                .map(|a| {
+                    let (actor_name, actor_avatar) = actor_map
+                        .get(&a.actor)
+                        .cloned()
+                        .unwrap_or_else(|| ("Unknown".to_string(), None));
+                    WorkspaceActivityItem {
+                        id: a.id,
+                        project_name: project_names.get(&a.project).cloned().unwrap_or_default(),
+                        event_type: a.event_type,
+                        title: a.title,
+                        content: a.content,
+                        actor_name,
+                        actor_avatar,
+                        created_at: a.created_at,
+                    }
+                })
+                .collect()
+        };
+
+        Ok(WorkspaceStatsResponse {
+            project_count: project_count as i64,
+            member_count: member_count as i64,
+            my_role: membership.map(|m| m.role),
+            recent_activities,
+        })
+    }
+}
+
+use uuid::Uuid;
diff --git a/libs/service/workspace/init.rs b/libs/service/workspace/init.rs
new file mode 100644
index 0000000..64993ce
--- /dev/null
+++ b/libs/service/workspace/init.rs
@@ -0,0 +1,97 @@
+use crate::AppService;
+use crate::error::AppError;
+use chrono::Utc;
+use models::WorkspaceRole;
+use models::workspaces::workspace;
+use models::workspaces::workspace_membership;
+use sea_orm::*;
+use serde::{Deserialize, Serialize};
+use session::Session;
+
+#[derive(Deserialize, Serialize, Clone, Debug, utoipa::ToSchema)]
+pub struct WorkspaceInitParams {
+    pub slug: String,
+    pub name: String,
+    pub description: Option<String>,
+}
+
+impl AppService {
+    pub async fn workspace_init(
+        &self,
+        ctx: &Session,
+        params: WorkspaceInitParams,
+    ) -> Result<workspace::Model, AppError> {
+        let user_uid = ctx.user().ok_or(AppError::Unauthorized)?;
+        let user = self.utils_find_user_by_uid(user_uid).await?;
+
+        // Validate slug: non-empty, alphanumeric, dashes, underscores.
+        // BUGFIX: the empty string previously passed validation because
+        // `chars().all(..)` is vacuously true on an empty iterator.
+        if params.slug.is_empty()
+            || !params
+                .slug
+                .chars()
+                .all(|c| c.is_alphanumeric() || c == '-' || c == '_')
+        {
+            return Err(AppError::BadRequest(
+                "Slug must be non-empty and contain only letters, numbers, hyphens and underscores"
+                    .to_string(),
+            ));
+        }
+
+        // Check slug uniqueness
+        if workspace::Entity::find()
+            .filter(workspace::Column::Slug.eq(&params.slug))
+            .filter(workspace::Column::DeletedAt.is_null())
+            .one(&self.db)
+            .await?
+ .is_some() + { + return Err(AppError::WorkspaceSlugAlreadyExists); + } + + // Check name uniqueness + if workspace::Entity::find() + .filter(workspace::Column::Name.eq(¶ms.name)) + .filter(workspace::Column::DeletedAt.is_null()) + .one(&self.db) + .await? + .is_some() + { + return Err(AppError::WorkspaceNameAlreadyExists); + } + + let txn = self.db.begin().await?; + + let ws = workspace::ActiveModel { + id: Set(Uuid::now_v7()), + slug: Set(params.slug), + name: Set(params.name), + description: Set(params.description), + avatar_url: Set(None), + plan: Set("free".to_string()), + billing_email: Set(None), + stripe_customer_id: Set(None), + stripe_subscription_id: Set(None), + plan_expires_at: Set(None), + deleted_at: Set(None), + created_at: Set(Utc::now()), + updated_at: Set(Utc::now()), + }; + let ws = ws.insert(&txn).await?; + + let membership = workspace_membership::ActiveModel { + id: Default::default(), + workspace_id: Set(ws.id), + user_id: Set(user.uid), + role: Set(WorkspaceRole::Owner.to_string()), + status: Set("active".to_string()), + invited_by: Set(None), + joined_at: Set(Utc::now()), + invite_token: Set(None), + invite_expires_at: Set(None), + }; + membership.insert(&txn).await?; + + txn.commit().await?; + Ok(ws) + } +} + +use uuid::Uuid; diff --git a/libs/service/workspace/members.rs b/libs/service/workspace/members.rs new file mode 100644 index 0000000..33cf064 --- /dev/null +++ b/libs/service/workspace/members.rs @@ -0,0 +1,481 @@ +use crate::AppService; +use crate::error::AppError; +use chrono::{Duration, Utc}; +use email::EmailMessage; +use models::WorkspaceRole; +use models::users::{user, user_email}; +use models::workspaces::workspace; +use models::workspaces::workspace_membership; +use sea_orm::*; +use serde::{Deserialize, Serialize}; +use session::Session; +use uuid::Uuid; + +#[derive(Debug, Clone, Serialize, Deserialize, utoipa::ToSchema)] +pub struct WorkspaceMemberInfo { + pub user_id: Uuid, + pub username: String, + pub display_name: 
Option, + pub avatar_url: Option, + pub role: String, + pub joined_at: chrono::DateTime, + /// Username of the person who invited this member. + pub invited_by_username: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize, utoipa::ToSchema)] +pub struct PendingInvitationInfo { + pub user_id: Uuid, + pub username: String, + pub display_name: Option, + pub avatar_url: Option, + pub email: Option, + pub role: String, + pub invited_by_username: Option, + pub invited_at: chrono::DateTime, + pub expires_at: Option>, +} + +#[derive(Debug, Clone, Serialize, Deserialize, utoipa::ToSchema)] +pub struct WorkspaceMembersResponse { + pub members: Vec, + pub total: u64, + pub page: u64, + pub per_page: u64, +} + +#[derive(Debug, Clone, Serialize, Deserialize, utoipa::ToSchema)] +pub struct WorkspaceInviteParams { + pub email: String, + pub role: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize, utoipa::ToSchema)] +pub struct WorkspaceInviteAcceptParams { + pub token: String, +} + +impl AppService { + pub async fn workspace_members( + &self, + ctx: &Session, + workspace_slug: String, + page: Option, + per_page: Option, + ) -> Result { + let user_uid = ctx.user().ok_or(AppError::Unauthorized)?; + let ws = self.utils_find_workspace_by_slug(workspace_slug).await?; + + // Check membership + let _ = self + .utils_check_workspace_permission(ws.id, user_uid, &[WorkspaceRole::Member]) + .await; + + let page = page.unwrap_or(1); + let per_page = per_page.unwrap_or(20); + + let memberships = workspace_membership::Entity::find() + .filter(workspace_membership::Column::WorkspaceId.eq(ws.id)) + .filter(workspace_membership::Column::Status.eq("active")) + .order_by_desc(workspace_membership::Column::JoinedAt) + .paginate(&self.db, per_page) + .fetch_page(page - 1) + .await?; + + let total = workspace_membership::Entity::find() + .filter(workspace_membership::Column::WorkspaceId.eq(ws.id)) + .filter(workspace_membership::Column::Status.eq("active")) + .count(&self.db) + 
.await?; + + let user_ids: Vec = memberships.iter().map(|m| m.user_id).collect(); + let users = user::Entity::find() + .filter(user::Column::Uid.is_in(user_ids)) + .all(&self.db) + .await?; + + // Collect invited_by user IDs + let inviter_ids: Vec = memberships.iter().filter_map(|m| m.invited_by).collect(); + let inviters = if !inviter_ids.is_empty() { + user::Entity::find() + .filter(user::Column::Uid.is_in(inviter_ids)) + .all(&self.db) + .await? + } else { + vec![] + }; + + let members: Vec = memberships + .into_iter() + .filter_map(|m| { + let u = users.iter().find(|u| u.uid == m.user_id)?; + let invited_by_username = m.invited_by.and_then(|uid| { + inviters + .iter() + .find(|i| i.uid == uid) + .map(|i| i.username.clone()) + }); + Some(WorkspaceMemberInfo { + user_id: u.uid, + username: u.username.clone(), + display_name: u.display_name.clone(), + avatar_url: u.avatar_url.clone(), + role: m.role, + joined_at: m.joined_at, + invited_by_username, + }) + }) + .collect(); + + Ok(WorkspaceMembersResponse { + members, + total, + page, + per_page, + }) + } + + /// List pending (invited but not accepted) memberships for a workspace. + pub async fn workspace_pending_invitations( + &self, + ctx: &Session, + workspace_slug: String, + ) -> Result, AppError> { + let user_uid = ctx.user().ok_or(AppError::Unauthorized)?; + let ws = self.utils_find_workspace_by_slug(workspace_slug).await?; + + self.utils_check_workspace_permission(ws.id, user_uid, &[WorkspaceRole::Admin]) + .await?; + + let pending = workspace_membership::Entity::find() + .filter(workspace_membership::Column::WorkspaceId.eq(ws.id)) + .filter(workspace_membership::Column::Status.eq("pending")) + .order_by_desc(workspace_membership::Column::JoinedAt) + .all(&self.db) + .await?; + + let user_ids: Vec = pending.iter().map(|m| m.user_id).collect(); + let users = if !user_ids.is_empty() { + user::Entity::find() + .filter(user::Column::Uid.is_in(user_ids.clone())) + .all(&self.db) + .await? 
+        } else {
+            vec![]
+        };
+
+        // Get email addresses for invited users
+        let emails: Vec<(Uuid, String)> = user_email::Entity::find()
+            .filter(user_email::Column::User.is_in(user_ids))
+            .all(&self.db)
+            .await?
+            .into_iter()
+            .map(|e| (e.user, e.email))
+            .collect();
+
+        // Get inviter usernames
+        let inviter_ids: Vec<Uuid> = pending.iter().filter_map(|m| m.invited_by).collect();
+        let inviters = if !inviter_ids.is_empty() {
+            user::Entity::find()
+                .filter(user::Column::Uid.is_in(inviter_ids))
+                .all(&self.db)
+                .await?
+        } else {
+            vec![]
+        };
+
+        let invitations: Vec<PendingInvitationInfo> = pending
+            .into_iter()
+            .filter_map(|m| {
+                let u = users.iter().find(|u| u.uid == m.user_id)?;
+                // NOTE(review): takes the first email row found for the user;
+                // presumably the primary address — confirm ordering upstream.
+                let email = emails
+                    .iter()
+                    .find(|(uid, _)| *uid == m.user_id)
+                    .map(|(_, e)| e.clone());
+                let invited_by_username = m.invited_by.and_then(|uid| {
+                    inviters
+                        .iter()
+                        .find(|i| i.uid == uid)
+                        .map(|i| i.username.clone())
+                });
+                Some(PendingInvitationInfo {
+                    user_id: u.uid,
+                    username: u.username.clone(),
+                    display_name: u.display_name.clone(),
+                    avatar_url: u.avatar_url.clone(),
+                    email,
+                    role: m.role,
+                    invited_by_username,
+                    invited_at: m.joined_at, // joined_at doubles as "invited at" for pending rows
+                    expires_at: m.invite_expires_at,
+                })
+            })
+            .collect();
+
+        Ok(invitations)
+    }
+
+    /// Cancel a pending invitation (remove the pending membership record).
+    pub async fn workspace_cancel_invitation(
+        &self,
+        ctx: &Session,
+        workspace_slug: String,
+        target_user_id: Uuid,
+    ) -> Result<(), AppError> {
+        let user_uid = ctx.user().ok_or(AppError::Unauthorized)?;
+        let ws = self.utils_find_workspace_by_slug(workspace_slug).await?;
+
+        self.utils_check_workspace_permission(ws.id, user_uid, &[WorkspaceRole::Admin])
+            .await?;
+
+        let deleted = workspace_membership::Entity::delete_many()
+            .filter(workspace_membership::Column::WorkspaceId.eq(ws.id))
+            .filter(workspace_membership::Column::UserId.eq(target_user_id))
+            .filter(workspace_membership::Column::Status.eq("pending"))
+            .exec(&self.db)
+            .await?;
+
+        if deleted.rows_affected == 0 {
+            return Err(AppError::NotFound("Invitation not found".to_string()));
+        }
+
+        Ok(())
+    }
+
+    pub async fn workspace_invite_member(
+        &self,
+        ctx: &Session,
+        workspace_slug: String,
+        params: WorkspaceInviteParams,
+    ) -> Result<(), AppError> {
+        let user_uid = ctx.user().ok_or(AppError::Unauthorized)?;
+        let ws = self
+            .utils_find_workspace_by_slug(workspace_slug.clone())
+            .await?;
+
+        // Only owner/admin can invite
+        self.utils_check_workspace_permission(ws.id, user_uid, &[WorkspaceRole::Admin])
+            .await?;
+
+        let inviter = self.utils_find_user_by_uid(user_uid).await?;
+
+        // Find target user by email
+        let target_email = user_email::Entity::find()
+            .filter(user_email::Column::Email.eq(&params.email))
+            .one(&self.db)
+            .await?
+            .ok_or(AppError::UserNotFound)?;
+
+        let target_user = self.utils_find_user_by_uid(target_email.user).await?;
+
+        // Check if already a member
+        if workspace_membership::Entity::find()
+            .filter(workspace_membership::Column::WorkspaceId.eq(ws.id))
+            .filter(workspace_membership::Column::UserId.eq(target_user.uid))
+            .filter(workspace_membership::Column::Status.eq("active"))
+            .one(&self.db)
+            .await?
+            .is_some()
+        {
+            return Err(AppError::BadRequest("User is already a member".to_string()));
+        }
+
+        // BUGFIX: validate the requested role instead of persisting the
+        // caller's raw string, and never allow inviting someone as owner —
+        // previously any string (including "owner") was stored verbatim.
+        let invite_role = match params.role.as_deref() {
+            None => WorkspaceRole::Member,
+            Some(r) => r
+                .parse::<WorkspaceRole>()
+                .map_err(|_| AppError::RoleParseError)?,
+        };
+        if invite_role == WorkspaceRole::Owner {
+            return Err(AppError::BadRequest(
+                "Cannot invite a user as workspace owner".to_string(),
+            ));
+        }
+
+        // Generate invite token
+        let token = generate_invite_token();
+        let expires_at = Utc::now() + Duration::days(7);
+
+        // Create or update pending membership
+        let existing: Option<workspace_membership::Model> = workspace_membership::Entity::find()
+            .filter(workspace_membership::Column::WorkspaceId.eq(ws.id))
+            .filter(workspace_membership::Column::UserId.eq(target_user.uid))
+            .one(&self.db)
+            .await?;
+
+        let txn = self.db.begin().await?;
+
+        match existing {
+            Some(m) => {
+                let mut m: workspace_membership::ActiveModel = m.into();
+                m.invite_token = Set(Some(token.clone()));
+                m.invite_expires_at = Set(Some(expires_at));
+                m.invited_by = Set(Some(user_uid));
+                m.role = Set(invite_role.to_string());
+                m.status = Set("pending".to_string());
+                m.update(&txn).await?;
+            }
+            None => {
+                let m = workspace_membership::ActiveModel {
+                    id: Default::default(),
+                    workspace_id: Set(ws.id),
+                    user_id: Set(target_user.uid),
+                    role: Set(invite_role.to_string()),
+                    status: Set("pending".to_string()),
+                    invited_by: Set(Some(user_uid)),
+                    joined_at: Set(Utc::now()),
+                    invite_token: Set(Some(token.clone())),
+                    invite_expires_at: Set(Some(expires_at)),
+                };
+                m.insert(&txn).await?;
+            }
+        }
+
+        txn.commit().await?;
+
+        // Send invitation email
+        let domain = self
+            .config
+            .main_domain()
+            .map_err(|_| AppError::DoMainNotSet)?;
+
+        let invite_link = format!(
+            "https://{}/auth/accept-workspace-invite?token={}",
+            domain,
+            token.clone()
+        );
+
+        let envelope = EmailMessage {
+            to: target_email.email.clone(),
+            subject: format!("You've been invited to join {}", ws.name),
+            body: format!(
+                "Hello {},\n\n\
+                 {} has invited you to join the workspace \"{}\".\n\n\
+                 Click the link below to accept the invitation:\n\
+                 {}\n\n\
+                 This invitation expires in 7 days.\n\n\
+                 Best regards,\n\
+                 GitDataAI Team",
+                target_user.username, inviter.username, ws.name, invite_link
+            ),
+        };
+
+        self.email.send(envelope).await.map_err(|e| {
+            AppError::InternalServerError(format!("Failed to send invitation email: {}", e))
+        })?;
+        Ok(())
+    }
+
+    pub async fn workspace_accept_invitation(
+        &self,
+        ctx: &Session,
+        params: WorkspaceInviteAcceptParams,
+    ) -> Result<workspace::Model, AppError> {
+        let user_uid = ctx.user().ok_or(AppError::Unauthorized)?;
+
+        let membership = workspace_membership::Entity::find()
+            .filter(workspace_membership::Column::InviteToken.eq(&params.token))
+            .one(&self.db)
+            .await?
+            .ok_or(AppError::WorkspaceInviteTokenInvalid)?;
+
+        // The token must belong to the logged-in user; report "invalid" rather
+        // than revealing the token exists for someone else.
+        if membership.user_id != user_uid {
+            return Err(AppError::WorkspaceInviteTokenInvalid);
+        }
+
+        if membership.status == "active" {
+            return Err(AppError::WorkspaceInviteAlreadyAccepted);
+        }
+
+        if let Some(expires_at) = membership.invite_expires_at {
+            if Utc::now() > expires_at {
+                return Err(AppError::WorkspaceInviteExpired);
+            }
+        }
+
+        let ws_id = membership.workspace_id;
+        let mut m: workspace_membership::ActiveModel = membership.into();
+        m.status = Set("active".to_string());
+        m.invite_token = Set(None); // one-time use: clear token on acceptance
+        m.invite_expires_at = Set(None);
+        m.update(&self.db).await?;
+
+        self.utils_find_workspace_by_id(ws_id).await
+    }
+
+    pub async fn workspace_remove_member(
+        &self,
+        ctx: &Session,
+        workspace_slug: String,
+        target_user_id: Uuid,
+    ) -> Result<(), AppError> {
+        let user_uid = ctx.user().ok_or(AppError::Unauthorized)?;
+        let ws = self.utils_find_workspace_by_slug(workspace_slug).await?;
+
+        // Only owner/admin can remove members
+        self.utils_check_workspace_permission(ws.id, user_uid, &[WorkspaceRole::Admin])
+            .await?;
+
+        // Cannot remove owner
+        let target_membership = workspace_membership::Entity::find()
+            .filter(workspace_membership::Column::WorkspaceId.eq(ws.id))
+            .filter(workspace_membership::Column::UserId.eq(target_user_id))
+            .one(&self.db)
+            .await?
+            .ok_or(AppError::NotWorkspaceMember)?;
+
+        if target_membership.role == WorkspaceRole::Owner.to_string() {
+            return Err(AppError::BadRequest(
+                "Cannot remove workspace owner".to_string(),
+            ));
+        }
+
+        workspace_membership::Entity::delete_many()
+            .filter(workspace_membership::Column::WorkspaceId.eq(ws.id))
+            .filter(workspace_membership::Column::UserId.eq(target_user_id))
+            .exec(&self.db)
+            .await?;
+
+        Ok(())
+    }
+
+    pub async fn workspace_update_member_role(
+        &self,
+        ctx: &Session,
+        workspace_slug: String,
+        target_user_id: Uuid,
+        new_role: String,
+    ) -> Result<(), AppError> {
+        let user_uid = ctx.user().ok_or(AppError::Unauthorized)?;
+        let ws = self.utils_find_workspace_by_slug(workspace_slug).await?;
+
+        self.utils_check_workspace_permission(ws.id, user_uid, &[WorkspaceRole::Admin])
+            .await?;
+
+        let target_role: WorkspaceRole = new_role.parse().map_err(|_| AppError::RoleParseError)?;
+
+        let membership = workspace_membership::Entity::find()
+            .filter(workspace_membership::Column::WorkspaceId.eq(ws.id))
+            .filter(workspace_membership::Column::UserId.eq(target_user_id))
+            .one(&self.db)
+            .await?
+ .ok_or(AppError::NotWorkspaceMember)?; + + // Cannot demote owner + if membership.role == WorkspaceRole::Owner.to_string() + && target_role != WorkspaceRole::Owner + { + return Err(AppError::BadRequest( + "Cannot demote workspace owner".to_string(), + )); + } + + let mut m: workspace_membership::ActiveModel = membership.into(); + m.role = Set(new_role); + m.update(&self.db).await?; + Ok(()) + } +} + +fn generate_invite_token() -> String { + use rand::RngExt; + use rand::distr::Alphanumeric; + let token: String = rand::rng() + .sample_iter(Alphanumeric) + .take(64) + .map(char::from) + .collect(); + format!("ws_inv_{}", token) +} diff --git a/libs/service/workspace/mod.rs b/libs/service/workspace/mod.rs new file mode 100644 index 0000000..2793496 --- /dev/null +++ b/libs/service/workspace/mod.rs @@ -0,0 +1,5 @@ +pub mod billing; +pub mod info; +pub mod init; +pub mod members; +pub mod settings; diff --git a/libs/service/workspace/settings.rs b/libs/service/workspace/settings.rs new file mode 100644 index 0000000..63959c7 --- /dev/null +++ b/libs/service/workspace/settings.rs @@ -0,0 +1,82 @@ +use crate::AppService; +use crate::error::AppError; +use chrono::Utc; +use models::workspaces::{WorkspaceRole, workspace}; +use sea_orm::*; +use serde::{Deserialize, Serialize}; +use session::Session; + +#[derive(Debug, Clone, Serialize, Deserialize, utoipa::ToSchema)] +pub struct WorkspaceUpdateParams { + pub name: Option, + pub description: Option, + pub avatar_url: Option, + pub billing_email: Option, +} + +impl AppService { + pub async fn workspace_update( + &self, + ctx: &Session, + workspace_slug: String, + params: WorkspaceUpdateParams, + ) -> Result { + let user_uid = ctx.user().ok_or(AppError::Unauthorized)?; + let ws = self + .utils_find_workspace_by_slug(workspace_slug.clone()) + .await?; + + self.utils_check_workspace_permission(ws.id, user_uid, &[WorkspaceRole::Admin]) + .await?; + + let mut m: workspace::ActiveModel = ws.into(); + + if let Some(name) = 
params.name { + // Check name uniqueness + if workspace::Entity::find() + .filter(workspace::Column::Name.eq(&name)) + .filter(workspace::Column::DeletedAt.is_null()) + .filter(workspace::Column::Id.ne(m.id.clone().unwrap())) + .one(&self.db) + .await? + .is_some() + { + return Err(AppError::WorkspaceNameAlreadyExists); + } + m.name = Set(name); + } + + if let Some(description) = params.description { + m.description = Set(Some(description)); + } + if let Some(avatar_url) = params.avatar_url { + m.avatar_url = Set(Some(avatar_url)); + } + if let Some(billing_email) = params.billing_email { + m.billing_email = Set(Some(billing_email)); + } + + m.updated_at = Set(Utc::now()); + m.update(&self.db).await.map_err(Into::into) + } + + /// Soft-delete a workspace. Only owner can delete. + pub async fn workspace_delete( + &self, + ctx: &Session, + workspace_slug: String, + ) -> Result<(), AppError> { + let user_uid = ctx.user().ok_or(AppError::Unauthorized)?; + let ws = self.utils_find_workspace_by_slug(workspace_slug).await?; + + self.utils_check_workspace_permission(ws.id, user_uid, &[WorkspaceRole::Owner]) + .await?; + + let mut m: workspace::ActiveModel = ws.into(); + m.deleted_at = Set(Some(Utc::now())); + m.updated_at = Set(Utc::now()); + m.update(&self.db).await?; + + Ok(()) + } +} diff --git a/libs/service/ws_token.rs b/libs/service/ws_token.rs new file mode 100644 index 0000000..c5c9842 --- /dev/null +++ b/libs/service/ws_token.rs @@ -0,0 +1,103 @@ +use std::sync::Arc; + +use chrono::{Duration, Utc}; +use deadpool_redis::cluster::Connection; +use serde::{Deserialize, Serialize}; +use uuid::Uuid; + +use crate::error::AppError; + +/// Token payload stored in Redis +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct WsTokenData { + pub user_id: Uuid, + pub expires_at: chrono::DateTime, + pub created_at: chrono::DateTime, +} + +const WS_TOKEN_PREFIX: &str = "ws_token:"; +pub const WS_TOKEN_TTL_SECONDS: i64 = 300; // Token valid for 5 minutes + +/// Service for 
managing WebSocket connection tokens
+pub struct WsTokenService {
+    // Factory handing back a pooled Redis (cluster) connection on a spawned task.
+    get_redis:
+        Arc<dyn Fn() -> tokio::task::JoinHandle<Result<Connection, AppError>> + Send + Sync>,
+}
+
+impl WsTokenService {
+    pub fn new(
+        get_redis: Arc<
+            dyn Fn() -> tokio::task::JoinHandle<Result<Connection, AppError>> + Send + Sync,
+        >,
+    ) -> Self {
+        Self { get_redis }
+    }
+
+    /// Generate a new WebSocket token for the given user
+    pub async fn generate_token(&self, user_id: Uuid) -> Result<String, AppError> {
+        let token = Self::random_token();
+        let now = Utc::now();
+        let token_data = WsTokenData {
+            user_id,
+            expires_at: now + Duration::seconds(WS_TOKEN_TTL_SECONDS),
+            created_at: now,
+        };
+
+        let json = serde_json::to_string(&token_data).map_err(|e| {
+            AppError::InternalServerError(format!("Failed to serialize ws token: {}", e))
+        })?;
+
+        let key = format!("{}{}", WS_TOKEN_PREFIX, token);
+        let mut conn = self.get_connection().await?;
+
+        // Set token in Redis with TTL
+        redis::cmd("SETEX")
+            .arg(&key)
+            .arg(WS_TOKEN_TTL_SECONDS)
+            .arg(&json)
+            .query_async::<()>(&mut conn)
+            .await
+            .map_err(|e| {
+                AppError::InternalServerError(format!("Failed to store ws token: {}", e))
+            })?;
+
+        Ok(token)
+    }
+
+    pub async fn validate_token(&self, token: &str) -> Result<Uuid, AppError> {
+        let key = format!("{}{}", WS_TOKEN_PREFIX, token);
+        let mut conn = self.get_connection().await?;
+
+        // Get and delete token atomically (one-time use)
+        // NOTE(review): GETDEL requires Redis >= 6.2 — confirm deployment target.
+        let json: Option<String> = redis::cmd("GETDEL")
+            .arg(&key)
+            .query_async(&mut conn)
+            .await
+            .map_err(|e| {
+                AppError::InternalServerError(format!("Failed to validate ws token: {}", e))
+            })?;
+
+        let token_data = json.ok_or_else(|| AppError::Unauthorized)?;
+
+        let ws_token_data: WsTokenData = serde_json::from_str(&token_data).map_err(|e| {
+            AppError::InternalServerError(format!("Failed to deserialize ws token: {}", e))
+        })?;
+
+        // Defense in depth: the Redis key TTL should already have expired this.
+        if Utc::now() > ws_token_data.expires_at {
+            return Err(AppError::Unauthorized);
+        }
+
+        Ok(ws_token_data.user_id)
+    }
+
+    fn random_token() -> String {
+        // 32 random bytes -> 64-char hex token
+        let bytes: [u8; 32] = rand::random();
+        hex::encode(bytes)
+    }
+
+    async fn get_connection(&self) -> Result<Connection, AppError> {
+        // Outer `?` handles the JoinHandle error; inner map_err the connection error.
+        (self.get_redis)()
+            .await
+            .map_err(|e| AppError::InternalServerError(format!("Redis join error: {}", e)))?
+            .map_err(|e| AppError::InternalServerError(format!("Redis connection error: {}", e)))
+    }
+}
diff --git a/libs/session/Cargo.toml b/libs/session/Cargo.toml
new file mode 100644
index 0000000..e515467
--- /dev/null
+++ b/libs/session/Cargo.toml
@@ -0,0 +1,32 @@
+[package]
+name = "session"
+version.workspace = true
+edition.workspace = true
+authors.workspace = true
+description.workspace = true
+repository.workspace = true
+readme.workspace = true
+homepage.workspace = true
+license.workspace = true
+keywords.workspace = true
+categories.workspace = true
+documentation.workspace = true
+[lib]
+path = "lib.rs"
+name = "session"
+[dependencies]
+actix-service = { workspace = true }
+actix-utils = { workspace = true }
+actix-web = { workspace = true, features = ["cookies", "secure-cookies"] }
+
+anyhow = { workspace = true }
+derive_more = { workspace = true, features = ["display", "error", "from"] }
+rand = { workspace = true }
+serde = { workspace = true }
+serde_json = { workspace = true }
+uuid = { workspace = true,features = ["serde","v7","v4"] }
+redis = { workspace = true, features = ["tokio-comp", "connection-manager", "cluster"] }
+deadpool-redis = { workspace = true }
+tokio = { workspace = true, features = ["rt-multi-thread", "sync"] }
+[lints]
+workspace = true
diff --git a/libs/session/config.rs b/libs/session/config.rs
new file mode 100644
index 0000000..6faf3b4
--- /dev/null
+++ b/libs/session/config.rs
@@ -0,0 +1,213 @@
+use actix_web::cookie::{Key, SameSite, time::Duration};
+use derive_more::derive::From;
+
+use crate::{SessionMiddleware, storage::SessionStore};
+
+#[derive(Debug, Clone, From)]
+#[non_exhaustive]
+pub enum SessionLifecycle {
+    BrowserSession(BrowserSession),
+    PersistentSession(PersistentSession),
+}
+
+#[derive(Debug, Clone)]
+pub struct BrowserSession {
+    state_ttl: Duration,
state_ttl_extension_policy: TtlExtensionPolicy,
}

impl BrowserSession {
    /// Server-side TTL for the session state.
    pub fn state_ttl(mut self, ttl: Duration) -> Self {
        self.state_ttl = ttl;
        self
    }

    /// When the server-side TTL gets refreshed.
    pub fn state_ttl_extension_policy(mut self, ttl_extension_policy: TtlExtensionPolicy) -> Self {
        self.state_ttl_extension_policy = ttl_extension_policy;
        self
    }
}

impl Default for BrowserSession {
    fn default() -> Self {
        Self {
            state_ttl: default_ttl(),
            state_ttl_extension_policy: default_ttl_extension_policy(),
        }
    }
}

/// Session that survives browser restarts; the cookie carries a matching
/// `Max-Age`.
#[derive(Debug, Clone)]
pub struct PersistentSession {
    session_ttl: Duration,
    ttl_extension_policy: TtlExtensionPolicy,
}

impl PersistentSession {
    /// TTL applied both server-side and as the cookie `Max-Age`.
    #[doc(alias = "max_age", alias = "max age", alias = "expires")]
    pub fn session_ttl(mut self, session_ttl: Duration) -> Self {
        self.session_ttl = session_ttl;
        self
    }

    pub fn session_ttl_extension_policy(
        mut self,
        ttl_extension_policy: TtlExtensionPolicy,
    ) -> Self {
        self.ttl_extension_policy = ttl_extension_policy;
        self
    }
}

impl Default for PersistentSession {
    fn default() -> Self {
        Self {
            session_ttl: default_ttl(),
            ttl_extension_policy: default_ttl_extension_policy(),
        }
    }
}

/// When the session TTL (and cookie `Max-Age`, if any) is refreshed.
#[derive(Debug, Clone)]
#[non_exhaustive]
pub enum TtlExtensionPolicy {
    OnEveryRequest,
    OnStateChanges,
}

/// Whether the session cookie is encrypted (`Private`) or merely signed.
#[derive(Debug, Clone, Copy)]
pub enum CookieContentSecurity {
    Private,
    Signed,
}

pub(crate) const fn default_ttl() -> Duration {
    Duration::days(1)
}

pub(crate) const fn default_ttl_extension_policy() -> TtlExtensionPolicy {
    TtlExtensionPolicy::OnStateChanges
}

/// Fluent builder for `SessionMiddleware`.
///
/// NOTE(review): the `<Store: SessionStore>` generic parameters below were
/// stripped from the extracted text and have been reconstructed.
#[must_use]
pub struct SessionMiddlewareBuilder<Store: SessionStore> {
    storage_backend: Store,
    configuration: Configuration,
}

impl<Store: SessionStore> SessionMiddlewareBuilder<Store> {
    pub(crate) fn new(store: Store, configuration: Configuration) -> Self {
        Self {
            storage_backend: store,
            configuration,
        }
    }

    pub fn cookie_name(mut self, name: String) -> Self {
        self.configuration.cookie.name = name;
        self
    }

    pub fn cookie_secure(mut self, secure: bool) -> Self {
        self.configuration.cookie.secure = secure;
        self
    }

    pub fn session_lifecycle<S: Into<SessionLifecycle>>(mut self, session_lifecycle: S) -> Self {
        match session_lifecycle.into() {
            SessionLifecycle::BrowserSession(BrowserSession {
                state_ttl,
                state_ttl_extension_policy,
            }) => {
                // No Max-Age: the cookie dies with the browser; only the
                // server-side state expires.
                self.configuration.cookie.max_age = None;
                self.configuration.session.state_ttl = state_ttl;
                self.configuration.ttl_extension_policy = state_ttl_extension_policy;
            }
            SessionLifecycle::PersistentSession(PersistentSession {
                session_ttl,
                ttl_extension_policy,
            }) => {
                self.configuration.cookie.max_age = Some(session_ttl);
                self.configuration.session.state_ttl = session_ttl;
                self.configuration.ttl_extension_policy = ttl_extension_policy;
            }
        }

        self
    }

    pub fn cookie_same_site(mut self, same_site: SameSite) -> Self {
        self.configuration.cookie.same_site = same_site;
        self
    }

    pub fn cookie_path(mut self, path: String) -> Self {
        self.configuration.cookie.path = path;
        self
    }

    pub fn cookie_domain(mut self, domain: Option<String>) -> Self {
        self.configuration.cookie.domain = domain;
        self
    }

    pub fn cookie_content_security(mut self, content_security: CookieContentSecurity) -> Self {
        self.configuration.cookie.content_security = content_security;
        self
    }

    pub fn cookie_http_only(mut self, http_only: bool) -> Self {
        self.configuration.cookie.http_only = http_only;
        self
    }

    /// Finalize the builder into a `SessionMiddleware`.
    #[must_use]
    pub fn build(self) -> SessionMiddleware<Store> {
        SessionMiddleware::from_parts(self.storage_backend, self.configuration)
    }
}

#[derive(Clone)]
pub(crate) struct Configuration {
    pub(crate) cookie: CookieConfiguration,
    pub(crate) session: SessionConfiguration,
    pub(crate) ttl_extension_policy: TtlExtensionPolicy,
}

#[derive(Clone)]
pub(crate) struct SessionConfiguration {
    pub(crate) state_ttl: Duration,
}

#[derive(Clone)]
pub(crate) struct CookieConfiguration {
    pub(crate) secure: bool,
    pub(crate) http_only:
bool, + pub(crate) name: String, + pub(crate) same_site: SameSite, + pub(crate) path: String, + pub(crate) domain: Option, + pub(crate) max_age: Option, + pub(crate) content_security: CookieContentSecurity, + pub(crate) key: Key, +} + +pub(crate) fn default_configuration(key: Key) -> Configuration { + Configuration { + cookie: CookieConfiguration { + secure: true, + http_only: true, + name: "id".into(), + same_site: SameSite::Lax, + path: "/".into(), + domain: None, + max_age: None, + content_security: CookieContentSecurity::Private, + key, + }, + session: SessionConfiguration { + state_ttl: default_ttl(), + }, + ttl_extension_policy: default_ttl_extension_policy(), + } +} diff --git a/libs/session/lib.rs b/libs/session/lib.rs new file mode 100644 index 0000000..3f545e6 --- /dev/null +++ b/libs/session/lib.rs @@ -0,0 +1,15 @@ +#![forbid(unsafe_code)] + +pub mod config; +mod middleware; +mod session; +mod session_ext; +pub mod storage; + +pub use self::{ + middleware::SessionMiddleware, + session::{ + Session, SessionGetError, SessionInsertError, SessionStatus, SessionUser, SessionWorkspace, + }, + session_ext::SessionExt, +}; diff --git a/libs/session/middleware.rs b/libs/session/middleware.rs new file mode 100644 index 0000000..06dd311 --- /dev/null +++ b/libs/session/middleware.rs @@ -0,0 +1,319 @@ +use std::{fmt, future::Future, pin::Pin, rc::Rc}; + +use actix_utils::future::{Ready, ready}; +use actix_web::{ + HttpResponse, + body::MessageBody, + cookie::{Cookie, CookieJar, Key}, + dev::{ResponseHead, Service, ServiceRequest, ServiceResponse, Transform, forward_ready}, + http::header::{HeaderValue, SET_COOKIE}, +}; +use anyhow::Context; +use serde_json::{Map, Value}; + +use crate::{ + Session, SessionStatus, + config::{ + self, Configuration, CookieConfiguration, CookieContentSecurity, SessionMiddlewareBuilder, + TtlExtensionPolicy, + }, + storage::{LoadError, SessionKey, SessionStore}, +}; + +#[derive(Clone)] +pub struct SessionMiddleware { + storage_backend: 
Rc<Store>,
    configuration: Rc<Configuration>,
}

// NOTE(review): the struct header on the preceding (garbled) line must
// declare `SessionMiddleware<Store: SessionStore>`; generics were stripped
// by the text extraction and are reconstructed throughout this file.
impl<Store: SessionStore> SessionMiddleware<Store> {
    /// Build with the secure default configuration.
    pub fn new(store: Store, key: Key) -> Self {
        Self::builder(store, key).build()
    }

    pub fn builder(store: Store, key: Key) -> SessionMiddlewareBuilder<Store> {
        SessionMiddlewareBuilder::new(store, config::default_configuration(key))
    }

    pub(crate) fn from_parts(store: Store, configuration: Configuration) -> Self {
        Self {
            storage_backend: Rc::new(store),
            configuration: Rc::new(configuration),
        }
    }
}

impl<S, B, Store> Transform<S, ServiceRequest> for SessionMiddleware<Store>
where
    S: Service<ServiceRequest, Response = ServiceResponse<B>, Error = actix_web::Error> + 'static,
    S::Future: 'static,
    B: MessageBody + 'static,
    Store: SessionStore + 'static,
{
    type Response = ServiceResponse<B>;
    type Error = actix_web::Error;
    type Transform = InnerSessionMiddleware<S, Store>;
    type InitError = ();
    type Future = Ready<Result<Self::Transform, Self::InitError>>;

    fn new_transform(&self, service: S) -> Self::Future {
        ready(Ok(InnerSessionMiddleware {
            service: Rc::new(service),
            configuration: Rc::clone(&self.configuration),
            storage_backend: Rc::clone(&self.storage_backend),
        }))
    }
}

/// Convert any displayable error into an opaque 500 response.
fn e500<E: fmt::Debug + fmt::Display + 'static>(err: E) -> actix_web::Error {
    actix_web::error::InternalError::from_response(
        err,
        HttpResponse::InternalServerError().finish(),
    )
    .into()
}

#[doc(hidden)]
#[non_exhaustive]
pub struct InnerSessionMiddleware<S, Store: SessionStore + 'static> {
    service: Rc<S>,
    configuration: Rc<Configuration>,
    storage_backend: Rc<Store>,
}

impl<S, B, Store> Service<ServiceRequest> for InnerSessionMiddleware<S, Store>
where
    S: Service<ServiceRequest, Response = ServiceResponse<B>, Error = actix_web::Error> + 'static,
    S::Future: 'static,
    Store: SessionStore + 'static,
{
    type Response = ServiceResponse<B>;
    type Error = actix_web::Error;
    #[allow(clippy::type_complexity)]
    type Future = Pin<Box<dyn Future<Output = Result<Self::Response, Self::Error>>>>;

    forward_ready!(service);

    fn call(&self, mut req: ServiceRequest) -> Self::Future {
        let service = Rc::clone(&self.service);
        let storage_backend = Rc::clone(&self.storage_backend);
        let configuration = Rc::clone(&self.configuration);

        Box::pin(async move {
            // Load existing state (if any) before running the inner service,
            // then reconcile the post-request status with the store.
            let session_key = extract_session_key(&req, &configuration.cookie);
            let (session_key, session_state) =
                load_session_state(session_key, storage_backend.as_ref()).await?;

            Session::set_session(&mut req, session_state);

            let mut res = service.call(req).await?;
            let (status, session_state) = Session::get_changes(&mut res);

            match session_key {
                // No incoming session: create one only if state was written.
                None => {
                    if !session_state.is_empty() {
                        let session_key = storage_backend
                            .save(session_state, &configuration.session.state_ttl)
                            .await
                            .map_err(e500)?;

                        set_session_cookie(
                            res.response_mut().head_mut(),
                            session_key,
                            &configuration.cookie,
                        )
                        .map_err(e500)?;
                    }
                }

                Some(session_key) => {
                    match status {
                        SessionStatus::Changed => {
                            let session_key = storage_backend
                                .update(
                                    session_key,
                                    session_state,
                                    &configuration.session.state_ttl,
                                )
                                .await
                                .map_err(e500)?;

                            set_session_cookie(
                                res.response_mut().head_mut(),
                                session_key,
                                &configuration.cookie,
                            )
                            .map_err(e500)?;
                        }

                        SessionStatus::Purged => {
                            storage_backend.delete(&session_key).await.map_err(e500)?;

                            delete_session_cookie(
                                res.response_mut().head_mut(),
                                &configuration.cookie,
                            )
                            .map_err(e500)?;
                        }

                        // Renewal rotates the session key (fixation defense):
                        // delete the old record, save state under a fresh key.
                        SessionStatus::Renewed => {
                            storage_backend.delete(&session_key).await.map_err(e500)?;

                            let session_key = storage_backend
                                .save(session_state, &configuration.session.state_ttl)
                                .await
                                .map_err(e500)?;

                            set_session_cookie(
                                res.response_mut().head_mut(),
                                session_key,
                                &configuration.cookie,
                            )
                            .map_err(e500)?;
                        }

                        SessionStatus::Unchanged => {
                            if matches!(
                                configuration.ttl_extension_policy,
                                TtlExtensionPolicy::OnEveryRequest
                            ) {
                                storage_backend
                                    .update_ttl(&session_key, &configuration.session.state_ttl)
                                    .await
                                    .map_err(e500)?;

                                // Re-issue the cookie only when it carries a
                                // Max-Age that must track the extended TTL.
                                if configuration.cookie.max_age.is_some() {
                                    set_session_cookie(
                                        res.response_mut().head_mut(),
                                        session_key,
                                        &configuration.cookie,
                                    )
                                    .map_err(e500)?;
                                }
                            }
                        }
                    };
                }
            }

            Ok(res)
        })
    }
}

/// Pull the session cookie from the request and verify/decrypt it.
/// Any failure (missing cookie, bad signature, undecodable key) yields `None`
/// and the request proceeds as anonymous.
fn extract_session_key(req: &ServiceRequest, config: &CookieConfiguration) -> Option<SessionKey> {
    let cookies = req.cookies().ok()?;
    let session_cookie = cookies
        .iter()
        .find(|&cookie| cookie.name() == config.name)?;

    // Re-run the cookie through a jar so the signature/encryption can be
    // verified with the configured key.
    let mut jar = CookieJar::new();
    jar.add_original(session_cookie.clone());

    let verification_result = match config.content_security {
        CookieContentSecurity::Signed => jar.signed(&config.key).get(&config.name),
        CookieContentSecurity::Private => jar.private(&config.key).get(&config.name),
    };

    // Removed a dead `if verification_result.is_none() {}` no-op that was in
    // the original; forged/invalid cookies simply fall through to `None`.
    verification_result?.value().to_owned().try_into().ok()
}

/// Load state for `session_key`, treating "key not found" as a fresh
/// anonymous session and corrupt state as an empty (but keyed) session.
async fn load_session_state<Store: SessionStore>(
    session_key: Option<SessionKey>,
    storage_backend: &Store,
) -> Result<(Option<SessionKey>, Map<String, Value>), actix_web::Error> {
    if let Some(session_key) = session_key {
        match storage_backend.load(&session_key).await {
            Ok(Some(state)) => Ok((Some(session_key), state)),

            // The cookie referenced a session that no longer exists.
            Ok(None) => Ok((None, Map::new())),

            // Corrupt state: keep the key but start from an empty state.
            Err(LoadError::Deserialization(_err)) => Ok((Some(session_key), Map::new())),

            Err(LoadError::Other(err)) => Err(e500(err)),
        }
    } else {
        Ok((None, Map::new()))
    }
}

/// Attach a freshly signed/encrypted session cookie to the response.
fn set_session_cookie(
    response: &mut ResponseHead,
    session_key: SessionKey,
    config: &CookieConfiguration,
) -> Result<(), anyhow::Error> {
    let value: String = session_key.into();
    let mut cookie = Cookie::new(config.name.clone(), value);

    cookie.set_secure(config.secure);
    cookie.set_http_only(config.http_only);
    cookie.set_same_site(config.same_site);
    cookie.set_path(config.path.clone());

    if let Some(max_age) = config.max_age {
        cookie.set_max_age(max_age);
    }

    if let Some(ref domain) = config.domain {
        cookie.set_domain(domain.clone());
    }

    let mut jar = CookieJar::new();
    match config.content_security {
        CookieContentSecurity::Signed => jar.signed_mut(&config.key).add(cookie),
        CookieContentSecurity::Private =>
jar.private_mut(&config.key).add(cookie), + } + + let cookie = jar + .delta() + .next() + .expect("delta iterator should contain cookie after add"); + let val = HeaderValue::from_str(&cookie.encoded().to_string()) + .context("Failed to attach a session cookie to the outgoing response")?; + + response.headers_mut().append(SET_COOKIE, val); + + Ok(()) +} + +fn delete_session_cookie( + response: &mut ResponseHead, + config: &CookieConfiguration, +) -> Result<(), anyhow::Error> { + let removal_cookie = Cookie::build(config.name.clone(), "") + .path(config.path.clone()) + .secure(config.secure) + .http_only(config.http_only) + .same_site(config.same_site); + + let mut removal_cookie = if let Some(ref domain) = config.domain { + removal_cookie.domain(domain) + } else { + removal_cookie + } + .finish(); + + removal_cookie.make_removal(); + + let val = HeaderValue::from_str(&removal_cookie.to_string()) + .context("Failed to attach a session removal cookie to the outgoing response")?; + response.headers_mut().append(SET_COOKIE, val); + + Ok(()) +} diff --git a/libs/session/session.rs b/libs/session/session.rs new file mode 100644 index 0000000..43e52aa --- /dev/null +++ b/libs/session/session.rs @@ -0,0 +1,403 @@ +use std::{ + cell::{Ref, RefCell}, + convert::Infallible, + error::Error as StdError, + future::Future, + mem, + pin::Pin, + rc::Rc, +}; + +use actix_utils::future::{Ready, ready}; +use actix_web::{ + FromRequest, HttpMessage, HttpRequest, HttpResponse, ResponseError, + body::BoxBody, + dev::{Extensions, Payload, ServiceRequest, ServiceResponse}, +}; +use anyhow::Context; +use derive_more::derive::{Display, From}; +use serde::{Serialize, de::DeserializeOwned}; +use serde_json::{Map, Value}; +use uuid::Uuid; + +const SESSION_USER_KEY: &str = "session:user_uid"; +const SESSION_WORKSPACE_KEY: &str = "session:workspace_id"; + +#[derive(Clone)] +pub struct Session(Rc>); + +#[derive(Debug, Clone, Default, PartialEq, Eq)] +pub enum SessionStatus { + Changed, + Purged, + 
Renewed,
    #[default]
    Unchanged,
}

// Interior state shared behind `Rc<RefCell<..>>` between the middleware and
// extractors within a single request.
#[derive(Default)]
struct SessionInner {
    state: Map<String, Value>,
    status: SessionStatus,
}

impl Session {
    /// Deserialize the value stored under `key`, if present.
    pub fn get<T: DeserializeOwned>(&self, key: &str) -> Result<Option<T>, SessionGetError> {
        if let Some(value) = self.0.borrow().state.get(key) {
            Ok(Some(
                serde_json::from_value::<T>(value.clone())
                    .with_context(|| {
                        format!(
                            "Failed to deserialize the JSON-encoded session data attached to key \
                            `{}` as a `{}` type",
                            key,
                            std::any::type_name::<T>()
                        )
                    })
                    .map_err(SessionGetError)?,
            ))
        } else {
            Ok(None)
        }
    }

    pub fn contains_key(&self, key: &str) -> bool {
        self.0.borrow().state.contains_key(key)
    }

    /// Read-only view of the raw session state.
    pub fn entries(&self) -> Ref<'_, Map<String, Value>> {
        Ref::map(self.0.borrow(), |inner| &inner.state)
    }

    pub fn status(&self) -> SessionStatus {
        Ref::map(self.0.borrow(), |inner| &inner.status).clone()
    }

    /// Serialize `value` into the session under `key`.
    /// Writes are ignored after `purge()`; a `Renewed` status is preserved.
    pub fn insert<T: Serialize>(
        &self,
        key: impl Into<String>,
        value: T,
    ) -> Result<(), SessionInsertError> {
        let mut inner = self.0.borrow_mut();

        if inner.status != SessionStatus::Purged {
            if inner.status != SessionStatus::Renewed {
                inner.status = SessionStatus::Changed;
            }

            let key = key.into();
            let val = serde_json::to_value(&value)
                .with_context(|| {
                    format!(
                        "Failed to serialize the provided `{}` type instance as JSON in order to \
                        attach as session data to the `{key}` key",
                        std::any::type_name::<T>(),
                    )
                })
                .map_err(SessionInsertError)?;

            inner.state.insert(key, val);
        }

        Ok(())
    }

    /// Apply `updater` to the current value under `key`, if present.
    pub fn update<T: Serialize + DeserializeOwned, F>(
        &self,
        key: impl Into<String>,
        updater: F,
    ) -> Result<(), SessionUpdateError>
    where
        F: FnOnce(T) -> T,
    {
        let mut inner = self.0.borrow_mut();
        let key_str = key.into();

        if let Some(val) = inner.state.get(&key_str) {
            let value = serde_json::from_value(val.clone())
                .with_context(|| {
                    format!(
                        "Failed to deserialize the JSON-encoded session data attached to key \
                        `{key_str}` as a `{}` type",
                        std::any::type_name::<T>()
                    )
                })
                .map_err(SessionUpdateError)?;

            let val = serde_json::to_value(updater(value))
                .with_context(|| {
                    format!(
                        "Failed to serialize the provided `{}` type instance as JSON in order to \
                        attach as session data to the `{key_str}` key",
                        std::any::type_name::<T>(),
                    )
                })
                .map_err(SessionUpdateError)?;

            inner.state.insert(key_str, val);
        }

        Ok(())
    }

    /// Like `update`, but inserts `default_value` when the key is absent.
    pub fn update_or<T: Serialize + DeserializeOwned, F>(
        &self,
        key: &str,
        default_value: T,
        updater: F,
    ) -> Result<(), SessionUpdateError>
    where
        F: FnOnce(T) -> T,
    {
        if self.contains_key(key) {
            self.update(key, updater)
        } else {
            self.insert(key, default_value)
                .map_err(|err| SessionUpdateError(err.into()))
        }
    }

    pub fn remove(&self, key: &str) -> Option<Value> {
        let mut inner = self.0.borrow_mut();

        if inner.status != SessionStatus::Purged {
            if inner.status != SessionStatus::Renewed {
                inner.status = SessionStatus::Changed;
            }
            return inner.state.remove(key);
        }

        None
    }

    /// Remove and deserialize; on decode failure the raw value is returned
    /// in `Err` so the caller can inspect it (it is still removed).
    pub fn remove_as<T: DeserializeOwned>(&self, key: &str) -> Option<Result<T, Value>> {
        self.remove(key)
            .map(|value| match serde_json::from_value::<T>(value.clone()) {
                Ok(val) => Ok(val),
                Err(_err) => Err(value),
            })
    }

    pub fn clear(&self) {
        let mut inner = self.0.borrow_mut();

        if inner.status != SessionStatus::Purged {
            if inner.status != SessionStatus::Renewed {
                inner.status = SessionStatus::Changed;
            }
            inner.state.clear()
        }
    }

    /// Destroy the session: state is dropped and the store record deleted.
    pub fn purge(&self) {
        let mut inner = self.0.borrow_mut();
        inner.status = SessionStatus::Purged;
        inner.state.clear();
    }

    /// Rotate the session key while keeping the state (fixation defense).
    pub fn renew(&self) {
        let mut inner = self.0.borrow_mut();

        if inner.status != SessionStatus::Purged {
            inner.status = SessionStatus::Renewed;
        }
    }

    pub fn user(&self) -> Option<Uuid> {
        self.get::<Uuid>(SESSION_USER_KEY).ok().flatten()
    }

    pub fn set_user(&self, uid: Uuid) {
        let _ = self.insert(SESSION_USER_KEY, uid);
    }

    pub fn clear_user(&self) {
        let _ = self.remove(SESSION_USER_KEY);
    }

    pub fn current_workspace_id(&self) -> Option<Uuid> {
        self.get::<Uuid>(SESSION_WORKSPACE_KEY).ok().flatten()
    }

    pub fn set_current_workspace_id(&self, id: Uuid) {
        let _ = self.insert(SESSION_WORKSPACE_KEY, id);
    }

    pub fn clear_current_workspace_id(&self) {
        let _ = self.remove(SESSION_WORKSPACE_KEY);
    }

    pub fn ip_address(&self) -> Option<String> {
        self.get::<String>("session:ip_address").ok().flatten()
    }

    pub fn user_agent(&self) -> Option<String> {
        self.get::<String>("session:user_agent").ok().flatten()
    }

    /// Record the caller's user-agent and IP into the session state.
    /// NOTE(review): writes into the state map directly, deliberately NOT
    /// flipping the status to `Changed` — confirm that skipping a store
    /// write here is intended.
    pub fn set_request_info(req: &HttpRequest) {
        let extensions = req.extensions_mut();
        if let Some(inner) = extensions.get::<Rc<RefCell<SessionInner>>>() {
            let mut inner = inner.borrow_mut();
            if let Some(ua) = req.headers().get("user-agent") {
                if let Ok(ua) = ua.to_str() {
                    let _ = inner
                        .state
                        .insert("session:user_agent".to_string(), serde_json::json!(ua));
                }
            }
            let addr = req
                .connection_info()
                .realip_remote_addr()
                .map(|s| s.to_string());
            if let Some(ip) = addr {
                let _ = inner
                    .state
                    .insert("session:ip_address".to_string(), serde_json::json!(ip));
            }
        }
    }

    #[allow(clippy::needless_pass_by_ref_mut)]
    pub(crate) fn set_session(
        req: &mut ServiceRequest,
        data: impl IntoIterator<Item = (String, Value)>,
    ) {
        let session = Session::get_session(&mut req.extensions_mut());
        let mut inner = session.0.borrow_mut();
        inner.state.extend(data);
    }

    /// Take the final status and state out of the request at response time.
    #[allow(clippy::needless_pass_by_ref_mut)]
    pub(crate) fn get_changes<B>(
        res: &mut ServiceResponse<B>,
    ) -> (SessionStatus, Map<String, Value>) {
        if let Some(s_impl) = res
            .request()
            .extensions()
            .get::<Rc<RefCell<SessionInner>>>()
        {
            let state = mem::take(&mut s_impl.borrow_mut().state);
            (s_impl.borrow().status.clone(), state)
        } else {
            (SessionStatus::Unchanged, Map::new())
        }
    }

    /// This is used internally by the FromRequest impl, but also exposed for WS/manual usage.
    pub fn get_session(extensions: &mut Extensions) -> Session {
        if let Some(s_impl) = extensions.get::<Rc<RefCell<SessionInner>>>() {
            return Session(Rc::clone(s_impl));
        }

        let inner = Rc::new(RefCell::new(SessionInner::default()));
        extensions.insert(inner.clone());

        Session(inner)
    }
}

impl FromRequest for Session {
    type Error = Infallible;
    type Future = Ready<Result<Session, Infallible>>;

    #[inline]
    fn from_request(req: &HttpRequest, _: &mut Payload) -> Self::Future {
        ready(Ok(Session::get_session(&mut req.extensions_mut())))
    }
}

/// Extractor for the authenticated user ID from session.
/// Fails with 401 if the session has no logged-in user.
#[derive(Clone, Copy)]
pub struct SessionUser(pub Uuid);

impl FromRequest for SessionUser {
    type Error = SessionGetError;
    type Future = Pin<Box<dyn Future<Output = Result<Self, Self::Error>>>>;

    fn from_request(req: &HttpRequest, _: &mut Payload) -> Self::Future {
        let req = req.clone();
        Box::pin(async move {
            // Scope the extensions borrow so it is released before returning.
            let uid = {
                let mut extensions = req.extensions_mut();
                let session = Session::get_session(&mut extensions);
                session
                    .user()
                    .ok_or_else(|| SessionGetError(anyhow::anyhow!("not authenticated")))?
            };
            Ok(SessionUser(uid))
        })
    }
}

/// Extractor for the current workspace ID from session.
/// Returns None if no workspace is selected (workspace selection is optional).
+#[derive(Clone, Copy)] +pub struct SessionWorkspace(pub Option); + +impl FromRequest for SessionWorkspace { + type Error = Infallible; + type Future = Ready>; + + #[inline] + fn from_request(req: &HttpRequest, _: &mut Payload) -> Self::Future { + let mut extensions = req.extensions_mut(); + let session = Session::get_session(&mut extensions); + ready(Ok(SessionWorkspace(session.current_workspace_id()))) + } +} + +#[derive(Debug, Display, From)] +#[display("{_0}")] +pub struct SessionGetError(anyhow::Error); + +impl StdError for SessionGetError { + fn source(&self) -> Option<&(dyn StdError + 'static)> { + Some(self.0.as_ref()) + } +} + +impl ResponseError for SessionGetError { + fn error_response(&self) -> HttpResponse { + HttpResponse::build(self.status_code()) + .content_type("text/plain") + .body(self.to_string()) + } +} + +#[derive(Debug, Display, From)] +#[display("{_0}")] +pub struct SessionInsertError(anyhow::Error); + +impl StdError for SessionInsertError { + fn source(&self) -> Option<&(dyn StdError + 'static)> { + Some(self.0.as_ref()) + } +} + +impl ResponseError for SessionInsertError { + fn error_response(&self) -> HttpResponse { + HttpResponse::build(self.status_code()) + .content_type("text/plain") + .body(self.to_string()) + } +} + +#[derive(Debug, Display, From)] +#[display("{_0}")] +pub struct SessionUpdateError(anyhow::Error); + +impl StdError for SessionUpdateError { + fn source(&self) -> Option<&(dyn StdError + 'static)> { + Some(self.0.as_ref()) + } +} + +impl ResponseError for SessionUpdateError { + fn error_response(&self) -> HttpResponse { + HttpResponse::build(self.status_code()) + .content_type("text/plain") + .body(self.to_string()) + } +} diff --git a/libs/session/session_ext.rs b/libs/session/session_ext.rs new file mode 100644 index 0000000..318fb2f --- /dev/null +++ b/libs/session/session_ext.rs @@ -0,0 +1,35 @@ +use actix_web::{ + HttpMessage, HttpRequest, + dev::{ServiceRequest, ServiceResponse}, + guard::GuardContext, +}; + +use 
crate::Session; + +pub trait SessionExt { + fn get_session(&self) -> Session; +} + +impl SessionExt for HttpRequest { + fn get_session(&self) -> Session { + Session::get_session(&mut self.extensions_mut()) + } +} + +impl SessionExt for ServiceRequest { + fn get_session(&self) -> Session { + Session::get_session(&mut self.extensions_mut()) + } +} + +impl SessionExt for ServiceResponse { + fn get_session(&self) -> Session { + self.request().get_session() + } +} + +impl SessionExt for GuardContext<'_> { + fn get_session(&self) -> Session { + Session::get_session(&mut self.req_data_mut()) + } +} diff --git a/libs/session/storage/format.rs b/libs/session/storage/format.rs new file mode 100644 index 0000000..916c371 --- /dev/null +++ b/libs/session/storage/format.rs @@ -0,0 +1,75 @@ +use std::collections::HashMap; + +use serde::ser::{Serialize, SerializeMap, Serializer}; +use serde_json::{Map, Value}; + +use super::interface::SessionState; + +const SESSION_STATE_FORMAT_VERSION: u8 = 1; + +#[derive(Debug)] +struct StoredSessionStateRef<'a> { + state: &'a SessionState, +} + +impl Serialize for StoredSessionStateRef<'_> { + fn serialize(&self, serializer: S) -> Result + where + S: Serializer, + { + let mut map = serializer.serialize_map(Some(2))?; + map.serialize_entry("v", &SESSION_STATE_FORMAT_VERSION)?; + map.serialize_entry("state", self.state)?; + map.end() + } +} + +pub(crate) fn serialize_session_state( + session_state: &SessionState, +) -> Result { + let stored = StoredSessionStateRef { + state: session_state, + }; + + serde_json::to_string(&stored).map_err(anyhow::Error::new) +} + +pub(crate) fn deserialize_session_state(value: &str) -> Result { + let value = serde_json::from_str::(value)?; + + let Value::Object(mut obj) = value else { + anyhow::bail!("Session state is not a JSON object"); + }; + + if matches!(obj.get("state"), Some(Value::Object(_))) { + if let Some(Value::Number(v)) = obj.get("v") { + let v = v + .as_u64() + .ok_or_else(|| 
anyhow::anyhow!("Invalid session state format version"))?; + let v = u8::try_from(v) + .map_err(|_| anyhow::anyhow!("Invalid session state format version"))?; + anyhow::ensure!( + v == SESSION_STATE_FORMAT_VERSION, + "Unsupported session state format version: {}", + v + ); + + let Some(Value::Object(state)) = obj.remove("state") else { + unreachable!("`state` was checked to be an object above"); + }; + return Ok(state); + } + } + + if obj.values().all(Value::is_string) { + let legacy: HashMap = serde_json::from_value(Value::Object(obj))?; + let mut migrated: Map = Map::new(); + for (key, json_encoded) in legacy { + migrated.insert(key, serde_json::from_str::(&json_encoded)?); + } + + return Ok(migrated); + } + + Ok(obj) +} diff --git a/libs/session/storage/interface.rs b/libs/session/storage/interface.rs new file mode 100644 index 0000000..7e26d2b --- /dev/null +++ b/libs/session/storage/interface.rs @@ -0,0 +1,91 @@ +use std::future::Future; + +use actix_web::cookie::time::Duration; +use derive_more::derive::Display; +use serde_json::{Map, Value}; + +use super::SessionKey; + +pub(crate) type SessionState = Map; + +pub trait SessionStore { + fn load( + &self, + session_key: &SessionKey, + ) -> impl Future, LoadError>>; + + fn save( + &self, + session_state: SessionState, + ttl: &Duration, + ) -> impl Future>; + + fn update( + &self, + session_key: SessionKey, + session_state: SessionState, + ttl: &Duration, + ) -> impl Future>; + + fn update_ttl( + &self, + session_key: &SessionKey, + ttl: &Duration, + ) -> impl Future>; + + fn delete(&self, session_key: &SessionKey) -> impl Future>; +} + +#[derive(Debug, Display)] +pub enum LoadError { + #[display("Failed to deserialize session state")] + Deserialization(anyhow::Error), + + #[display("Something went wrong when retrieving the session state")] + Other(anyhow::Error), +} + +impl std::error::Error for LoadError { + fn source(&self) -> Option<&(dyn std::error::Error + 'static)> { + match self { + 
Self::Deserialization(err) => Some(err.as_ref()),
            Self::Other(err) => Some(err.as_ref()),
        }
    }
}

#[derive(Debug, Display)]
pub enum SaveError {
    #[display("Failed to serialize session state")]
    Serialization(anyhow::Error),

    #[display("Something went wrong when persisting the session state")]
    Other(anyhow::Error),
}

impl std::error::Error for SaveError {
    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
        match self {
            Self::Serialization(err) => Some(err.as_ref()),
            Self::Other(err) => Some(err.as_ref()),
        }
    }
}

#[derive(Debug, Display)]
pub enum UpdateError {
    #[display("Failed to serialize session state")]
    Serialization(anyhow::Error),

    #[display("Something went wrong when updating the session state.")]
    Other(anyhow::Error),
}

impl std::error::Error for UpdateError {
    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
        match self {
            Self::Serialization(err) => Some(err.as_ref()),
            Self::Other(err) => Some(err.as_ref()),
        }
    }
}

// ---------------------------------------------------------------------------
// libs/session/storage/mod.rs
// ---------------------------------------------------------------------------

//! Pluggable storage backends for session state.

mod format;
mod interface;
mod redis_cluster;
mod session_key;
mod utils;

pub use self::redis_cluster::RedisClusterSessionStore;
pub use self::{
    interface::{LoadError, SaveError, SessionStore, UpdateError},
    session_key::SessionKey,
    utils::generate_session_key,
};

// ---------------------------------------------------------------------------
// libs/session/storage/redis_cluster.rs
// ---------------------------------------------------------------------------

use std::sync::Arc;

use actix_web::cookie::time::Duration;
use redis::Commands;
use redis::cluster::ClusterClient;
use tokio::task;

use super::SessionKey;
use crate::storage::{
    SessionStore,
    format::{deserialize_session_state, serialize_session_state},
    interface::{LoadError, SaveError, SessionState, UpdateError},
    utils::generate_session_key,
};

/// `SessionStore` backed by a Redis cluster via the synchronous redis-rs
/// client; every operation runs on the blocking thread pool.
#[derive(Clone)]
pub struct RedisClusterSessionStore {
    configuration: CacheConfiguration,
    client: ClusterClient,
}

#[derive(Clone)]
struct CacheConfiguration {
    // Maps a session key to the Redis cache key (identity by default).
    cache_keygen: Arc<dyn Fn(&str) -> String + Send + Sync>,
}

impl Default for CacheConfiguration {
    fn default() -> Self {
        Self {
            cache_keygen: Arc::new(str::to_owned),
        }
    }
}

impl RedisClusterSessionStore {
    pub fn builder(connection_strings: Vec<String>) -> RedisClusterSessionStoreBuilder {
        RedisClusterSessionStoreBuilder {
            configuration: CacheConfiguration::default(),
            connection_strings,
        }
    }

    pub async fn new(connection_strings: Vec<String>) -> anyhow::Result<Self> {
        Self::builder(connection_strings).build().await
    }

    /// Open a fresh blocking cluster connection.
    /// NOTE(review): the return type's generic parameter was lost in
    /// extraction; reconstructed as `redis::cluster::ClusterConnection`.
    fn get_connection(&self) -> anyhow::Result<redis::cluster::ClusterConnection> {
        self.client.get_connection().map_err(|e| anyhow::anyhow!(e))
    }
}

#[must_use]
pub struct RedisClusterSessionStoreBuilder {
    configuration: CacheConfiguration,
    connection_strings: Vec<String>,
}

impl RedisClusterSessionStoreBuilder {
    /// Override how session keys are mapped to Redis cache keys.
    pub fn cache_keygen<F>(mut self, keygen: F) -> Self
    where
        F: Fn(&str) -> String + 'static + Send + Sync,
    {
        self.configuration.cache_keygen = Arc::new(keygen);
        self
    }

    pub async fn build(self) -> anyhow::Result<RedisClusterSessionStore> {
        let client = ClusterClient::new(self.connection_strings)?;
        Ok(RedisClusterSessionStore {
            configuration: self.configuration,
            client,
        })
    }
}

impl SessionStore for RedisClusterSessionStore {
    async fn load(&self, session_key: &SessionKey) -> Result<Option<SessionState>, LoadError> {
        let cache_key = self.configuration.cache_keygen.as_ref()(session_key.as_ref());
        let conn = self.get_connection().map_err(LoadError::Other)?;

        let value: Option<String> = task::spawn_blocking(move || {
            let mut conn = conn;
            conn.get::<_, Option<String>>(&cache_key)
        })
        .await
        .map_err(|_| LoadError::Other(anyhow::anyhow!("Task panicked")))?
        .map_err(|e: redis::RedisError| LoadError::Other(anyhow::anyhow!(e)))?;

        match value {
            None => Ok(None),
            Some(value) => Ok(Some(
                deserialize_session_state(&value).map_err(LoadError::Deserialization)?,
            )),
        }
    }

    async fn save(
        &self,
        session_state: SessionState,
        ttl: &Duration,
    ) -> Result<SessionKey, SaveError> {
        let body = serialize_session_state(&session_state).map_err(SaveError::Serialization)?;
        let session_key = generate_session_key();
        let cache_key = self.configuration.cache_keygen.as_ref()(session_key.as_ref());
        let ttl_secs = ttl.whole_seconds() as u64;
        let conn = self.get_connection().map_err(SaveError::Other)?;

        task::spawn_blocking(move || {
            let mut conn = conn;
            conn.set_ex::<_, _, ()>(&cache_key, &body, ttl_secs)
        })
        .await
        .map_err(|_| SaveError::Other(anyhow::anyhow!("Task panicked")))?
        .map_err(|e: redis::RedisError| SaveError::Other(anyhow::anyhow!(e)))?;

        Ok(session_key)
    }

    async fn update(
        &self,
        session_key: SessionKey,
        session_state: SessionState,
        ttl: &Duration,
    ) -> Result<SessionKey, UpdateError> {
        let body = serialize_session_state(&session_state).map_err(UpdateError::Serialization)?;
        let cache_key = self.configuration.cache_keygen.as_ref()(session_key.as_ref());
        let ttl_secs = ttl.whole_seconds();
        let conn = self.get_connection().map_err(UpdateError::Other)?;

        // BUGFIX: the original used an unconditional SETEX and interpreted
        // its "OK" reply as "key existed", so the missing-key branch below
        // was unreachable and an expired/deleted session would be silently
        // resurrected under its old key. `SET .. XX EX ..` writes only when
        // the key still exists and replies Nil otherwise.
        let existed: bool = task::spawn_blocking(move || {
            let mut conn = conn;
            redis::cmd("SET")
                .arg(&cache_key)
                .arg(&body)
                .arg("XX")
                .arg("EX")
                .arg(ttl_secs)
                .query::<Option<String>>(&mut conn)
                .map(|reply| reply.is_some())
        })
        .await
        .map_err(|_| UpdateError::Other(anyhow::anyhow!("Task panicked")))?
        .map_err(|e: redis::RedisError| UpdateError::Other(anyhow::anyhow!(e)))?;

        if !existed {
            // The session disappeared (expired or deleted); persist the new
            // state under a freshly generated key instead.
            self.save(session_state, ttl)
                .await
                .map_err(|err| match err {
                    SaveError::Serialization(err) => UpdateError::Serialization(err),
                    SaveError::Other(err) => UpdateError::Other(err),
                })
        } else {
            Ok(session_key)
        }
    }

    async fn update_ttl(&self, session_key: &SessionKey, ttl: &Duration) -> anyhow::Result<()> {
        let cache_key = self.configuration.cache_keygen.as_ref()(session_key.as_ref());
        let ttl_secs = ttl.whole_seconds() as i64;
        let conn = self.get_connection()?;

        task::spawn_blocking(move || {
            let mut conn = conn;
            conn.expire::<_, bool>(&cache_key, ttl_secs)
        })
        .await
        .map_err(|_| anyhow::anyhow!("Task panicked"))?
        .map_err(|e: redis::RedisError| anyhow::anyhow!(e))?;

        Ok(())
    }

    async fn delete(&self, session_key: &SessionKey) -> Result<(), anyhow::Error> {
        let cache_key = self.configuration.cache_keygen.as_ref()(session_key.as_ref());
        let conn = self.get_connection()?;

        task::spawn_blocking(move || {
            let mut conn = conn;
            conn.del::<_, i64>(&cache_key)
        })
        .await
        .map_err(|_| anyhow::anyhow!("Task panicked"))?
        // NOTE(review): the extracted text is truncated at this point; the
        // final error mapping and `Ok(())` are reconstructed by analogy with
        // `update_ttl` — confirm against the original sources.
        .map_err(|e: redis::RedisError| anyhow::anyhow!(e))?;

        Ok(())
    }
}
+ .map_err(|e: redis::RedisError| anyhow::anyhow!(e))?; + + Ok(()) + } +} diff --git a/libs/session/storage/session_key.rs b/libs/session/storage/session_key.rs new file mode 100644 index 0000000..0704df4 --- /dev/null +++ b/libs/session/storage/session_key.rs @@ -0,0 +1,48 @@ +use derive_more::derive::{Display, From}; + +#[derive(Debug, PartialEq, Eq)] +pub struct SessionKey(String); + +impl TryFrom for SessionKey { + type Error = InvalidSessionKeyError; + + fn try_from(val: String) -> Result { + if val.len() > 4064 { + return Err(anyhow::anyhow!( + "The session key is bigger than 4064 bytes, the upper limit on cookie content." + ) + .into()); + } + + if val.contains('\0') { + return Err(anyhow::anyhow!( + "The session key contains null bytes which are not allowed." + ) + .into()); + } + + Ok(SessionKey(val)) + } +} + +impl AsRef for SessionKey { + fn as_ref(&self) -> &str { + &self.0 + } +} + +impl From for String { + fn from(key: SessionKey) -> Self { + key.0 + } +} + +#[derive(Debug, Display, From)] +#[display("The provided string is not a valid session key")] +pub struct InvalidSessionKeyError(anyhow::Error); + +impl std::error::Error for InvalidSessionKeyError { + fn source(&self) -> Option<&(dyn std::error::Error + 'static)> { + Some(self.0.as_ref()) + } +} diff --git a/libs/session/storage/utils.rs b/libs/session/storage/utils.rs new file mode 100644 index 0000000..f2725c6 --- /dev/null +++ b/libs/session/storage/utils.rs @@ -0,0 +1,10 @@ +use rand::distr::{Alphanumeric, SampleString as _}; + +use crate::storage::SessionKey; + +pub fn generate_session_key() -> SessionKey { + Alphanumeric + .sample_string(&mut rand::rng(), 64) + .try_into() + .unwrap() +} diff --git a/libs/transport/Cargo.toml b/libs/transport/Cargo.toml new file mode 100644 index 0000000..7a2fc82 --- /dev/null +++ b/libs/transport/Cargo.toml @@ -0,0 +1,20 @@ +[package] +name = "transport" +version.workspace = true +edition.workspace = true +authors.workspace = true +description.workspace = 
true +repository.workspace = true +readme.workspace = true +homepage.workspace = true +license.workspace = true +keywords.workspace = true +categories.workspace = true +documentation.workspace = true +[lib] +path = "lib.rs" +name = "transport" +[dependencies] + +[lints] +workspace = true diff --git a/libs/transport/lib.rs b/libs/transport/lib.rs new file mode 100644 index 0000000..b93cf3f --- /dev/null +++ b/libs/transport/lib.rs @@ -0,0 +1,14 @@ +pub fn add(left: u64, right: u64) -> u64 { + left + right +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn it_works() { + let result = add(2, 2); + assert_eq!(result, 4); + } +} diff --git a/libs/webhook/Cargo.toml b/libs/webhook/Cargo.toml new file mode 100644 index 0000000..d7c27eb --- /dev/null +++ b/libs/webhook/Cargo.toml @@ -0,0 +1,20 @@ +[package] +name = "webhook" +version.workspace = true +edition.workspace = true +authors.workspace = true +description.workspace = true +repository.workspace = true +readme.workspace = true +homepage.workspace = true +license.workspace = true +keywords.workspace = true +categories.workspace = true +documentation.workspace = true +[lib] +path = "lib.rs" +name = "webhook" +[dependencies] + +[lints] +workspace = true diff --git a/libs/webhook/lib.rs b/libs/webhook/lib.rs new file mode 100644 index 0000000..b93cf3f --- /dev/null +++ b/libs/webhook/lib.rs @@ -0,0 +1,14 @@ +pub fn add(left: u64, right: u64) -> u64 { + left + right +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn it_works() { + let result = add(2, 2); + assert_eq!(result, 4); + } +} diff --git a/openapi-ts.config.ts b/openapi-ts.config.ts new file mode 100644 index 0000000..96deb3d --- /dev/null +++ b/openapi-ts.config.ts @@ -0,0 +1,8 @@ +import { defineConfig } from '@hey-api/openapi-ts'; +import type { UserConfig } from '@hey-api/openapi-ts'; + +export default defineConfig({ + input: 'openapi.json', + output: 'src/client', + client: '@hey-api/client-axios', +} as UserConfig); diff --git 
a/openapi.json b/openapi.json new file mode 100644 index 0000000..772ddd8 --- /dev/null +++ b/openapi.json @@ -0,0 +1,41489 @@ +{ + "openapi": "3.1.0", + "info": { + "title": "api", + "description": "", + "license": { + "name": "" + }, + "version": "0.2.9" + }, + "paths": { + "/api/agents/capabilities": { + "post": { + "tags": [ + "Agent" + ], + "operationId": "model_capability_create", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/CreateModelCapabilityRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ModelCapabilityResponse" + } + } + } + }, + "401": { + "description": "" + }, + "403": { + "description": "" + } + } + } + }, + "/api/agents/capabilities/{id}": { + "get": { + "tags": [ + "Agent" + ], + "operationId": "model_capability_get", + "parameters": [ + { + "name": "id", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + } + ], + "responses": { + "200": { + "description": "", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ModelCapabilityResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "" + } + } + }, + "delete": { + "tags": [ + "Agent" + ], + "operationId": "model_capability_delete", + "parameters": [ + { + "name": "id", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + } + ], + "responses": { + "200": { + "description": "" + }, + "401": { + "description": "" + }, + "403": { + "description": "" + }, + "404": { + "description": "" + } + } + }, + "patch": { + "tags": [ + "Agent" + ], + "operationId": "model_capability_update", + "parameters": [ + { + "name": "id", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + } + ], + "requestBody": { + 
"content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/UpdateModelCapabilityRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ModelCapabilityResponse" + } + } + } + }, + "401": { + "description": "" + }, + "403": { + "description": "" + }, + "404": { + "description": "" + } + } + } + }, + "/api/agents/code-review/{namespace}/{repo}": { + "post": { + "tags": [ + "Agent" + ], + "operationId": "trigger_code_review", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Project namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/TriggerCodeReviewRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "AI code review triggered", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/TriggerCodeReviewResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Repository or PR not found" + } + } + } + }, + "/api/agents/models": { + "get": { + "tags": [ + "Agent" + ], + "operationId": "model_list", + "parameters": [ + { + "name": "provider_id", + "in": "query", + "required": false, + "schema": { + "type": [ + "string", + "null" + ] + } + } + ], + "responses": { + "200": { + "description": "", + "content": { + "application/json": { + "schema": { + "type": "array", + "items": { + "$ref": "#/components/schemas/ModelResponse" + } + } + } + } + }, + "401": { + "description": "Unauthorized" + } + } + }, + "post": { + "tags": [ + "Agent" + ], + "operationId": "model_create", + "requestBody": { + "content": { + "application/json": { 
+ "schema": { + "$ref": "#/components/schemas/CreateModelRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ModelResponse" + } + } + } + }, + "401": { + "description": "" + }, + "403": { + "description": "" + }, + "404": { + "description": "" + } + } + } + }, + "/api/agents/models/{id}": { + "get": { + "tags": [ + "Agent" + ], + "operationId": "model_get", + "parameters": [ + { + "name": "id", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ModelResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "" + } + } + }, + "delete": { + "tags": [ + "Agent" + ], + "operationId": "model_delete", + "parameters": [ + { + "name": "id", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "" + }, + "401": { + "description": "" + }, + "403": { + "description": "" + }, + "404": { + "description": "" + } + } + }, + "patch": { + "tags": [ + "Agent" + ], + "operationId": "model_update", + "parameters": [ + { + "name": "id", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/UpdateModelRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ModelResponse" + } + } + } + }, + "401": { + "description": "" + }, + "403": { + "description": "" + }, + "404": { + "description": "" + } + } + } + }, + "/api/agents/parameters": { + "post": { + "tags": [ + "Agent" + ], + "operationId": "model_parameter_profile_create", + 
"requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/CreateModelParameterProfileRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ModelParameterProfileResponse" + } + } + } + }, + "401": { + "description": "" + }, + "403": { + "description": "" + } + } + } + }, + "/api/agents/parameters/{id}": { + "get": { + "tags": [ + "Agent" + ], + "operationId": "model_parameter_profile_get", + "parameters": [ + { + "name": "id", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + } + ], + "responses": { + "200": { + "description": "", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ModelParameterProfileResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "" + } + } + }, + "delete": { + "tags": [ + "Agent" + ], + "operationId": "model_parameter_profile_delete", + "parameters": [ + { + "name": "id", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + } + ], + "responses": { + "200": { + "description": "" + }, + "401": { + "description": "" + }, + "403": { + "description": "" + }, + "404": { + "description": "" + } + } + }, + "patch": { + "tags": [ + "Agent" + ], + "operationId": "model_parameter_profile_update", + "parameters": [ + { + "name": "id", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/UpdateModelParameterProfileRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ModelParameterProfileResponse" + } + } + } + }, + "401": { + 
"description": "" + }, + "403": { + "description": "" + }, + "404": { + "description": "" + } + } + } + }, + "/api/agents/pr-description/{namespace}/{repo}": { + "post": { + "tags": [ + "Agent" + ], + "operationId": "generate_pr_description", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Project namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/GeneratePrDescriptionRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "AI-generated PR description", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/GeneratePrDescriptionResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Repository or PR not found" + } + } + } + }, + "/api/agents/pricing": { + "post": { + "tags": [ + "Agent" + ], + "operationId": "model_pricing_create", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/CreateModelPricingRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ModelPricingResponse" + } + } + } + }, + "401": { + "description": "" + }, + "403": { + "description": "" + } + } + } + }, + "/api/agents/pricing/{id}": { + "get": { + "tags": [ + "Agent" + ], + "operationId": "model_pricing_get", + "parameters": [ + { + "name": "id", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + } + ], + "responses": { + "200": { + "description": "", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ModelPricingResponse" + } + } + } + 
}, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "" + } + } + }, + "delete": { + "tags": [ + "Agent" + ], + "operationId": "model_pricing_delete", + "parameters": [ + { + "name": "id", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + } + ], + "responses": { + "200": { + "description": "" + }, + "401": { + "description": "" + }, + "403": { + "description": "" + }, + "404": { + "description": "" + } + } + }, + "patch": { + "tags": [ + "Agent" + ], + "operationId": "model_pricing_update", + "parameters": [ + { + "name": "id", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/UpdateModelPricingRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ModelPricingResponse" + } + } + } + }, + "401": { + "description": "" + }, + "403": { + "description": "" + }, + "404": { + "description": "" + } + } + } + }, + "/api/agents/providers": { + "get": { + "tags": [ + "Agent" + ], + "operationId": "provider_list", + "responses": { + "200": { + "description": "", + "content": { + "application/json": { + "schema": { + "type": "array", + "items": { + "$ref": "#/components/schemas/ProviderResponse" + } + } + } + } + }, + "401": { + "description": "Unauthorized" + } + } + }, + "post": { + "tags": [ + "Agent" + ], + "operationId": "provider_create", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/CreateProviderRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProviderResponse" + } + } + } + }, + "401": { + "description": "" + }, + "403": { + 
"description": "" + } + } + } + }, + "/api/agents/providers/{id}": { + "get": { + "tags": [ + "Agent" + ], + "operationId": "provider_get", + "parameters": [ + { + "name": "id", + "in": "path", + "description": "Provider UUID", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProviderResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "" + } + } + }, + "delete": { + "tags": [ + "Agent" + ], + "operationId": "provider_delete", + "parameters": [ + { + "name": "id", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "" + }, + "401": { + "description": "" + }, + "403": { + "description": "" + }, + "404": { + "description": "" + } + } + }, + "patch": { + "tags": [ + "Agent" + ], + "operationId": "provider_update", + "parameters": [ + { + "name": "id", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/UpdateProviderRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProviderResponse" + } + } + } + }, + "401": { + "description": "" + }, + "403": { + "description": "" + }, + "404": { + "description": "" + } + } + } + }, + "/api/agents/versions": { + "get": { + "tags": [ + "Agent" + ], + "operationId": "model_version_list", + "parameters": [ + { + "name": "model_id", + "in": "query", + "required": false, + "schema": { + "type": [ + "string", + "null" + ] + } + } + ], + "responses": { + "200": { + "description": "", + "content": { + "application/json": { + "schema": { + "type": "array", + "items": { + "$ref": 
"#/components/schemas/ModelVersionResponse" + } + } + } + } + }, + "401": { + "description": "Unauthorized" + } + } + }, + "post": { + "tags": [ + "Agent" + ], + "operationId": "model_version_create", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/CreateModelVersionRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ModelVersionResponse" + } + } + } + }, + "401": { + "description": "" + }, + "403": { + "description": "" + } + } + } + }, + "/api/agents/versions/{id}": { + "get": { + "tags": [ + "Agent" + ], + "operationId": "model_version_get", + "parameters": [ + { + "name": "id", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ModelVersionResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "" + } + } + }, + "delete": { + "tags": [ + "Agent" + ], + "operationId": "model_version_delete", + "parameters": [ + { + "name": "id", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "" + }, + "401": { + "description": "" + }, + "403": { + "description": "" + }, + "404": { + "description": "" + } + } + }, + "patch": { + "tags": [ + "Agent" + ], + "operationId": "model_version_update", + "parameters": [ + { + "name": "id", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/UpdateModelVersionRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "", + "content": { + "application/json": { + "schema": { + "$ref": 
"#/components/schemas/ModelVersionResponse" + } + } + } + }, + "401": { + "description": "" + }, + "403": { + "description": "" + }, + "404": { + "description": "" + } + } + } + }, + "/api/agents/versions/{model_version_id}/capabilities": { + "get": { + "tags": [ + "Agent" + ], + "operationId": "model_capability_list", + "parameters": [ + { + "name": "model_version_id", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + } + ], + "responses": { + "200": { + "description": "", + "content": { + "application/json": { + "schema": { + "type": "array", + "items": { + "$ref": "#/components/schemas/ModelCapabilityResponse" + } + } + } + } + }, + "401": { + "description": "Unauthorized" + } + } + } + }, + "/api/agents/versions/{model_version_id}/parameters": { + "get": { + "tags": [ + "Agent" + ], + "operationId": "model_parameter_profile_list", + "parameters": [ + { + "name": "model_version_id", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "", + "content": { + "application/json": { + "schema": { + "type": "array", + "items": { + "$ref": "#/components/schemas/ModelParameterProfileResponse" + } + } + } + } + }, + "401": { + "description": "Unauthorized" + } + } + } + }, + "/api/agents/versions/{model_version_id}/pricing": { + "get": { + "tags": [ + "Agent" + ], + "operationId": "model_pricing_list", + "parameters": [ + { + "name": "model_version_id", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "", + "content": { + "application/json": { + "schema": { + "type": "array", + "items": { + "$ref": "#/components/schemas/ModelPricingResponse" + } + } + } + } + }, + "401": { + "description": "Unauthorized" + } + } + } + }, + "/api/auth/2fa/disable": { + "post": { + "tags": [ + "Auth" + ], + "operationId": "api_2fa_disable", + "requestBody": { + "content": { + "application/json": 
{ + "schema": { + "$ref": "#/components/schemas/Disable2FAParams" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "2FA disabled" + }, + "400": { + "description": "2FA not enabled or invalid code/password" + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "500": { + "description": "Internal server error" + } + } + } + }, + "/api/auth/2fa/enable": { + "post": { + "tags": [ + "Auth" + ], + "operationId": "api_2fa_enable", + "responses": { + "200": { + "description": "2FA setup initiated", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Enable2FAResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "409": { + "description": "2FA already enabled" + }, + "500": { + "description": "Internal server error" + } + } + } + }, + "/api/auth/2fa/status": { + "post": { + "tags": [ + "Auth" + ], + "operationId": "api_2fa_status", + "responses": { + "200": { + "description": "2FA status", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Get2FAStatusResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "500": { + "description": "Internal server error" + } + } + } + }, + "/api/auth/2fa/verify": { + "post": { + "tags": [ + "Auth" + ], + "operationId": "api_2fa_verify", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Verify2FAParams" + } + } + }, + "required": true + }, + "responses": { + 
"200": { + "description": "2FA verified and enabled" + }, + "400": { + "description": "2FA not set up" + }, + "401": { + "description": "Unauthorized or invalid code" + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "500": { + "description": "Internal server error" + } + } + } + }, + "/api/auth/captcha": { + "post": { + "tags": [ + "Auth" + ], + "operationId": "api_auth_captcha", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/CaptchaQuery" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Captcha generated", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_CaptchaResponse" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "500": { + "description": "Internal server error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/auth/email": { + "post": { + "tags": [ + "Auth" + ], + "operationId": "api_email_get", + "responses": { + "200": { + "description": "Current email address", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_EmailResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + } + } + } + }, + "/api/auth/email/change": { + "post": { + "tags": [ + "Auth" + ], + "operationId": "api_email_change", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/EmailChangeRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Verification email sent", + "content": { + "application/json": { + "schema": { + "$ref": 
"#/components/schemas/ApiResponse_String" + } + } + } + }, + "401": { + "description": "Unauthorized or invalid password" + }, + "409": { + "description": "Email already in use" + } + } + } + }, + "/api/auth/email/verify": { + "post": { + "tags": [ + "Auth" + ], + "operationId": "api_email_verify", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/EmailVerifyRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Email updated successfully", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_String" + } + } + } + }, + "400": { + "description": "Invalid or expired token" + } + } + } + }, + "/api/auth/login": { + "post": { + "tags": [ + "Auth" + ], + "operationId": "api_auth_login", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/LoginParams" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Login successful", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_String" + } + } + } + }, + "400": { + "description": "Bad request", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "401": { + "description": "Invalid credentials", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "428": { + "description": "Two-factor authentication required", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/auth/logout": { + "post": { + "tags": [ + "Auth" + ], + "operationId": "api_auth_logout", + "responses": { + "200": 
{ + "description": "Logout successful", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_String" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "500": { + "description": "Internal server error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/auth/me": { + "post": { + "tags": [ + "Auth" + ], + "operationId": "api_auth_me", + "responses": { + "200": { + "description": "Current user info", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ContextMe" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "500": { + "description": "Internal server error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/auth/password/change": { + "post": { + "tags": [ + "Auth" + ], + "operationId": "api_user_change_password", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ChangePasswordParams" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Password changed successfully", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_String" + } + } + } + }, + "400": { + "description": "Bad 
request", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "401": { + "description": "Unauthorized or invalid password", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "500": { + "description": "Internal server error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/auth/password/reset": { + "post": { + "tags": [ + "Auth" + ], + "operationId": "api_user_request_password_reset", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ResetPasswordParams" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Password reset email sent", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_String" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "User not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "500": { + "description": "Internal server error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/auth/register": { + "post": { + "tags": [ + "Auth" + ], + "operationId": "api_auth_register", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/RegisterParams" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": 
"Registration successful", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_String" + } + } + } + }, + "400": { + "description": "Bad request", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "409": { + "description": "Username or email already exists", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "500": { + "description": "Internal server error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/git/init": { + "post": { + "tags": [ + "Git" + ], + "operationId": "git_init_bare", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/GitInitRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Bare repository initialized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_GitInitResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/git/is-repo/{path}": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_is_repo", + "parameters": [ + { + "name": "path", + "in": "path", + 
"description": "Repository path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Check if path is a repository", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_bool" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/git/open/{path}": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_open", + "parameters": [ + { + "name": "path", + "in": "path", + "description": "Repository path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Open repository", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_GitInitResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/git/open/{path}/workdir": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_open_workdir", + "parameters": [ + { + "name": "path", + "in": "path", + "description": "Repository path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Open repository working directory", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_GitInitResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + 
"schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/issue/{project}/issues": { + "get": { + "tags": [ + "Issues" + ], + "operationId": "issue_list", + "parameters": [ + { + "name": "project", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "state", + "in": "query", + "required": false, + "schema": { + "type": "string" + } + }, + { + "name": "page", + "in": "query", + "required": false, + "schema": { + "type": "integer", + "format": "int64" + } + }, + { + "name": "per_page", + "in": "query", + "required": false, + "schema": { + "type": "integer", + "format": "int64" + } + } + ], + "responses": { + "200": { + "description": "List issues", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_IssueListResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Not found" + } + } + }, + "post": { + "tags": [ + "Issues" + ], + "operationId": "issue_create", + "parameters": [ + { + "name": "project", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/IssueCreateRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Create issue", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_IssueResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "403": { + "description": "Forbidden" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/issue/{project}/issues/summary": { + "get": { + "tags": [ + "Issues" + ], + "operationId": "issue_summary", + "parameters": [ + { + "name": "project", + 
"in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Get issue summary", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_IssueSummaryResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/issue/{project}/issues/{number}": { + "get": { + "tags": [ + "Issues" + ], + "operationId": "issue_get", + "parameters": [ + { + "name": "project", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "number", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + } + ], + "responses": { + "200": { + "description": "Get issue", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_IssueResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Not found" + } + } + }, + "delete": { + "tags": [ + "Issues" + ], + "operationId": "issue_delete", + "parameters": [ + { + "name": "project", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "number", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + } + ], + "responses": { + "200": { + "description": "Delete issue" + }, + "401": { + "description": "Unauthorized" + }, + "403": { + "description": "Forbidden" + }, + "404": { + "description": "Not found" + } + } + }, + "patch": { + "tags": [ + "Issues" + ], + "operationId": "issue_update", + "parameters": [ + { + "name": "project", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "number", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": 
"#/components/schemas/IssueUpdateRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Update issue", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_IssueResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "403": { + "description": "Forbidden" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/issue/{project}/issues/{number}/assignees": { + "get": { + "tags": [ + "Issues" + ], + "operationId": "issue_assignee_list", + "parameters": [ + { + "name": "project", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "number", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + } + ], + "responses": { + "200": { + "description": "List issue assignees", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_Vec_IssueAssigneeResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Not found" + } + } + }, + "post": { + "tags": [ + "Issues" + ], + "operationId": "issue_assignee_add", + "parameters": [ + { + "name": "project", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "number", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/IssueAssignUserRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Add assignee to issue", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_IssueAssigneeResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "403": { + "description": "Forbidden" + }, + "404": { + "description": "Not found" + } + } + } + }, + 
"/api/issue/{project}/issues/{number}/assignees/{assignee_id}": { + "delete": { + "tags": [ + "Issues" + ], + "operationId": "issue_assignee_remove", + "parameters": [ + { + "name": "project", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "number", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + }, + { + "name": "assignee_id", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Remove assignee from issue" + }, + "401": { + "description": "Unauthorized" + }, + "403": { + "description": "Forbidden" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/issue/{project}/issues/{number}/close": { + "post": { + "tags": [ + "Issues" + ], + "operationId": "issue_close", + "parameters": [ + { + "name": "project", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "number", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + } + ], + "responses": { + "200": { + "description": "Close issue", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_IssueResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "403": { + "description": "Forbidden" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/issue/{project}/issues/{number}/comments": { + "get": { + "tags": [ + "Issues" + ], + "operationId": "issue_comment_list", + "parameters": [ + { + "name": "project", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "number", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + }, + { + "name": "page", + "in": "query", + "required": false, + "schema": { + "type": "integer", + "format": "int64" + } + }, + { + "name": "per_page", + "in": "query", + 
"required": false, + "schema": { + "type": "integer", + "format": "int64" + } + } + ], + "responses": { + "200": { + "description": "List issue comments", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_IssueCommentListResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Not found" + } + } + }, + "post": { + "tags": [ + "Issues" + ], + "operationId": "issue_comment_create", + "parameters": [ + { + "name": "project", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "number", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/IssueCommentCreateRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Create issue comment", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_IssueCommentResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "403": { + "description": "Forbidden" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/issue/{project}/issues/{number}/comments/{comment_id}": { + "get": { + "tags": [ + "Issues" + ], + "operationId": "issue_comment_get", + "parameters": [ + { + "name": "project", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "number", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + }, + { + "name": "comment_id", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + } + ], + "responses": { + "200": { + "description": "Get issue comment", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_IssueCommentResponse" + } + } + } + }, + "401": { + "description": 
"Unauthorized" + }, + "404": { + "description": "Not found" + } + } + }, + "delete": { + "tags": [ + "Issues" + ], + "operationId": "issue_comment_delete", + "parameters": [ + { + "name": "project", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "number", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + }, + { + "name": "comment_id", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + } + ], + "responses": { + "200": { + "description": "Delete issue comment" + }, + "401": { + "description": "Unauthorized" + }, + "403": { + "description": "Forbidden" + }, + "404": { + "description": "Not found" + } + } + }, + "patch": { + "tags": [ + "Issues" + ], + "operationId": "issue_comment_update", + "parameters": [ + { + "name": "project", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "number", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + }, + { + "name": "comment_id", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/IssueCommentUpdateRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Update issue comment", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_IssueCommentResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "403": { + "description": "Forbidden" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/issue/{project}/issues/{number}/comments/{comment_id}/reactions": { + "get": { + "tags": [ + "Issues" + ], + "operationId": "issue_comment_reaction_list", + "parameters": [ + { + "name": "project", + "in": "path", + "required": true, + "schema": { + "type": 
"string" + } + }, + { + "name": "number", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + }, + { + "name": "comment_id", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + } + ], + "responses": { + "200": { + "description": "List comment reactions", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ReactionListResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Not found" + } + } + }, + "post": { + "tags": [ + "Issues" + ], + "operationId": "issue_comment_reaction_add", + "parameters": [ + { + "name": "project", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "number", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + }, + { + "name": "comment_id", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ReactionAddRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Add reaction to comment", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ReactionResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "403": { + "description": "Forbidden" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/issue/{project}/issues/{number}/comments/{comment_id}/reactions/{reaction}": { + "delete": { + "tags": [ + "Issues" + ], + "operationId": "issue_comment_reaction_remove", + "parameters": [ + { + "name": "project", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "number", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + }, + { + "name": 
"comment_id", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + }, + { + "name": "reaction", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Remove reaction from comment" + }, + "401": { + "description": "Unauthorized" + }, + "403": { + "description": "Forbidden" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/issue/{project}/issues/{number}/labels": { + "get": { + "tags": [ + "Issues" + ], + "operationId": "issue_label_list", + "parameters": [ + { + "name": "project", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "number", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + } + ], + "responses": { + "200": { + "description": "List issue labels", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_Vec_IssueLabelResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Not found" + } + } + }, + "post": { + "tags": [ + "Issues" + ], + "operationId": "issue_label_add", + "parameters": [ + { + "name": "project", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "number", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/IssueAddLabelRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Add label to issue", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_IssueLabelResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "403": { + "description": "Forbidden" + }, + "404": { + "description": "Not found" + } + } + } + }, + 
"/api/issue/{project}/issues/{number}/labels/{label_id}": { + "delete": { + "tags": [ + "Issues" + ], + "operationId": "issue_label_remove", + "parameters": [ + { + "name": "project", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "number", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + }, + { + "name": "label_id", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + } + ], + "responses": { + "200": { + "description": "Remove label from issue" + }, + "401": { + "description": "Unauthorized" + }, + "403": { + "description": "Forbidden" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/issue/{project}/issues/{number}/pulls": { + "get": { + "tags": [ + "Issues" + ], + "operationId": "issue_pull_request_list", + "parameters": [ + { + "name": "project", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "number", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + } + ], + "responses": { + "200": { + "description": "List issue pull requests", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_Vec_IssuePullRequestResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Not found" + } + } + }, + "post": { + "tags": [ + "Issues" + ], + "operationId": "issue_pull_request_link", + "parameters": [ + { + "name": "project", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "number", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/IssueLinkPullRequestRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Link 
pull request to issue", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_IssuePullRequestResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "403": { + "description": "Forbidden" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/issue/{project}/issues/{number}/pulls/{repo_id}/{pr_number}": { + "delete": { + "tags": [ + "Issues" + ], + "operationId": "issue_pull_request_unlink", + "parameters": [ + { + "name": "project", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "number", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + }, + { + "name": "repo_id", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "pr_number", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + } + ], + "responses": { + "200": { + "description": "Unlink pull request from issue" + }, + "401": { + "description": "Unauthorized" + }, + "403": { + "description": "Forbidden" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/issue/{project}/issues/{number}/reactions": { + "get": { + "tags": [ + "Issues" + ], + "operationId": "issue_reaction_list", + "parameters": [ + { + "name": "project", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "number", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + } + ], + "responses": { + "200": { + "description": "List issue reactions", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ReactionListResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Not found" + } + } + }, + "post": { + "tags": [ + "Issues" + ], + "operationId": "issue_reaction_add", + "parameters": [ + { + "name": "project", + 
"in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "number", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ReactionAddRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Add reaction to issue", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ReactionResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "403": { + "description": "Forbidden" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/issue/{project}/issues/{number}/reactions/{reaction}": { + "delete": { + "tags": [ + "Issues" + ], + "operationId": "issue_reaction_remove", + "parameters": [ + { + "name": "project", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "number", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + }, + { + "name": "reaction", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Remove reaction from issue" + }, + "401": { + "description": "Unauthorized" + }, + "403": { + "description": "Forbidden" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/issue/{project}/issues/{number}/reopen": { + "post": { + "tags": [ + "Issues" + ], + "operationId": "issue_reopen", + "parameters": [ + { + "name": "project", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "number", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + } + ], + "responses": { + "200": { + "description": "Reopen issue", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_IssueResponse" + } + 
} + } + }, + "401": { + "description": "Unauthorized" + }, + "403": { + "description": "Forbidden" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/issue/{project}/issues/{number}/repos": { + "get": { + "tags": [ + "Issues" + ], + "operationId": "issue_repo_list", + "parameters": [ + { + "name": "project", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "number", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + } + ], + "responses": { + "200": { + "description": "List issue repos", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_Vec_IssueRepoResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Not found" + } + } + }, + "post": { + "tags": [ + "Issues" + ], + "operationId": "issue_repo_link", + "parameters": [ + { + "name": "project", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "number", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/IssueLinkRepoRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Link repo to issue", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_IssueRepoResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "403": { + "description": "Forbidden" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/issue/{project}/issues/{number}/repos/{repo_id}": { + "delete": { + "tags": [ + "Issues" + ], + "operationId": "issue_repo_unlink", + "parameters": [ + { + "name": "project", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "number", + "in": "path", + "required": true, + 
"schema": { + "type": "integer", + "format": "int64" + } + }, + { + "name": "repo_id", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Unlink repo from issue" + }, + "401": { + "description": "Unauthorized" + }, + "403": { + "description": "Forbidden" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/issue/{project}/issues/{number}/subscribe": { + "post": { + "tags": [ + "Issues" + ], + "operationId": "issue_subscribe", + "parameters": [ + { + "name": "project", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "number", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + } + ], + "responses": { + "200": { + "description": "Subscribe to issue", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_IssueSubscriberResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "403": { + "description": "Forbidden" + }, + "404": { + "description": "Not found" + } + } + }, + "delete": { + "tags": [ + "Issues" + ], + "operationId": "issue_unsubscribe", + "parameters": [ + { + "name": "project", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "number", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + } + ], + "responses": { + "200": { + "description": "Unsubscribe from issue" + }, + "401": { + "description": "Unauthorized" + }, + "403": { + "description": "Forbidden" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/issue/{project}/issues/{number}/subscribers": { + "get": { + "tags": [ + "Issues" + ], + "operationId": "issue_subscriber_list", + "parameters": [ + { + "name": "project", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "number", + "in": "path", + "required": true, + 
"schema": { + "type": "integer", + "format": "int64" + } + } + ], + "responses": { + "200": { + "description": "List issue subscribers", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_Vec_IssueSubscriberResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/issue/{project}/labels": { + "get": { + "tags": [ + "Issues" + ], + "operationId": "label_list", + "parameters": [ + { + "name": "project", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "List labels", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_Vec_LabelResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Not found" + } + } + }, + "post": { + "tags": [ + "Issues" + ], + "operationId": "label_create", + "parameters": [ + { + "name": "project", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/CreateLabelRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Create label", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_LabelResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "403": { + "description": "Forbidden" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/issue/{project}/labels/{label_id}": { + "delete": { + "tags": [ + "Issues" + ], + "operationId": "label_delete", + "parameters": [ + { + "name": "project", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "label_id", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + } + ], + "responses": { + 
"200": { + "description": "Delete label" + }, + "401": { + "description": "Unauthorized" + }, + "403": { + "description": "Forbidden" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/me/mentions": { + "get": { + "tags": [ + "Room" + ], + "operationId": "mention_list", + "parameters": [ + { + "name": "limit", + "in": "query", + "required": false, + "schema": { + "type": "integer", + "format": "int64", + "minimum": 0 + } + } + ], + "responses": { + "200": { + "description": "List mentions", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_Vec_MentionNotificationResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + } + } + } + }, + "/api/me/mentions/read-all": { + "post": { + "tags": [ + "Room" + ], + "operationId": "mention_read_all", + "responses": { + "200": { + "description": "Mark all mentions as read" + }, + "401": { + "description": "Unauthorized" + } + } + } + }, + "/api/me/notifications": { + "get": { + "tags": [ + "Room" + ], + "operationId": "notification_list", + "parameters": [ + { + "name": "only_unread", + "in": "query", + "required": false, + "schema": { + "type": "boolean" + } + }, + { + "name": "archived", + "in": "query", + "required": false, + "schema": { + "type": "boolean" + } + }, + { + "name": "limit", + "in": "query", + "required": false, + "schema": { + "type": "integer", + "format": "int64", + "minimum": 0 + } + } + ], + "responses": { + "200": { + "description": "List notifications", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_NotificationListResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + } + } + } + }, + "/api/me/notifications/read-all": { + "post": { + "tags": [ + "Room" + ], + "operationId": "notification_mark_all_read", + "responses": { + "200": { + "description": "Mark all notifications as read" + }, + "401": { + "description": "Unauthorized" + } + } + } + }, + 
"/api/me/notifications/{notification_id}/archive": { + "post": { + "tags": [ + "Room" + ], + "operationId": "notification_archive", + "parameters": [ + { + "name": "notification_id", + "in": "path", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } + } + ], + "responses": { + "200": { + "description": "Archive notification" + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/me/notifications/{notification_id}/read": { + "post": { + "tags": [ + "Room" + ], + "operationId": "notification_mark_read", + "parameters": [ + { + "name": "notification_id", + "in": "path", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } + } + ], + "responses": { + "200": { + "description": "Mark notification as read" + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/project_room/{project_name}/room-categories": { + "get": { + "tags": [ + "Room" + ], + "operationId": "category_list", + "parameters": [ + { + "name": "project_name", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "List room categories", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_Vec_RoomCategoryResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Not found" + } + } + }, + "post": { + "tags": [ + "Room" + ], + "operationId": "category_create", + "parameters": [ + { + "name": "project_name", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/RoomCategoryCreateRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Create room category", + "content": { + "application/json": { + "schema": { + 
"$ref": "#/components/schemas/ApiResponse_RoomCategoryResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "403": { + "description": "Forbidden" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/project_room/{project_name}/rooms": { + "get": { + "tags": [ + "Room" + ], + "operationId": "room_list", + "parameters": [ + { + "name": "project_name", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "only_public", + "in": "query", + "required": false, + "schema": { + "type": "boolean" + } + } + ], + "responses": { + "200": { + "description": "List rooms", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_Vec_RoomResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Not found" + } + } + }, + "post": { + "tags": [ + "Room" + ], + "operationId": "room_create", + "parameters": [ + { + "name": "project_name", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/RoomCreateRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Create room", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_RoomResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "403": { + "description": "Forbidden" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/projects": { + "post": { + "tags": [ + "Project" + ], + "operationId": "project_create", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProjectInitParams" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Create project", + "content": { + "application/json": { + "schema": { + "$ref": 
"#/components/schemas/ApiResponse_ProjectInitResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/projects/me/invitations": { + "get": { + "tags": [ + "Project" + ], + "operationId": "project_my_invitations", + "responses": { + "200": { + "description": "List my invitations", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_InvitationListResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/projects/me/join-requests": { + "get": { + "tags": [ + "Project" + ], + "operationId": "project_my_join_requests", + "responses": { + "200": { + "description": "List my join requests", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_JoinRequestListResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/projects/{project_name}": { + "get": { + "tags": [ + "Project" + ], + "operationId": "project_info", + "parameters": [ + { + "name": "project_name", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Get project info", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ProjectInfoRelational" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/projects/{project_name}/activities": { + "get": { + "tags": [ + "Project" + ], + "operationId": "project_activities", + "parameters": [ + { + "name": "project_name", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "page", + "in": "query", + "required": false, + "schema": { + "type": "integer", + "format": "int64", + "minimum": 0 + } + }, + { + "name": 
"per_page", + "in": "query", + "required": false, + "schema": { + "type": "integer", + "format": "int64", + "minimum": 0 + } + }, + { + "name": "event_type", + "in": "query", + "required": false, + "schema": { + "type": "string" + } + }, + { + "name": "start_date", + "in": "query", + "description": "ISO 8601 datetime, e.g. 2025-01-01T00:00:00Z", + "required": false, + "schema": { + "type": "string" + } + }, + { + "name": "end_date", + "in": "query", + "description": "ISO 8601 datetime, e.g. 2025-12-31T23:59:59Z", + "required": false, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "List project activities", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ActivityLogListResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "403": { + "description": "Forbidden — no access to this project" + }, + "404": { + "description": "Project not found" + } + } + }, + "post": { + "tags": [ + "Project" + ], + "operationId": "project_log_activity", + "parameters": [ + { + "name": "project_name", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ActivityLogParams" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Activity logged", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ActivityLogResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Project not found" + } + } + } + }, + "/api/projects/{project_name}/audit-logs": { + "get": { + "tags": [ + "Project" + ], + "operationId": "project_audit_logs", + "parameters": [ + { + "name": "project_name", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "page", + "in": "query", + "required": false, + "schema": { + "type": 
"integer", + "format": "int64", + "minimum": 0 + } + }, + { + "name": "per_page", + "in": "query", + "required": false, + "schema": { + "type": "integer", + "format": "int64", + "minimum": 0 + } + } + ], + "responses": { + "200": { + "description": "List project audit logs", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_Vec_AuditLogResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Not found" + } + } + }, + "post": { + "tags": [ + "Project" + ], + "operationId": "project_log_audit", + "parameters": [ + { + "name": "project_name", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/AuditLogParams" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Log project audit event", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_AuditLogResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/projects/{project_name}/audit-logs/{log_id}": { + "get": { + "tags": [ + "Project" + ], + "operationId": "project_audit_log", + "parameters": [ + { + "name": "project_name", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "log_id", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + } + ], + "responses": { + "200": { + "description": "Get project audit log", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_AuditLogResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/projects/{project_name}/billing": { + "get": { + "tags": [ + "Project" + ], + "operationId": 
"project_billing", + "parameters": [ + { + "name": "project_name", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Get project billing", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ProjectBillingCurrentResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "403": { + "description": "Forbidden" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/projects/{project_name}/billing/history": { + "get": { + "tags": [ + "Project" + ], + "operationId": "project_billing_history", + "parameters": [ + { + "name": "project_name", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Get project billing history", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ProjectBillingHistoryResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "403": { + "description": "Forbidden" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/projects/{project_name}/boards": { + "get": { + "tags": [ + "Project" + ], + "operationId": "board_list", + "parameters": [ + { + "name": "project_name", + "in": "path", + "description": "Project name", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "List boards", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_Vec_BoardResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + } + } + }, + "post": { + "tags": [ + "Project" + ], + "operationId": "board_create", + "parameters": [ + { + "name": "project_name", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": 
"#/components/schemas/CreateBoardParams" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Create board", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_BoardResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + } + } + } + }, + "/api/projects/{project_name}/boards/{board_id}": { + "get": { + "tags": [ + "Project" + ], + "operationId": "board_get", + "parameters": [ + { + "name": "project_name", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "board_id", + "in": "path", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } + } + ], + "responses": { + "200": { + "description": "Get board with columns and cards", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_BoardWithColumnsResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Not found" + } + } + }, + "delete": { + "tags": [ + "Project" + ], + "operationId": "board_delete", + "parameters": [ + { + "name": "project_name", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "board_id", + "in": "path", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } + } + ], + "responses": { + "200": { + "description": "Delete board" + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Not found" + } + } + }, + "patch": { + "tags": [ + "Project" + ], + "operationId": "board_update", + "parameters": [ + { + "name": "project_name", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "board_id", + "in": "path", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/UpdateBoardParams" + } + } + }, + 
"required": true + }, + "responses": { + "200": { + "description": "Update board", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_BoardResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/projects/{project_name}/boards/{board_id}/columns": { + "post": { + "tags": [ + "Project" + ], + "operationId": "column_create", + "parameters": [ + { + "name": "project_name", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "board_id", + "in": "path", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/CreateColumnParams" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Create column", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ColumnResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Board not found" + } + } + } + }, + "/api/projects/{project_name}/cards": { + "post": { + "tags": [ + "Project" + ], + "operationId": "card_create", + "parameters": [ + { + "name": "project_name", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/CreateCardParams" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Create card", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_CardResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + } + } + } + }, + "/api/projects/{project_name}/cards/{card_id}": { + "delete": { + "tags": [ + "Project" + ], + "operationId": "card_delete", + "parameters": [ + { + "name": "project_name", 
+ "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "card_id", + "in": "path", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } + } + ], + "responses": { + "200": { + "description": "Delete card" + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Not found" + } + } + }, + "patch": { + "tags": [ + "Project" + ], + "operationId": "card_update", + "parameters": [ + { + "name": "project_name", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "card_id", + "in": "path", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/UpdateCardParams" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Update card", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_CardResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/projects/{project_name}/cards/{card_id}/move": { + "post": { + "tags": [ + "Project" + ], + "operationId": "card_move", + "parameters": [ + { + "name": "project_name", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "card_id", + "in": "path", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/MoveCardParams" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Move card", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_CardResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Not found" + } + } + } + }, + 
"/api/projects/{project_name}/columns/{column_id}": { + "delete": { + "tags": [ + "Project" + ], + "operationId": "column_delete", + "parameters": [ + { + "name": "project_name", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "column_id", + "in": "path", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } + } + ], + "responses": { + "200": { + "description": "Delete column" + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Not found" + } + } + }, + "patch": { + "tags": [ + "Project" + ], + "operationId": "column_update", + "parameters": [ + { + "name": "project_name", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "column_id", + "in": "path", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/UpdateColumnParams" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Update column", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ColumnResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/projects/{project_name}/invitations": { + "get": { + "tags": [ + "Project" + ], + "operationId": "project_invitations", + "parameters": [ + { + "name": "project_name", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "page", + "in": "query", + "required": false, + "schema": { + "type": "integer", + "format": "int64", + "minimum": 0 + } + }, + { + "name": "per_page", + "in": "query", + "required": false, + "schema": { + "type": "integer", + "format": "int64", + "minimum": 0 + } + } + ], + "responses": { + "200": { + "description": "List project invitations", + "content": { + "application/json": { + 
"schema": { + "$ref": "#/components/schemas/ApiResponse_InvitationListResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "403": { + "description": "Forbidden" + }, + "404": { + "description": "Not found" + } + } + }, + "post": { + "tags": [ + "Project" + ], + "operationId": "project_invite_user", + "parameters": [ + { + "name": "project_name", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/InviteUserRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Invite user to project" + }, + "401": { + "description": "Unauthorized" + }, + "403": { + "description": "Forbidden" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/projects/{project_name}/invitations/accept": { + "post": { + "tags": [ + "Project" + ], + "operationId": "project_accept_invitation", + "parameters": [ + { + "name": "project_name", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Accept project invitation" + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/projects/{project_name}/invitations/reject": { + "post": { + "tags": [ + "Project" + ], + "operationId": "project_reject_invitation", + "parameters": [ + { + "name": "project_name", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Reject project invitation" + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/projects/{project_name}/invitations/{user_id}": { + "delete": { + "tags": [ + "Project" + ], + "operationId": "project_cancel_invitation", + "parameters": [ + { + "name": "project_name", + "in": "path", + "required": true, + "schema": { + 
"type": "string" + } + }, + { + "name": "user_id", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Cancel project invitation" + }, + "401": { + "description": "Unauthorized" + }, + "403": { + "description": "Forbidden" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/projects/{project_name}/join-requests": { + "get": { + "tags": [ + "Project" + ], + "operationId": "project_join_requests", + "parameters": [ + { + "name": "project_name", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "status", + "in": "query", + "required": false, + "schema": { + "type": "string" + } + }, + { + "name": "page", + "in": "query", + "required": false, + "schema": { + "type": "integer", + "format": "int64", + "minimum": 0 + } + }, + { + "name": "per_page", + "in": "query", + "required": false, + "schema": { + "type": "integer", + "format": "int64", + "minimum": 0 + } + } + ], + "responses": { + "200": { + "description": "List join requests", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_JoinRequestListResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "403": { + "description": "Forbidden" + }, + "404": { + "description": "Not found" + } + } + }, + "post": { + "tags": [ + "Project" + ], + "operationId": "project_submit_join_request", + "parameters": [ + { + "name": "project_name", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/SubmitJoinRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Submit join request" + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/projects/{project_name}/join-requests/{request_id}": { + "delete": { + 
"tags": [ + "Project" + ], + "operationId": "project_cancel_join_request", + "parameters": [ + { + "name": "project_name", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "request_id", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + } + ], + "responses": { + "200": { + "description": "Cancel join request" + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Not found" + } + } + }, + "patch": { + "tags": [ + "Project" + ], + "operationId": "project_process_join_request", + "parameters": [ + { + "name": "project_name", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "request_id", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProcessJoinRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Process join request" + }, + "401": { + "description": "Unauthorized" + }, + "403": { + "description": "Forbidden" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/projects/{project_name}/join-requests/{request_id}/answers": { + "get": { + "tags": [ + "Project" + ], + "operationId": "project_join_answers", + "parameters": [ + { + "name": "project_name", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "request_id", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + } + ], + "responses": { + "200": { + "description": "Get join request answers", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_JoinAnswersListResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "403": { + "description": "Forbidden" + }, + "404": { + "description": "Not found" + } + 
} + }, + "post": { + "tags": [ + "Project" + ], + "operationId": "project_submit_join_answers", + "parameters": [ + { + "name": "project_name", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "request_id", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "type": "array", + "items": { + "$ref": "#/components/schemas/AnswerRequest" + } + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Submit join request answers" + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/projects/{project_name}/join-settings": { + "get": { + "tags": [ + "Project" + ], + "operationId": "project_join_settings", + "parameters": [ + { + "name": "project_name", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Get join settings", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_JoinSettingsResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Not found" + } + } + }, + "patch": { + "tags": [ + "Project" + ], + "operationId": "project_update_join_settings", + "parameters": [ + { + "name": "project_name", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/UpdateJoinSettingsRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Update join settings", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_JoinSettingsResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "403": { + "description": "Forbidden" + }, + "404": { + 
"description": "Not found" + } + } + } + }, + "/api/projects/{project_name}/labels": { + "get": { + "tags": [ + "Project" + ], + "operationId": "project_labels", + "parameters": [ + { + "name": "project_name", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "List project labels", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_LabelListResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Not found" + } + } + }, + "post": { + "tags": [ + "Project" + ], + "operationId": "project_create_label", + "parameters": [ + { + "name": "project_name", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/CreateLabelParams" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Create project label", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_LabelResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "403": { + "description": "Forbidden" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/projects/{project_name}/labels/{label_id}": { + "get": { + "tags": [ + "Project" + ], + "operationId": "project_get_label", + "parameters": [ + { + "name": "project_name", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "label_id", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + } + ], + "responses": { + "200": { + "description": "Get project label", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_LabelResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Not found" + } + } + }, + 
"delete": { + "tags": [ + "Project" + ], + "operationId": "project_delete_label", + "parameters": [ + { + "name": "project_name", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "label_id", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + } + ], + "responses": { + "200": { + "description": "Delete project label" + }, + "401": { + "description": "Unauthorized" + }, + "403": { + "description": "Forbidden" + }, + "404": { + "description": "Not found" + } + } + }, + "patch": { + "tags": [ + "Project" + ], + "operationId": "project_update_label", + "parameters": [ + { + "name": "project_name", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "label_id", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/UpdateLabelParams" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Update project label", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_LabelResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "403": { + "description": "Forbidden" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/projects/{project_name}/like": { + "get": { + "tags": [ + "Project" + ], + "operationId": "project_is_like", + "parameters": [ + { + "name": "project_name", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Check if user likes project", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_IsLikeResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Not found" + } + } + }, + "post": { + "tags": [ + "Project" + ], 
+ "operationId": "project_like", + "parameters": [ + { + "name": "project_name", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Like project" + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Not found" + } + } + }, + "delete": { + "tags": [ + "Project" + ], + "operationId": "project_unlike", + "parameters": [ + { + "name": "project_name", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Unlike project" + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/projects/{project_name}/likes/count": { + "get": { + "tags": [ + "Project" + ], + "operationId": "project_likes_count", + "parameters": [ + { + "name": "project_name", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Get like count" + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/projects/{project_name}/likes/users": { + "get": { + "tags": [ + "Project" + ], + "operationId": "project_like_users", + "parameters": [ + { + "name": "project_name", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "List users who liked project", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_Vec_LikeUserInfo" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/projects/{project_name}/members": { + "get": { + "tags": [ + "Project" + ], + "operationId": "project_members", + "parameters": [ + { + "name": "project_name", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "page", + "in": "query", + 
"required": false, + "schema": { + "type": "integer", + "format": "int64", + "minimum": 0 + } + }, + { + "name": "per_page", + "in": "query", + "required": false, + "schema": { + "type": "integer", + "format": "int64", + "minimum": 0 + } + } + ], + "responses": { + "200": { + "description": "List project members", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_MemberListResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/projects/{project_name}/members/role": { + "patch": { + "tags": [ + "Project" + ], + "operationId": "project_update_member_role", + "parameters": [ + { + "name": "project_name", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/UpdateMemberRoleRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Update member role" + }, + "401": { + "description": "Unauthorized" + }, + "403": { + "description": "Forbidden" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/projects/{project_name}/members/{user_id}": { + "delete": { + "tags": [ + "Project" + ], + "operationId": "project_remove_member", + "parameters": [ + { + "name": "project_name", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "user_id", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Remove member from project" + }, + "401": { + "description": "Unauthorized" + }, + "403": { + "description": "Forbidden" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/projects/{project_name}/repos": { + "get": { + "tags": [ + "Project" + ], + "operationId": "project_repos", + "parameters": [ + { + "name": "project_name", + "in": "path", + 
"required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Get project repositories", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ProjectRepositoryPagination" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Project not found" + } + } + }, + "post": { + "tags": [ + "Project" + ], + "operationId": "project_repo_create", + "parameters": [ + { + "name": "project_name", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProjectRepoCreateParams" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Create a repository", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ProjectRepoCreateResponse" + } + } + } + }, + "400": { + "description": "Bad request" + }, + "401": { + "description": "Unauthorized" + }, + "409": { + "description": "Repository name already exists" + } + } + } + }, + "/api/projects/{project_name}/settings/name": { + "patch": { + "tags": [ + "Project" + ], + "operationId": "project_exchange_name", + "parameters": [ + { + "name": "project_name", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ExchangeProjectName" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Update project name" + }, + "401": { + "description": "Unauthorized" + }, + "403": { + "description": "Forbidden" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/projects/{project_name}/settings/title": { + "patch": { + "tags": [ + "Project" + ], + "operationId": "project_exchange_title", + "parameters": [ + { + "name": "project_name", + "in": "path", + 
"required": true, + "schema": { + "type": "string" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ExchangeProjectTitle" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Update project title" + }, + "401": { + "description": "Unauthorized" + }, + "403": { + "description": "Forbidden" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/projects/{project_name}/settings/visibility": { + "patch": { + "tags": [ + "Project" + ], + "operationId": "project_exchange_visibility", + "parameters": [ + { + "name": "project_name", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ExchangeProjectVisibility" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Update project visibility" + }, + "401": { + "description": "Unauthorized" + }, + "403": { + "description": "Forbidden" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/projects/{project_name}/skills": { + "get": { + "tags": [ + "Skill" + ], + "operationId": "skill_list", + "parameters": [ + { + "name": "project_name", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "List skills", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_Vec_SkillResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Project not found" + } + } + }, + "post": { + "tags": [ + "Skill" + ], + "operationId": "skill_create", + "parameters": [ + { + "name": "project_name", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/CreateSkillRequest" + } + } 
+ }, + "required": true + }, + "responses": { + "200": { + "description": "Create skill", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_SkillResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "409": { + "description": "Skill already exists" + } + } + } + }, + "/api/projects/{project_name}/skills/scan": { + "post": { + "tags": [ + "Skill" + ], + "operationId": "skill_scan", + "parameters": [ + { + "name": "project_name", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Scan repos for skills", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ScanResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + } + } + } + }, + "/api/projects/{project_name}/skills/{slug}": { + "get": { + "tags": [ + "Skill" + ], + "operationId": "skill_get", + "parameters": [ + { + "name": "project_name", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "slug", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Get skill", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_SkillResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Not found" + } + } + }, + "delete": { + "tags": [ + "Skill" + ], + "operationId": "skill_delete", + "parameters": [ + { + "name": "project_name", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "slug", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Delete skill", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_DeleteSkillResponse" + } + } + } + }, + "401": { + 
"description": "Unauthorized" + }, + "404": { + "description": "Not found" + } + } + }, + "patch": { + "tags": [ + "Skill" + ], + "operationId": "skill_update", + "parameters": [ + { + "name": "project_name", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "slug", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/UpdateSkillRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Update skill", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_SkillResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/projects/{project_name}/watch": { + "get": { + "tags": [ + "Project" + ], + "operationId": "project_is_watch", + "parameters": [ + { + "name": "project_name", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Check if user watches project", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_IsWatchResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Not found" + } + } + }, + "post": { + "tags": [ + "Project" + ], + "operationId": "project_watch", + "parameters": [ + { + "name": "project_name", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Watch project" + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Not found" + } + } + }, + "delete": { + "tags": [ + "Project" + ], + "operationId": "project_unwatch", + "parameters": [ + { + "name": "project_name", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + 
"responses": { + "200": { + "description": "Unwatch project" + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/projects/{project_name}/watches/count": { + "get": { + "tags": [ + "Project" + ], + "operationId": "project_watches_count", + "parameters": [ + { + "name": "project_name", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Get watch count" + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/projects/{project_name}/watches/users": { + "get": { + "tags": [ + "Project" + ], + "operationId": "project_watch_users", + "parameters": [ + { + "name": "project_name", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "List users watching project", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_Vec_WatchUserInfo" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/projects/{source_project}/repos/{repo_name}/transfer": { + "post": { + "tags": [ + "Project" + ], + "operationId": "project_transfer_repo", + "parameters": [ + { + "name": "source_project", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo_name", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/TransferRepoParams" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Transfer repo to another project", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_TransferRepoResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "403": 
{ + "description": "Forbidden" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/repo_pr/{namespace}/{repo}/pulls": { + "get": { + "tags": [ + "PullRequest" + ], + "operationId": "pull_request_list", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Project namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "status", + "in": "query", + "required": false, + "schema": { + "type": "string" + } + }, + { + "name": "page", + "in": "query", + "required": false, + "schema": { + "type": "integer", + "format": "int64" + } + }, + { + "name": "per_page", + "in": "query", + "required": false, + "schema": { + "type": "integer", + "format": "int64" + } + } + ], + "responses": { + "200": { + "description": "List pull requests", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_PullRequestListResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Not found" + } + } + }, + "post": { + "tags": [ + "PullRequest" + ], + "operationId": "pull_request_create", + "parameters": [ + { + "name": "namespace", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/PullRequestCreateRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Create pull request", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_PullRequestResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "403": { + "description": "Forbidden" + }, + "404": { + "description": "Not found" 
+ } + } + } + }, + "/api/repo_pr/{namespace}/{repo}/pulls/summary": { + "get": { + "tags": [ + "PullRequest" + ], + "operationId": "pull_request_summary", + "parameters": [ + { + "name": "namespace", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Get pull request summary", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_PullRequestSummaryResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/repo_pr/{namespace}/{repo}/pulls/{number}": { + "get": { + "tags": [ + "PullRequest" + ], + "operationId": "pull_request_get", + "parameters": [ + { + "name": "namespace", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "number", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + } + ], + "responses": { + "200": { + "description": "Get pull request", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_PullRequestResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Not found" + } + } + }, + "delete": { + "tags": [ + "PullRequest" + ], + "operationId": "pull_request_delete", + "parameters": [ + { + "name": "namespace", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "number", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + } + ], + "responses": { + "200": { + "description": "Delete pull request" + }, 
+ "401": { + "description": "Unauthorized" + }, + "403": { + "description": "Forbidden" + }, + "404": { + "description": "Not found" + } + } + }, + "patch": { + "tags": [ + "PullRequest" + ], + "operationId": "pull_request_update", + "parameters": [ + { + "name": "namespace", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "number", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/PullRequestUpdateRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Update pull request", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_PullRequestResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "403": { + "description": "Forbidden" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/repo_pr/{namespace}/{repo}/pulls/{number}/close": { + "post": { + "tags": [ + "PullRequest" + ], + "operationId": "pull_request_close", + "parameters": [ + { + "name": "namespace", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "number", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + } + ], + "responses": { + "200": { + "description": "Close pull request", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_PullRequestResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "403": { + "description": "Forbidden" + }, + "404": { + "description": "Not found" + } + } + } + }, + 
"/api/repo_pr/{namespace}/{repo}/pulls/{number}/reopen": { + "post": { + "tags": [ + "PullRequest" + ], + "operationId": "pull_request_reopen", + "parameters": [ + { + "name": "namespace", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "number", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + } + ], + "responses": { + "200": { + "description": "Reopen pull request", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_PullRequestResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "403": { + "description": "Forbidden" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/repo_pr/{namespace}/{repo}/pulls/{pr_number}/comments": { + "get": { + "tags": [ + "PullRequest" + ], + "operationId": "review_comment_list", + "parameters": [ + { + "name": "namespace", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "pr_number", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + }, + { + "name": "path", + "in": "query", + "description": "Filter by file path", + "required": false, + "schema": { + "type": "string" + } + }, + { + "name": "resolved", + "in": "query", + "description": "Filter by resolved status", + "required": false, + "schema": { + "type": "boolean" + } + }, + { + "name": "file_only", + "in": "query", + "description": "Only inline comments (true) or only general comments (false)", + "required": false, + "schema": { + "type": "boolean" + } + } + ], + "responses": { + "200": { + "description": "List pull request review comments", + "content": { + "application/json": { + "schema": { + "$ref": 
"#/components/schemas/ApiResponse_ReviewCommentListResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Not found" + } + } + }, + "post": { + "tags": [ + "PullRequest" + ], + "operationId": "review_comment_create", + "parameters": [ + { + "name": "namespace", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "pr_number", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ReviewCommentCreateRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Create pull request review comment", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ReviewCommentResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "403": { + "description": "Forbidden" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/repo_pr/{namespace}/{repo}/pulls/{pr_number}/comments/{comment_id}": { + "delete": { + "tags": [ + "PullRequest" + ], + "operationId": "review_comment_delete", + "parameters": [ + { + "name": "namespace", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "pr_number", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + }, + { + "name": "comment_id", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + } + ], + "responses": { + "200": { + "description": "Delete pull request review comment" + }, + "401": { + "description": "Unauthorized" + }, + "403": { + "description": "Forbidden" + }, + "404": { 
+ "description": "Not found" + } + } + }, + "patch": { + "tags": [ + "PullRequest" + ], + "operationId": "review_comment_update", + "parameters": [ + { + "name": "namespace", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "pr_number", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + }, + { + "name": "comment_id", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ReviewCommentUpdateRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Update pull request review comment", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ReviewCommentResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "403": { + "description": "Forbidden" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/repo_pr/{namespace}/{repo}/pulls/{pr_number}/comments/{comment_id}/replies": { + "post": { + "tags": [ + "PullRequest" + ], + "operationId": "review_comment_reply", + "parameters": [ + { + "name": "namespace", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "pr_number", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + }, + { + "name": "comment_id", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ReviewCommentReplyRequest" + } + } + }, + "required": true + }, + "responses": { + "200": 
{ + "description": "Reply to a comment", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ReviewCommentResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "403": { + "description": "Forbidden" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/repo_pr/{namespace}/{repo}/pulls/{pr_number}/comments/{comment_id}/resolve": { + "put": { + "tags": [ + "PullRequest" + ], + "operationId": "review_comment_resolve", + "parameters": [ + { + "name": "namespace", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "pr_number", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + }, + { + "name": "comment_id", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + } + ], + "responses": { + "200": { + "description": "Mark comment as resolved", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ReviewCommentResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "403": { + "description": "Forbidden" + }, + "404": { + "description": "Not found" + } + } + }, + "delete": { + "tags": [ + "PullRequest" + ], + "operationId": "review_comment_unresolve", + "parameters": [ + { + "name": "namespace", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "pr_number", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + }, + { + "name": "comment_id", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + } + ], + "responses": { + "200": { + "description": "Mark comment as unresolved", + 
"content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ReviewCommentResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "403": { + "description": "Forbidden" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/repo_pr/{namespace}/{repo}/pulls/{pr_number}/commits": { + "get": { + "tags": [ + "PullRequest" + ], + "operationId": "pr_commits_list", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Project namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "pr_number", + "in": "path", + "description": "Pull request number", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + } + ], + "responses": { + "200": { + "description": "List commits in a pull request", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_PrCommitsListResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/repo_pr/{namespace}/{repo}/pulls/{pr_number}/conflicts": { + "get": { + "tags": [ + "PullRequest" + ], + "operationId": "merge_conflict_check", + "parameters": [ + { + "name": "namespace", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "pr_number", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + } + ], + "responses": { + "200": { + "description": "Check merge conflicts", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_MergeConflictResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "404": { + 
"description": "Not found" + } + } + } + }, + "/api/repo_pr/{namespace}/{repo}/pulls/{pr_number}/diff/side-by-side": { + "get": { + "tags": [ + "PullRequest" + ], + "operationId": "pr_diff_side_by_side", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Project namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "pr_number", + "in": "path", + "description": "Pull request number", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + } + ], + "responses": { + "200": { + "description": "Side-by-side diff for a pull request", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_SideBySideDiffResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/repo_pr/{namespace}/{repo}/pulls/{pr_number}/merge": { + "get": { + "tags": [ + "PullRequest" + ], + "operationId": "merge_analysis", + "parameters": [ + { + "name": "namespace", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "pr_number", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + } + ], + "responses": { + "200": { + "description": "Get merge analysis", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_MergeAnalysisResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Not found" + } + } + }, + "post": { + "tags": [ + "PullRequest" + ], + "operationId": "merge_execute", + "parameters": [ + { + "name": "namespace", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + 
}, + { + "name": "repo", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "pr_number", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/MergeRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Execute merge", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_MergeResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "403": { + "description": "Forbidden" + }, + "404": { + "description": "Not found" + }, + "409": { + "description": "Conflict" + } + } + } + }, + "/api/repo_pr/{namespace}/{repo}/pulls/{pr_number}/merge/abort": { + "post": { + "tags": [ + "PullRequest" + ], + "operationId": "merge_abort", + "parameters": [ + { + "name": "namespace", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "pr_number", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + } + ], + "responses": { + "200": { + "description": "Abort merge" + }, + "401": { + "description": "Unauthorized" + }, + "403": { + "description": "Forbidden" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/repo_pr/{namespace}/{repo}/pulls/{pr_number}/merge/in_progress": { + "get": { + "tags": [ + "PullRequest" + ], + "operationId": "merge_is_in_progress", + "parameters": [ + { + "name": "namespace", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "pr_number", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + } + 
], + "responses": { + "200": { + "description": "Check if merge is in progress" + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/repo_pr/{namespace}/{repo}/pulls/{pr_number}/review-requests": { + "get": { + "tags": [ + "PullRequest" + ], + "operationId": "review_request_list", + "parameters": [ + { + "name": "namespace", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "pr_number", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + } + ], + "responses": { + "200": { + "description": "List review requests for a pull request", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ReviewRequestListResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Not found" + } + } + }, + "post": { + "tags": [ + "PullRequest" + ], + "operationId": "review_request_create", + "parameters": [ + { + "name": "namespace", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "pr_number", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ReviewRequestCreateRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Create or update a review request", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ReviewRequestResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "403": { + "description": "Forbidden" + }, + "404": { + "description": "Not found" + } + 
} + } + }, + "/api/repo_pr/{namespace}/{repo}/pulls/{pr_number}/review-requests/{reviewer}": { + "delete": { + "tags": [ + "PullRequest" + ], + "operationId": "review_request_delete", + "parameters": [ + { + "name": "namespace", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "pr_number", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + }, + { + "name": "reviewer", + "in": "path", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } + } + ], + "responses": { + "200": { + "description": "Delete (cancel) a review request" + }, + "401": { + "description": "Unauthorized" + }, + "403": { + "description": "Forbidden" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/repo_pr/{namespace}/{repo}/pulls/{pr_number}/review-requests/{reviewer}/dismiss": { + "post": { + "tags": [ + "PullRequest" + ], + "operationId": "review_request_dismiss", + "parameters": [ + { + "name": "namespace", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "pr_number", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + }, + { + "name": "reviewer", + "in": "path", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } + } + ], + "responses": { + "200": { + "description": "Dismiss a review request", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ReviewRequestResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "403": { + "description": "Forbidden" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/repo_pr/{namespace}/{repo}/pulls/{pr_number}/reviews": { + "get": { + 
"tags": [ + "PullRequest" + ], + "operationId": "review_list", + "parameters": [ + { + "name": "namespace", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "pr_number", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + } + ], + "responses": { + "200": { + "description": "List pull request reviews", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ReviewListResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Not found" + } + } + }, + "post": { + "tags": [ + "PullRequest" + ], + "operationId": "review_submit", + "parameters": [ + { + "name": "namespace", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "pr_number", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ReviewSubmitRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Submit pull request review", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ReviewResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "403": { + "description": "Forbidden" + }, + "404": { + "description": "Not found" + } + } + }, + "patch": { + "tags": [ + "PullRequest" + ], + "operationId": "review_update", + "parameters": [ + { + "name": "namespace", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": 
"pr_number", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ReviewUpdateRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Update pull request review", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ReviewResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "403": { + "description": "Forbidden" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/repo_pr/{namespace}/{repo}/pulls/{pr_number}/reviews/{reviewer_id}": { + "delete": { + "tags": [ + "PullRequest" + ], + "operationId": "review_delete", + "parameters": [ + { + "name": "namespace", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "pr_number", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + }, + { + "name": "reviewer_id", + "in": "path", + "description": "Reviewer UUID", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Delete pull request review" + }, + "401": { + "description": "Unauthorized" + }, + "403": { + "description": "Forbidden" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/repos/{namespace}/{repo}/branch-protections": { + "get": { + "tags": [ + "Git" + ], + "operationId": "branch_protection_list", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Project namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": 
"List branch protection rules", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_Vec_BranchProtectionResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + }, + "post": { + "tags": [ + "Git" + ], + "operationId": "branch_protection_create", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Project namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/BranchProtectionCreateRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Create a branch protection rule", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_BranchProtectionResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/branch-protections/check-approvals": { + "get": { + "tags": [ + "Git" + ], + "operationId": "branch_protection_check_approvals", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Project namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "pr_number", + "in": "query", + "description": "Pull request number", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + } + ], + "responses": { + "200": { + "description": "Check approval 
count against branch protection", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApprovalCheckResult" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/branch-protections/{id}": { + "get": { + "tags": [ + "Git" + ], + "operationId": "branch_protection_get", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Project namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "id", + "in": "path", + "description": "Rule id", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + } + ], + "responses": { + "200": { + "description": "Get a branch protection rule", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_BranchProtectionResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + }, + "delete": { + "tags": [ + "Git" + ], + "operationId": "branch_protection_delete", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Project namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, 
+ "schema": { + "type": "string" + } + }, + { + "name": "id", + "in": "path", + "description": "Rule id", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + } + ], + "responses": { + "200": { + "description": "Delete a branch protection rule" + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + }, + "patch": { + "tags": [ + "Git" + ], + "operationId": "branch_protection_update", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Project namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "id", + "in": "path", + "description": "Rule id", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/BranchProtectionUpdateRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Update a branch protection rule", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_BranchProtectionResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git": { + "patch": { + "tags": [ + "Git" + ], + "operationId": 
"git_update_repo", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Project namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/GitUpdateRepoRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Update repository settings" + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/archive": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_archive", + "parameters": [ + { + "name": "namespace", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "commit_oid", + "in": "query", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "format", + "in": "query", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "prefix", + "in": "query", + "required": false, + "schema": { + "type": "string" + } + }, + { + "name": "max_depth", + "in": "query", + "required": false, + "schema": { + "type": "integer", + "minimum": 0 + } + }, + { + "name": "path_filter", + "in": "query", + "required": false, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Get archive", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ArchiveResponse" + } + } + } + }, + "401": { 
+ "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/archive/cached": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_archive_cached", + "parameters": [ + { + "name": "namespace", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "commit_oid", + "in": "query", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "format", + "in": "query", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "prefix", + "in": "query", + "required": false, + "schema": { + "type": "string" + } + }, + { + "name": "max_depth", + "in": "query", + "required": false, + "schema": { + "type": "integer", + "minimum": 0 + } + }, + { + "name": "path_filter", + "in": "query", + "required": false, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Check if archive is cached", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ArchiveCachedResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/archive/invalidate": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_archive_invalidate", + "parameters": [ + { + "name": "namespace", + "in": "path", + 
"required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "commit_oid", + "in": "query", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "format", + "in": "query", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "prefix", + "in": "query", + "required": false, + "schema": { + "type": "string" + } + }, + { + "name": "max_depth", + "in": "query", + "required": false, + "schema": { + "type": "integer", + "minimum": 0 + } + }, + { + "name": "path_filter", + "in": "query", + "required": false, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Invalidate archive cache", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ArchiveInvalidateResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/archive/invalidate/{commit_oid}": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_archive_invalidate_all", + "parameters": [ + { + "name": "namespace", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "commit_oid", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Invalidate all archive caches for commit", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ArchiveInvalidateAllResponse" + } + } + } + }, + 
"401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/archive/list": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_archive_list", + "parameters": [ + { + "name": "namespace", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "commit_oid", + "in": "query", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "format", + "in": "query", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "prefix", + "in": "query", + "required": false, + "schema": { + "type": "string" + } + }, + { + "name": "max_depth", + "in": "query", + "required": false, + "schema": { + "type": "integer", + "minimum": 0 + } + }, + { + "name": "path_filter", + "in": "query", + "required": false, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "List archive entries", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ArchiveListResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/archive/summary": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_archive_summary", + "parameters": [ + { + "name": "namespace", + "in": "path", + 
"required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "commit_oid", + "in": "query", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "format", + "in": "query", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "prefix", + "in": "query", + "required": false, + "schema": { + "type": "string" + } + }, + { + "name": "max_depth", + "in": "query", + "required": false, + "schema": { + "type": "integer", + "minimum": 0 + } + }, + { + "name": "path_filter", + "in": "query", + "required": false, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Get archive summary", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ArchiveSummaryResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/blame/{commit_oid}/{tail:.*}": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_blame_file", + "parameters": [ + { + "name": "namespace", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "commit_oid", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "tail", + "in": "path", + "description": "File path within the repository", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "", + "content": { + "application/json": { + "schema": { + "type": 
"array", + "items": { + "$ref": "#/components/schemas/BlameHunkResponse" + } + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/blob": { + "post": { + "tags": [ + "Git" + ], + "operationId": "git_blob_create", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Project namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/BlobCreateRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Create blob", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_BlobCreateResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/blob/{oid}": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_blob_get", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Project namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "oid", + 
"in": "path", + "description": "Blob object ID", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Get blob info", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_BlobInfoResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/blob/{oid}/content": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_blob_content", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Project namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "oid", + "in": "path", + "description": "Blob object ID", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Get blob content", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_BlobContentResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/blob/{oid}/exists": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_blob_exists", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Project namespace", + 
"required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "oid", + "in": "path", + "description": "Blob object ID", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Check blob exists", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_BlobExistsResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/blob/{oid}/is-binary": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_blob_is_binary", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Project namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "oid", + "in": "path", + "description": "Blob object ID", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Check if blob is binary", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_BlobIsBinaryResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } 
+ }, + "/api/repos/{namespace}/{repo}/git/blob/{oid}/size": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_blob_size", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Project namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "oid", + "in": "path", + "description": "Blob object ID", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Get blob size", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_BlobSizeResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/branches": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_branch_list", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Project namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "List branches", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_Vec_BranchInfoResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + 
"$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + }, + "post": { + "tags": [ + "Git" + ], + "operationId": "git_branch_create", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Project namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/BranchCreateRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Create branch", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_BranchInfoResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/branches/current": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_branch_current", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Project namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Get current branch", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_BranchInfoResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", 
+ "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/branches/diff": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_branch_diff", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Project namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Get branch diff", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_BranchDiffResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/branches/fast-forward/{target}": { + "post": { + "tags": [ + "Git" + ], + "operationId": "git_branch_fast_forward", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Project namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "target", + "in": "path", + "description": "Target branch name", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Fast-forward branch", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_BranchFastForwardResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + 
"schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/branches/is-ancestor": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_branch_is_ancestor", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Project namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Check if branch is ancestor", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_BranchIsAncestorResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/branches/is-conflicted": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_branch_is_conflicted", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Project namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Check if branch has conflicts", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_BranchIsConflictedResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + 
"application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/branches/is-detached": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_branch_is_detached", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Project namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Check if HEAD is detached", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_BranchIsDetachedResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/branches/is-merged": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_branch_is_merged", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Project namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Check if branch is merged", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_BranchIsMergedResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + 
"application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/branches/merge-base": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_branch_merge_base", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Project namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Get merge base", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_BranchMergeBaseResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/branches/move": { + "patch": { + "tags": [ + "Git" + ], + "operationId": "git_branch_move", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Project namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/BranchMoveRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Move branch", + "content": { + "application/json": { + "schema": { + "$ref": 
"#/components/schemas/ApiResponse_BranchInfoResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/branches/remote/{name}": { + "delete": { + "tags": [ + "Git" + ], + "operationId": "git_branch_delete_remote", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Project namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "name", + "in": "path", + "description": "Remote branch name", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Delete remote branch" + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/branches/rename": { + "patch": { + "tags": [ + "Git" + ], + "operationId": "git_branch_rename", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Project namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/BranchRenameRequest" + } + } + }, + 
"required": true + }, + "responses": { + "200": { + "description": "Rename branch", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_BranchInfoResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/branches/summary": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_branch_summary", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Project namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Get branch summary", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_BranchSummaryResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/branches/upstream": { + "patch": { + "tags": [ + "Git" + ], + "operationId": "git_branch_set_upstream", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Project namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + } + ], + 
"requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/BranchSetUpstreamRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Set upstream branch" + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/branches/{name}": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_branch_get", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Project namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "name", + "in": "path", + "description": "Branch name", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Get branch", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_BranchInfoResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + }, + "delete": { + "tags": [ + "Git" + ], + "operationId": "git_branch_delete", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Project namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + 
"required": true, + "schema": { + "type": "string" + } + }, + { + "name": "name", + "in": "path", + "description": "Branch name", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Delete branch" + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/branches/{name}/exists": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_branch_exists", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Project namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "name", + "in": "path", + "description": "Branch name", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Check branch exists", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_BranchExistsResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/branches/{name}/is-head": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_branch_is_head", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Project namespace", + "required": true, + "schema": { 
+ "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "name", + "in": "path", + "description": "Branch name", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Check if branch is HEAD", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_BranchIsHeadResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/branches/{name}/tracking-difference": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_branch_tracking_difference", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Project namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "name", + "in": "path", + "description": "Branch name", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Get tracking difference", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_BranchTrackingDiffResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } 
+ } + }, + "/api/repos/{namespace}/{repo}/git/branches/{name}/upstream": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_branch_upstream", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Project namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "name", + "in": "path", + "description": "Branch name", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Get upstream branch", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_BranchInfoResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/commits": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_commit_log", + "parameters": [ + { + "name": "namespace", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "rev", + "in": "query", + "required": false, + "schema": { + "type": "string" + } + }, + { + "name": "per_page", + "in": "query", + "required": false, + "schema": { + "type": "integer", + "minimum": 0 + } + }, + { + "name": "page", + "in": "query", + "required": false, + "schema": { + "type": "integer", + "minimum": 0 + } + } + ], + "responses": { + "200": { + "description": "Get commit log (paginated)", + "content": { + "application/json": { + "schema": { + "$ref": 
"#/components/schemas/ApiResponse_CommitLogResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + }, + "post": { + "tags": [ + "Git" + ], + "operationId": "git_commit_create", + "parameters": [ + { + "name": "namespace", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/CommitCreateRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Create commit", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_CommitCreateResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/commits/branches": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_commit_branches", + "parameters": [ + { + "name": "namespace", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Get commit branches", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_CommitBranchesResponse" + } + } + } + }, + "401": { + 
"description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/commits/count": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_commit_count", + "parameters": [ + { + "name": "namespace", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Get commit count", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_CommitCountResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/commits/graph": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_commit_graph", + "parameters": [ + { + "name": "namespace", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Get commit graph", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_CommitGraphResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + 
"content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/commits/graph-react": { + "get": { + "tags": [ + "Git" + ], + "summary": "Returns commit graph data enriched with full commit metadata (author, timestamp,\nparents, lane_index) for use with @gitgraph/react on the frontend.", + "operationId": "git_commit_graph_react", + "parameters": [ + { + "name": "namespace", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Get commit graph for gitgraph-react", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_CommitGraphReactResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/commits/reflog": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_commit_reflog", + "parameters": [ + { + "name": "namespace", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Get commit reflog", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_Vec_CommitReflogEntryResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": 
"Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/commits/resolve": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_commit_resolve_rev", + "parameters": [ + { + "name": "namespace", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Resolve revision to commit", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_String" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/commits/tags": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_commit_tags", + "parameters": [ + { + "name": "namespace", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Get commit tags", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_CommitTagsResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + 
"/api/repos/{namespace}/{repo}/git/commits/walk": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_commit_walk", + "parameters": [ + { + "name": "namespace", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Walk commits", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_Vec_CommitMetaResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/commits/{oid}": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_commit_get", + "parameters": [ + { + "name": "namespace", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "oid", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Get commit metadata", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_CommitMetaResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/commits/{oid}/amend": { + "patch": { + "tags": [ + "Git" + ], + 
"operationId": "git_commit_amend", + "parameters": [ + { + "name": "namespace", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "oid", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/CommitAmendRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Amend commit", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_CommitMetaResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/commits/{oid}/ancestors": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_commit_ancestors", + "parameters": [ + { + "name": "namespace", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "oid", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Get commit ancestors", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_Vec_CommitMetaResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { 
+ "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/commits/{oid}/author": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_commit_author", + "parameters": [ + { + "name": "namespace", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "oid", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Get commit author", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_CommitAuthorResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/commits/{oid}/cherry-pick": { + "post": { + "tags": [ + "Git" + ], + "operationId": "git_commit_cherry_pick", + "parameters": [ + { + "name": "namespace", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "oid", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/CommitCherryPickRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Cherry-pick commit", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_CommitMetaResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + 
"application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/commits/{oid}/cherry-pick/abort": { + "post": { + "tags": [ + "Git" + ], + "operationId": "git_commit_cherry_pick_abort", + "parameters": [ + { + "name": "namespace", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "oid", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/CommitCherryPickAbortRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Abort cherry-pick", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_bool" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/commits/{oid}/descendants": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_commit_descendants", + "parameters": [ + { + "name": "namespace", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "oid", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Get commit 
descendants", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_Vec_CommitMetaResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/commits/{oid}/exists": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_commit_exists", + "parameters": [ + { + "name": "namespace", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "oid", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Check if commit exists", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_CommitExistsResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/commits/{oid}/first-parent": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_commit_first_parent", + "parameters": [ + { + "name": "namespace", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "oid", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": 
{ + "200": { + "description": "Get commit first parent", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_Option_CommitMetaResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/commits/{oid}/is-commit": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_commit_is_commit", + "parameters": [ + { + "name": "namespace", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "oid", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Check if object is a commit", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_CommitIsCommitResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/commits/{oid}/is-merge": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_commit_is_merge", + "parameters": [ + { + "name": "namespace", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "oid", + "in": "path", + "required": true, + 
"schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Check if commit is a merge", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_CommitIsMergeResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/commits/{oid}/is-tip": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_commit_is_tip", + "parameters": [ + { + "name": "namespace", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "oid", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Check if commit is a tip", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_CommitIsTipResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/commits/{oid}/message": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_commit_message", + "parameters": [ + { + "name": "namespace", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "oid", + 
"in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Get commit message", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_CommitMessageResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/commits/{oid}/parent-count": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_commit_parent_count", + "parameters": [ + { + "name": "namespace", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "oid", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Get commit parent count", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_CommitParentCountResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/commits/{oid}/parent-ids": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_commit_parent_ids", + "parameters": [ + { + "name": "namespace", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "required": true, + "schema": { 
+ "type": "string" + } + }, + { + "name": "oid", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Get commit parent IDs", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_CommitParentIdsResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/commits/{oid}/parent/{index}": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_commit_parent", + "parameters": [ + { + "name": "namespace", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "index", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "minimum": 0 + } + }, + { + "name": "oid", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Get commit parent", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_CommitMetaResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/commits/{oid}/ref-count": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_commit_ref_count", + "parameters": [ + { + "name": 
"namespace", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "oid", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Get commit ref count", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_CommitRefCountResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/commits/{oid}/refs": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_commit_refs", + "parameters": [ + { + "name": "namespace", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "oid", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Get commit refs", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_Vec_CommitRefInfoResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/commits/{oid}/revert": { + "post": { + "tags": [ + "Git" + ], + "operationId": "git_commit_revert", + 
"parameters": [ + { + "name": "namespace", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "oid", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/CommitRevertRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Revert commit", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_CommitMetaResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/commits/{oid}/revert/abort": { + "post": { + "tags": [ + "Git" + ], + "operationId": "git_commit_revert_abort", + "parameters": [ + { + "name": "namespace", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "oid", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/CommitRevertAbortRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Abort revert", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_bool" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": 
"#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/commits/{oid}/short-id": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_commit_short_id", + "parameters": [ + { + "name": "namespace", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "oid", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Get commit short ID", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_CommitShortIdResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/commits/{oid}/summary": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_commit_summary", + "parameters": [ + { + "name": "namespace", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "oid", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Get commit summary", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_CommitSummaryResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + 
"application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/commits/{oid}/tree-id": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_commit_tree_id", + "parameters": [ + { + "name": "namespace", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "oid", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Get commit tree ID", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_CommitTreeIdResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/config/entries": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_config_entries", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Project namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "List repository config entries", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ConfigSnapshotResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + 
"content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/config/{key}": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_config_get", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Project namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "key", + "in": "path", + "description": "Config key", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Get repository config value", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_Value" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + }, + "put": { + "tags": [ + "Git" + ], + "operationId": "git_config_set", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Project namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "key", + "in": "path", + "description": "Config key", + "required": true, + "schema": { + "type": "string" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": 
"#/components/schemas/ConfigSetRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Set repository config value" + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + }, + "delete": { + "tags": [ + "Git" + ], + "operationId": "git_config_delete", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Project namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "key", + "in": "path", + "description": "Config key", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Delete repository config key" + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/config/{key}/has": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_config_has", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Project namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "key", + "in": "path", + "description": "Config key", + "required": true, + "schema": { + "type": "string" + } + } + ], + 
"responses": { + "200": { + "description": "Check if repository config key exists", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ConfigBoolResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/contributors": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_contributors", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Repository namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "List of contributors", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ContributorsResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/description": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_description_get", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Project namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": 
{ + "description": "Get repository description", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_DescriptionResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + }, + "put": { + "tags": [ + "Git" + ], + "operationId": "git_description_set", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Project namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/DescriptionQuery" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Set repository description", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_DescriptionResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + }, + "delete": { + "tags": [ + "Git" + ], + "operationId": "git_description_reset", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Project namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + } + ], + 
"responses": { + "200": { + "description": "Reset repository description", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_DescriptionResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/description/exists": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_description_exists", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Project namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Check if repository description exists", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_Value" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/diff": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_diff_tree_to_tree", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Repository namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "old_tree", + 
"in": "query", + "description": "Old tree OID (commit or tree SHA)", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "new_tree", + "in": "query", + "description": "New tree OID (commit or tree SHA)", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Tree to tree diff", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_DiffResultResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/diff/commit/{commit}": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_diff_commit_to_workdir", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Repository namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "commit", + "in": "path", + "description": "Commit identifier", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Commit to workdir diff", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_DiffResultResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + 
"/api/repos/{namespace}/{repo}/git/diff/commit/{commit}/index": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_diff_commit_to_index", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Repository namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "commit", + "in": "path", + "description": "Commit identifier", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Commit to index diff", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_DiffResultResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/diff/index": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_diff_index_to_tree", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Repository namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Index to tree diff", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_DiffResultResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { 
+ "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/diff/patch-id": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_diff_patch_id", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Repository namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Patch ID", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_DiffPatchIdResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/diff/side-by-side": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_diff_side_by_side", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Repository namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Side-by-side diff", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_SideBySideDiffResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + 
"$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/diff/stats": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_diff_stats", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Repository namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Diff statistics", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_DiffStatsResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/diff/workdir": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_diff_workdir_to_index", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Repository namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Workdir to index diff", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_DiffResultResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": 
"#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/merge/abort": { + "post": { + "tags": [ + "Git" + ], + "operationId": "git_merge_abort", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Project namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Abort an in-progress merge" + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/merge/analysis/{ref_name}/{their_oid}": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_merge_analysis_for_ref", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Project namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "ref_name", + "in": "path", + "description": "Reference name", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "their_oid", + "in": "path", + "description": "The OID to analyze merge against", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Perform merge analysis for a specific ref", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_MergeAnalysisResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + 
"schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/merge/analysis/{their_oid}": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_merge_analysis", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Project namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "their_oid", + "in": "path", + "description": "The OID to analyze merge against", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Perform merge analysis", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_MergeAnalysisResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/merge/base/{oid1}/{oid2}": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_merge_base", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Project namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "oid1", + "in": "path", + "description": "First commit OID", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": 
"oid2", + "in": "path", + "description": "Second commit OID", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Get merge base of two commits", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_String" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/merge/commits": { + "post": { + "tags": [ + "Git" + ], + "operationId": "git_merge_commits", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Project namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/MergeCommitsRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Merge commits" + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/merge/heads": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_mergehead_list", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Project namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": 
"path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "List merge heads", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_Vec_MergeheadInfoResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/merge/in-progress": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_merge_is_in_progress", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Project namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Check if merge is in progress", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_Value" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/merge/is-conflicted": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_merge_is_conflicted", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Project namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + 
"description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Check if merge has conflicts", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_Value" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/merge/trees": { + "post": { + "tags": [ + "Git" + ], + "operationId": "git_merge_trees", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Project namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/MergeTreesRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Merge trees" + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/readme": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_readme", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Project namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + 
"required": true, + "schema": { + "type": "string" + } + }, + { + "name": "ref", + "in": "query", + "description": "Git reference (branch, tag, commit). Defaults to HEAD.", + "required": false, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Get README content", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_GitReadmeResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/refs": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_ref_list", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Repository namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "List of refs", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_Vec_RefInfoResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + }, + "put": { + "tags": [ + "Git" + ], + "operationId": "git_ref_update", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Repository namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + } + ], + "requestBody": { + "content": { + 
"application/json": { + "schema": { + "$ref": "#/components/schemas/RefUpdateRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Ref updated", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_RefUpdateResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + }, + "post": { + "tags": [ + "Git" + ], + "operationId": "git_ref_create", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Repository namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/RefCreateRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Ref created", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_RefUpdateResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/refs/rename": { + "patch": { + "tags": [ + "Git" + ], + "operationId": "git_ref_rename", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Repository namespace", + "required": true, + "schema": { + 
"type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Ref renamed", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_RefInfoResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/refs/{name}": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_ref_get", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Repository namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "name", + "in": "path", + "description": "Ref name", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Ref info", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_RefInfoResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + }, + "delete": { + "tags": [ + "Git" + ], + "operationId": "git_ref_delete", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Repository namespace", + "required": true, + "schema": { + 
"type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "name", + "in": "path", + "description": "Ref name", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Ref deleted", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_RefDeleteResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/refs/{name}/exists": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_ref_exists", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Repository namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "name", + "in": "path", + "description": "Ref name", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Ref exists check", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_RefExistsResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/refs/{name}/target": { 
+ "get": { + "tags": [ + "Git" + ], + "operationId": "git_ref_target", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Repository namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "name", + "in": "path", + "description": "Ref name", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Ref target", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_RefTargetResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/star": { + "post": { + "tags": [ + "Git" + ], + "operationId": "git_star", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Project namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Star the repository" + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + }, + "delete": { + "tags": [ + "Git" + ], + "operationId": "git_unstar", + "parameters": [ + { + "name": "namespace", + "in": "path", + 
"description": "Project namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Unstar the repository" + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/star/count": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_star_count", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Project namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Get star count for the repository", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_StarCountResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/star/is-starred": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_is_starred", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Project namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository 
name", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Check if the current user has starred the repository", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_Value" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/star/users": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_star_user_list", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Project namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "page", + "in": "query", + "description": "Page number", + "required": false, + "schema": { + "type": "integer", + "format": "int64" + } + }, + { + "name": "per_page", + "in": "query", + "description": "Items per page", + "required": false, + "schema": { + "type": "integer", + "format": "int64" + } + } + ], + "responses": { + "200": { + "description": "List users who starred the repository", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_StarUserListResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/tags": { 
+ "get": { + "tags": [ + "Git" + ], + "operationId": "git_tag_list", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Project namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "List all tags", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_Vec_TagInfoResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + }, + "post": { + "tags": [ + "Git" + ], + "operationId": "git_tag_create", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Project namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/TagCreateRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Create an annotated tag", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_TagInfoResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + 
"/api/repos/{namespace}/{repo}/git/tags/count": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_tag_count", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Project namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Get tag count", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_TagCountResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/tags/lightweight": { + "post": { + "tags": [ + "Git" + ], + "operationId": "git_tag_create_lightweight", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Project namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/TagCreateLightweightRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Create a lightweight tag", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_TagInfoResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": 
{ + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/tags/message": { + "patch": { + "tags": [ + "Git" + ], + "operationId": "git_tag_update_message", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Project namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/TagUpdateMessageRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Update tag message", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_TagInfoResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/tags/names": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_tag_list_names", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Project namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "List all tag names", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_Vec_String" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": 
"#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/tags/rename": { + "patch": { + "tags": [ + "Git" + ], + "operationId": "git_tag_rename", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Project namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/TagRenameQuery" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Rename a tag", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_TagInfoResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/tags/summary": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_tag_summary", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Project namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Get tag summary", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_TagSummaryResponse" + } + } + } + }, + "401": { + 
"description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/tags/{name}": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_tag_get", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Project namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "name", + "in": "path", + "description": "Tag name", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Get a tag by name", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_TagInfoResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + }, + "delete": { + "tags": [ + "Git" + ], + "operationId": "git_tag_delete", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Project namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "name", + "in": "path", + "description": "Tag name", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Delete a tag" + }, + "401": 
{ + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/tags/{name}/exists": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_tag_exists", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Project namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "name", + "in": "path", + "description": "Tag name", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Check if a tag exists", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_TagExistsResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/tags/{name}/is-annotated": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_tag_is_annotated", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Project namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "name", + "in": "path", + "description": "Tag name", + "required": true, + "schema": { + "type": 
"string" + } + } + ], + "responses": { + "200": { + "description": "Check if a tag is annotated", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_TagIsAnnotatedResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/tags/{name}/message": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_tag_message", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Project namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "name", + "in": "path", + "description": "Tag name", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Get tag message", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_TagMessageResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/tags/{name}/tagger": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_tag_tagger", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Project namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": 
"repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "name", + "in": "path", + "description": "Tag name", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Get tag tagger info", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_TagTaggerResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/tags/{name}/target": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_tag_target", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Project namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "name", + "in": "path", + "description": "Tag name", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Get tag target OID", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_TagTargetResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/tree/diff-stats": { + "get": { + "tags": [ + "Git" + ], 
+ "operationId": "git_tree_diffstats", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Project namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Get tree diff stats", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_TreeDiffStatsResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/tree/{commit}/commit-entry-by-path": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_tree_entry_by_commit_path", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Project namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "commit", + "in": "path", + "description": "Commit OID", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Get tree entry by commit path", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_TreeEntryResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": 
"#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/tree/{oid}": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_tree_get", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Project namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "oid", + "in": "path", + "description": "Tree object ID", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Get tree info", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_TreeInfoResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/tree/{oid}/entry-by-path": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_tree_entry_by_path", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Project namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "oid", + "in": "path", + "description": "Tree object ID", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Get tree entry by path", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_TreeEntryResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + 
"content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/tree/{oid}/entry-count": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_tree_entry_count", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Project namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "oid", + "in": "path", + "description": "Tree object ID", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Get tree entry count", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_TreeEntryCountResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/tree/{oid}/entry/{index}": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_tree_entry", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Project namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "oid", + "in": "path", + "description": "Tree object ID", + "required": true, + "schema": { + "type": "string" + } + }, + { 
+ "name": "index", + "in": "path", + "description": "Entry index", + "required": true, + "schema": { + "type": "integer", + "minimum": 0 + } + } + ], + "responses": { + "200": { + "description": "Get tree entry by index", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_TreeEntryResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/tree/{oid}/exists": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_tree_exists", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Project namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "oid", + "in": "path", + "description": "Tree object ID", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Check if tree exists", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_TreeExistsResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/tree/{oid}/is-empty": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_tree_is_empty", + "parameters": [ + { + "name": "namespace", + 
"in": "path", + "description": "Project namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "oid", + "in": "path", + "description": "Tree object ID", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Check if tree is empty", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_TreeIsEmptyResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/tree/{oid}/list": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_tree_list", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Project namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "oid", + "in": "path", + "description": "Tree object ID", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "List tree entries", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_Vec_TreeEntryResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": 
"#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/watch": { + "post": { + "tags": [ + "Git" + ], + "operationId": "git_watch", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Project namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/GitWatchRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Watch the repository" + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + }, + "delete": { + "tags": [ + "Git" + ], + "operationId": "git_unwatch", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Project namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Unwatch the repository" + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/watch/count": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_watch_count", + "parameters": [ + { 
+ "name": "namespace", + "in": "path", + "description": "Project namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Get watch count for the repository", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_WatchCountResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/watch/is-watched": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_is_watched", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Project namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Check if the current user is watching the repository", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_Value" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/watch/users": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_watch_user_list", + "parameters": [ + { + "name": 
"namespace", + "in": "path", + "description": "Project namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "page", + "in": "query", + "description": "Page number", + "required": false, + "schema": { + "type": "integer", + "format": "int64" + } + }, + { + "name": "per_page", + "in": "query", + "description": "Items per page", + "required": false, + "schema": { + "type": "integer", + "format": "int64" + } + } + ], + "responses": { + "200": { + "description": "List users who are watching the repository", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_WatchUserListResponse" + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ApiError" + } + } + } + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/webhooks": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_webhook_list", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Repository namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "List webhooks", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_WebhookListResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Not found" + } + } + }, + "post": { + "tags": [ + "Git" + ], + "operationId": "git_webhook_create", + "parameters": [ + { + "name": 
"namespace", + "in": "path", + "description": "Repository namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/CreateWebhookParams" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Create webhook", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_WebhookResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "403": { + "description": "Forbidden" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/repos/{namespace}/{repo}/git/webhooks/{webhook_id}": { + "get": { + "tags": [ + "Git" + ], + "operationId": "git_webhook_get", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Repository namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "webhook_id", + "in": "path", + "description": "Webhook ID", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + } + ], + "responses": { + "200": { + "description": "Get webhook", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_WebhookResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Not found" + } + } + }, + "delete": { + "tags": [ + "Git" + ], + "operationId": "git_webhook_delete", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Repository namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": 
true, + "schema": { + "type": "string" + } + }, + { + "name": "webhook_id", + "in": "path", + "description": "Webhook ID", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + } + ], + "responses": { + "200": { + "description": "Delete webhook" + }, + "401": { + "description": "Unauthorized" + }, + "403": { + "description": "Forbidden" + }, + "404": { + "description": "Not found" + } + } + }, + "patch": { + "tags": [ + "Git" + ], + "operationId": "git_webhook_update", + "parameters": [ + { + "name": "namespace", + "in": "path", + "description": "Repository namespace", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "webhook_id", + "in": "path", + "description": "Webhook ID", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/UpdateWebhookParams" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Update webhook", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_WebhookResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "403": { + "description": "Forbidden" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/room-categories/{category_id}": { + "delete": { + "tags": [ + "Room" + ], + "operationId": "category_delete", + "parameters": [ + { + "name": "category_id", + "in": "path", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } + } + ], + "responses": { + "200": { + "description": "Delete room category" + }, + "401": { + "description": "Unauthorized" + }, + "403": { + "description": "Forbidden" + }, + "404": { + "description": "Not found" + } + } + }, + "patch": { + "tags": [ + "Room" + ], + 
"operationId": "category_update", + "parameters": [ + { + "name": "category_id", + "in": "path", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/RoomCategoryUpdateRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Update room category", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_RoomCategoryResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "403": { + "description": "Forbidden" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/rooms/{room_id}": { + "get": { + "tags": [ + "Room" + ], + "operationId": "room_get", + "parameters": [ + { + "name": "room_id", + "in": "path", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } + } + ], + "responses": { + "200": { + "description": "Get room", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_RoomResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Not found" + } + } + }, + "delete": { + "tags": [ + "Room" + ], + "operationId": "room_delete", + "parameters": [ + { + "name": "room_id", + "in": "path", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } + } + ], + "responses": { + "200": { + "description": "Delete room" + }, + "401": { + "description": "Unauthorized" + }, + "403": { + "description": "Forbidden" + }, + "404": { + "description": "Not found" + } + } + }, + "patch": { + "tags": [ + "Room" + ], + "operationId": "room_update", + "parameters": [ + { + "name": "room_id", + "in": "path", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/RoomUpdateRequest" + 
} + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Update room", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_RoomResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "403": { + "description": "Forbidden" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/rooms/{room_id}/ai": { + "get": { + "tags": [ + "Room" + ], + "operationId": "ai_list", + "parameters": [ + { + "name": "room_id", + "in": "path", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } + } + ], + "responses": { + "200": { + "description": "List room AI configurations", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_Vec_RoomAiResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Not found" + } + } + }, + "put": { + "tags": [ + "Room" + ], + "operationId": "ai_upsert", + "parameters": [ + { + "name": "room_id", + "in": "path", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/RoomAiUpsertRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Upsert room AI configuration", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_RoomAiResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "403": { + "description": "Forbidden" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/rooms/{room_id}/ai/{model_id}": { + "delete": { + "tags": [ + "Room" + ], + "operationId": "ai_delete", + "parameters": [ + { + "name": "room_id", + "in": "path", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } + }, + { + "name": "model_id", + "in": "path", + "required": true, + "schema": { + "type": 
"string", + "format": "uuid" + } + } + ], + "responses": { + "200": { + "description": "Delete room AI configuration" + }, + "401": { + "description": "Unauthorized" + }, + "403": { + "description": "Forbidden" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/rooms/{room_id}/members": { + "get": { + "tags": [ + "Room" + ], + "operationId": "member_list", + "parameters": [ + { + "name": "room_id", + "in": "path", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } + } + ], + "responses": { + "200": { + "description": "List room members", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_Vec_RoomMemberResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Not found" + } + } + }, + "post": { + "tags": [ + "Room" + ], + "operationId": "member_add", + "parameters": [ + { + "name": "room_id", + "in": "path", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/RoomMemberAddRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Add room member", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_RoomMemberResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "403": { + "description": "Forbidden" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/rooms/{room_id}/members/me/read-seq": { + "patch": { + "tags": [ + "Room" + ], + "operationId": "member_set_read_seq", + "parameters": [ + { + "name": "room_id", + "in": "path", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/RoomMemberReadSeqRequest" + } + } + }, + "required": true + }, + 
"responses": { + "200": { + "description": "Set member read sequence", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_RoomMemberResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/rooms/{room_id}/members/{user_id}": { + "delete": { + "tags": [ + "Room" + ], + "operationId": "member_remove", + "parameters": [ + { + "name": "room_id", + "in": "path", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } + }, + { + "name": "user_id", + "in": "path", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } + } + ], + "responses": { + "200": { + "description": "Remove room member" + }, + "401": { + "description": "Unauthorized" + }, + "403": { + "description": "Forbidden" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/rooms/{room_id}/members/{user_id}/role": { + "patch": { + "tags": [ + "Room" + ], + "operationId": "member_update_role", + "parameters": [ + { + "name": "room_id", + "in": "path", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } + }, + { + "name": "user_id", + "in": "path", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/RoomMemberRoleUpdateRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Update member role", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_RoomMemberResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "403": { + "description": "Forbidden" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/rooms/{room_id}/messages": { + "post": { + "tags": [ + "Room" + ], + "operationId": "message_create", + "parameters": [ + { + "name": "room_id", + "in": 
"path", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/RoomMessageCreateRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Create room message", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_RoomMessageResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "403": { + "description": "Forbidden" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/rooms/{room_id}/messages/search": { + "get": { + "tags": [ + "Room" + ], + "operationId": "message_search", + "parameters": [ + { + "name": "room_id", + "in": "path", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } + }, + { + "name": "q", + "in": "query", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "limit", + "in": "query", + "required": false, + "schema": { + "type": "integer", + "format": "int64", + "minimum": 0 + } + }, + { + "name": "offset", + "in": "query", + "required": false, + "schema": { + "type": "integer", + "format": "int64", + "minimum": 0 + } + } + ], + "responses": { + "200": { + "description": "Search messages", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_MessageSearchResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/rooms/{room_id}/messages/{message_id}": { + "patch": { + "tags": [ + "Room" + ], + "operationId": "message_update", + "parameters": [ + { + "name": "room_id", + "in": "path", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } + }, + { + "name": "message_id", + "in": "path", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } + } + ], + "requestBody": { + "content": { + 
"application/json": { + "schema": { + "$ref": "#/components/schemas/RoomMessageUpdateRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Update room message", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_RoomMessageResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "403": { + "description": "Forbidden" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/rooms/{room_id}/messages/{message_id}/edit-history": { + "get": { + "tags": [ + "Room" + ], + "operationId": "message_edit_history", + "parameters": [ + { + "name": "room_id", + "in": "path", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } + }, + { + "name": "message_id", + "in": "path", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } + } + ], + "responses": { + "200": { + "description": "Get message edit history", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_MessageEditHistoryResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/rooms/{room_id}/messages/{message_id}/pin": { + "post": { + "tags": [ + "Room" + ], + "operationId": "pin_add", + "parameters": [ + { + "name": "room_id", + "in": "path", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } + }, + { + "name": "message_id", + "in": "path", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } + } + ], + "responses": { + "200": { + "description": "Add room pin", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_RoomPinResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "403": { + "description": "Forbidden" + }, + "404": { + "description": "Not found" + } + } + }, + "delete": { + "tags": [ + "Room" + ], + 
"operationId": "pin_remove", + "parameters": [ + { + "name": "room_id", + "in": "path", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } + }, + { + "name": "message_id", + "in": "path", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } + } + ], + "responses": { + "200": { + "description": "Remove room pin" + }, + "401": { + "description": "Unauthorized" + }, + "403": { + "description": "Forbidden" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/rooms/{room_id}/messages/{message_id}/revoke": { + "post": { + "tags": [ + "Room" + ], + "operationId": "message_revoke", + "parameters": [ + { + "name": "room_id", + "in": "path", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } + }, + { + "name": "message_id", + "in": "path", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } + } + ], + "responses": { + "200": { + "description": "Revoke room message", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_RoomMessageResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "403": { + "description": "Forbidden" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/rooms/{room_id}/pins": { + "get": { + "tags": [ + "Room" + ], + "operationId": "pin_list", + "parameters": [ + { + "name": "room_id", + "in": "path", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } + } + ], + "responses": { + "200": { + "description": "List room pins", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_Vec_RoomPinResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/rooms/{room_id}/threads": { + "get": { + "tags": [ + "Room" + ], + "operationId": "thread_list", + "parameters": [ + { + "name": "room_id", + "in": "path", + "required": 
true, + "schema": { + "type": "string", + "format": "uuid" + } + } + ], + "responses": { + "200": { + "description": "List room threads", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_Vec_RoomThreadResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Not found" + } + } + }, + "post": { + "tags": [ + "Room" + ], + "operationId": "thread_create", + "parameters": [ + { + "name": "room_id", + "in": "path", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/RoomThreadCreateRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Create room thread", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_RoomThreadResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "403": { + "description": "Forbidden" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/rooms/{room_id}/threads/{thread_id}/messages": { + "get": { + "tags": [ + "Room" + ], + "operationId": "thread_messages", + "parameters": [ + { + "name": "room_id", + "in": "path", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } + }, + { + "name": "thread_id", + "in": "path", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } + }, + { + "name": "before_seq", + "in": "query", + "required": false, + "schema": { + "type": "integer", + "format": "int64" + } + }, + { + "name": "after_seq", + "in": "query", + "required": false, + "schema": { + "type": "integer", + "format": "int64" + } + }, + { + "name": "limit", + "in": "query", + "required": false, + "schema": { + "type": "integer", + "format": "int64", + "minimum": 0 + } + } + ], + "responses": { + "200": { + "description": "List thread messages", + "content": { + 
"application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_RoomMessageListResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/search": { + "get": { + "tags": [ + "Search" + ], + "operationId": "search", + "parameters": [ + { + "name": "q", + "in": "query", + "description": "Search keyword", + "required": true, + "schema": { + "type": "string", + "maxLength": 200, + "minLength": 1 + } + }, + { + "name": "type", + "in": "query", + "description": "Comma-separated types: projects,repos,issues,users. Default: all", + "required": false, + "schema": { + "type": "string" + } + }, + { + "name": "page", + "in": "query", + "description": "Page number, default 1", + "required": false, + "schema": { + "type": "integer", + "format": "int32", + "minimum": 0 + } + }, + { + "name": "per_page", + "in": "query", + "description": "Results per page, default 20, max 100", + "required": false, + "schema": { + "type": "integer", + "format": "int32", + "minimum": 0 + } + } + ], + "responses": { + "200": { + "description": "Search results", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_SearchResponse" + } + } + } + }, + "400": { + "description": "Bad request" + }, + "401": { + "description": "Unauthorized" + } + } + } + }, + "/api/users/me/access-keys": { + "get": { + "tags": [ + "User" + ], + "operationId": "list_access_keys", + "responses": { + "200": { + "description": "List access keys", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_AccessKeyListResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + } + } + }, + "post": { + "tags": [ + "User" + ], + "operationId": "create_access_key", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/CreateAccessKeyParams" + } + } + }, + "required": true + }, + "responses": { 
+ "200": { + "description": "Create access key", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_AccessKeyResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + } + } + } + }, + "/api/users/me/access-keys/{access_key_id}": { + "delete": { + "tags": [ + "User" + ], + "operationId": "delete_access_key", + "parameters": [ + { + "name": "access_key_id", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + } + ], + "responses": { + "200": { + "description": "Delete access key" + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/users/me/heatmap": { + "get": { + "tags": [ + "User" + ], + "operationId": "get_my_contribution_heatmap", + "responses": { + "200": { + "description": "Get my contribution heatmap", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ContributionHeatmapResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + } + } + } + }, + "/api/users/me/keys": { + "get": { + "tags": [ + "User" + ], + "operationId": "list_ssh_keys", + "responses": { + "200": { + "description": "List SSH keys", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_SshKeyListResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + } + } + }, + "post": { + "tags": [ + "User" + ], + "operationId": "add_ssh_key", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/AddSshKeyParams" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Add SSH key", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_SshKeyResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + } + } + } + }, + "/api/users/me/keys/{key_id}": { + "get": { + "tags": [ + "User" + ], + 
"operationId": "get_ssh_key", + "parameters": [ + { + "name": "key_id", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + } + ], + "responses": { + "200": { + "description": "Get SSH key", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_SshKeyResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Not found" + } + } + }, + "delete": { + "tags": [ + "User" + ], + "operationId": "delete_ssh_key", + "parameters": [ + { + "name": "key_id", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + } + ], + "responses": { + "200": { + "description": "Delete SSH key" + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Not found" + } + } + }, + "patch": { + "tags": [ + "User" + ], + "operationId": "update_ssh_key", + "parameters": [ + { + "name": "key_id", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/UpdateSshKeyParams" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Update SSH key", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_SshKeyResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/users/me/notifications/preferences": { + "get": { + "tags": [ + "User" + ], + "operationId": "get_notification_preferences", + "responses": { + "200": { + "description": "Get notification preferences", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_NotificationPreferencesResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + } + } + }, + "patch": { + "tags": [ + "User" + ], + 
"operationId": "update_notification_preferences", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/NotificationPreferencesParams" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Update notification preferences", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_NotificationPreferencesResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + } + } + } + }, + "/api/users/me/preferences": { + "get": { + "tags": [ + "User" + ], + "operationId": "get_preferences", + "responses": { + "200": { + "description": "Get user preferences", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_PreferencesResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + } + } + }, + "patch": { + "tags": [ + "User" + ], + "operationId": "update_preferences", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/PreferencesParams" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Update user preferences", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_PreferencesResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + } + } + } + }, + "/api/users/me/profile": { + "get": { + "tags": [ + "User" + ], + "operationId": "get_my_profile", + "responses": { + "200": { + "description": "Get current user profile", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ProfileResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + } + } + }, + "patch": { + "tags": [ + "User" + ], + "operationId": "update_my_profile", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/UpdateProfileParams" + } + } + }, + "required": true + }, + "responses": { + 
"200": { + "description": "Update current user profile", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ProfileResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + } + } + } + }, + "/api/users/me/projects": { + "get": { + "tags": [ + "User" + ], + "operationId": "get_current_user_projects", + "responses": { + "200": { + "description": "Get current user projects", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_UserProjectsResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + } + } + } + }, + "/api/users/me/repos": { + "get": { + "tags": [ + "User" + ], + "operationId": "get_current_user_repos", + "responses": { + "200": { + "description": "Get current user repos", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_UserReposResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + } + } + } + }, + "/api/users/{username}": { + "get": { + "tags": [ + "User" + ], + "operationId": "get_profile_by_username", + "parameters": [ + { + "name": "username", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Get user profile", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ProfileResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/users/{username}/follow": { + "get": { + "tags": [ + "User" + ], + "operationId": "is_subscribed_to_target", + "parameters": [ + { + "name": "username", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Check if following user" + }, + "401": { + "description": "Unauthorized" + } + } + }, + "post": { + "tags": [ + "User" + ], + "operationId": "subscribe_target", + 
"parameters": [ + { + "name": "username", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Follow user" + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Not found" + } + } + }, + "delete": { + "tags": [ + "User" + ], + "operationId": "unsubscribe_target", + "parameters": [ + { + "name": "username", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Unfollow user" + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/users/{username}/followers": { + "get": { + "tags": [ + "User" + ], + "operationId": "get_subscribers", + "parameters": [ + { + "name": "username", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "List followers", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_Vec_SubscriptionInfo" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/users/{username}/followers/count": { + "get": { + "tags": [ + "User" + ], + "operationId": "get_subscriber_count", + "parameters": [ + { + "name": "username", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Get follower count" + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/users/{username}/following/count": { + "get": { + "tags": [ + "User" + ], + "operationId": "get_subscription_count", + "parameters": [ + { + "name": "username", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Get following count" + }, + "401": { + "description": "Unauthorized" + 
}, + "404": { + "description": "Not found" + } + } + } + }, + "/api/users/{username}/heatmap": { + "get": { + "tags": [ + "User" + ], + "operationId": "get_contribution_heatmap", + "parameters": [ + { + "name": "username", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Get contribution heatmap", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_ContributionHeatmapResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/users/{username}/info": { + "get": { + "tags": [ + "User" + ], + "operationId": "get_user_info", + "parameters": [ + { + "name": "username", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Get user info", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_UserInfoExternal" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/users/{username}/projects": { + "get": { + "tags": [ + "User" + ], + "operationId": "get_user_projects", + "parameters": [ + { + "name": "username", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Get user projects", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_UserProjectsResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/users/{username}/repos": { + "get": { + "tags": [ + "User" + ], + "operationId": "get_user_repos", + "parameters": [ + { + "name": "username", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Get user repos", + 
"content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_UserReposResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Not found" + } + } + } + }, + "/api/workspaces": { + "post": { + "tags": [ + "Workspace" + ], + "operationId": "workspace_create", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/WorkspaceInitParams" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Create workspace", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_WorkspaceInfoResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "409": { + "description": "Slug or name already exists" + } + } + } + }, + "/api/workspaces/invitations/accept": { + "post": { + "tags": [ + "Workspace" + ], + "operationId": "workspace_accept_invitation", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/WorkspaceInviteAcceptParams" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Accept invitation", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_WorkspaceInfoResponse" + } + } + } + }, + "400": { + "description": "Invalid or expired token" + }, + "401": { + "description": "Unauthorized" + }, + "409": { + "description": "Already accepted" + } + } + } + }, + "/api/workspaces/me": { + "get": { + "tags": [ + "Workspace" + ], + "operationId": "workspace_list", + "responses": { + "200": { + "description": "List my workspaces", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_WorkspaceListResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + } + } + } + }, + "/api/workspaces/{slug}": { + "get": { + "tags": [ + "Workspace" + ], + "operationId": "workspace_info", + "parameters": [ + { 
+ "name": "slug", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Get workspace info", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_WorkspaceInfoResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Workspace not found" + } + } + }, + "delete": { + "tags": [ + "Workspace" + ], + "operationId": "workspace_delete", + "parameters": [ + { + "name": "slug", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Delete workspace" + }, + "401": { + "description": "Unauthorized" + }, + "403": { + "description": "Permission denied (owner only)" + }, + "404": { + "description": "Workspace not found" + } + } + }, + "patch": { + "tags": [ + "Workspace" + ], + "operationId": "workspace_update", + "parameters": [ + { + "name": "slug", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/WorkspaceUpdateParams" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Update workspace", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_WorkspaceInfoResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "403": { + "description": "Permission denied" + }, + "404": { + "description": "Workspace not found" + }, + "409": { + "description": "Name already exists" + } + } + } + }, + "/api/workspaces/{slug}/billing": { + "get": { + "tags": [ + "Workspace" + ], + "operationId": "workspace_billing_current", + "parameters": [ + { + "name": "slug", + "in": "path", + "description": "Workspace slug", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Get workspace 
billing info", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_WorkspaceBillingCurrentResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "403": { + "description": "Not a workspace member" + }, + "404": { + "description": "Workspace not found" + } + } + } + }, + "/api/workspaces/{slug}/billing/credits": { + "post": { + "tags": [ + "Workspace" + ], + "operationId": "workspace_billing_add_credit", + "parameters": [ + { + "name": "slug", + "in": "path", + "description": "Workspace slug", + "required": true, + "schema": { + "type": "string" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/WorkspaceBillingAddCreditParams" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Add credit to workspace billing", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_WorkspaceBillingCurrentResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "403": { + "description": "Not a workspace member" + } + } + } + }, + "/api/workspaces/{slug}/billing/history": { + "get": { + "tags": [ + "Workspace" + ], + "operationId": "workspace_billing_history", + "parameters": [ + { + "name": "slug", + "in": "path", + "description": "Workspace slug", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Get workspace billing history", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_WorkspaceBillingHistoryResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + } + } + } + }, + "/api/workspaces/{slug}/invitations": { + "get": { + "tags": [ + "Workspace" + ], + "operationId": "workspace_pending_invitations", + "parameters": [ + { + "name": "slug", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + 
"200": { + "description": "List pending invitations", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_Vec_PendingInvitationInfo" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "403": { + "description": "Permission denied" + }, + "404": { + "description": "Workspace not found" + } + } + }, + "post": { + "tags": [ + "Workspace" + ], + "operationId": "workspace_invite_member", + "parameters": [ + { + "name": "slug", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/WorkspaceInviteParams" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Send invitation" + }, + "401": { + "description": "Unauthorized" + }, + "403": { + "description": "Permission denied" + }, + "404": { + "description": "User not found" + }, + "409": { + "description": "Already a member" + } + } + } + }, + "/api/workspaces/{slug}/invitations/{user_id}": { + "delete": { + "tags": [ + "Workspace" + ], + "operationId": "workspace_cancel_invitation", + "parameters": [ + { + "name": "slug", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "user_id", + "in": "path", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } + } + ], + "responses": { + "200": { + "description": "Cancel invitation" + }, + "401": { + "description": "Unauthorized" + }, + "403": { + "description": "Permission denied" + }, + "404": { + "description": "Invitation not found" + } + } + } + }, + "/api/workspaces/{slug}/members": { + "get": { + "tags": [ + "Workspace" + ], + "operationId": "workspace_members", + "parameters": [ + { + "name": "slug", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "page", + "in": "query", + "required": false, + "schema": { + "type": "integer", + "format": "int64", + 
"minimum": 0 + } + }, + { + "name": "per_page", + "in": "query", + "required": false, + "schema": { + "type": "integer", + "format": "int64", + "minimum": 0 + } + } + ], + "responses": { + "200": { + "description": "List workspace members", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_WorkspaceMembersResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "403": { + "description": "Not a member" + }, + "404": { + "description": "Workspace not found" + } + } + } + }, + "/api/workspaces/{slug}/members/role": { + "patch": { + "tags": [ + "Workspace" + ], + "operationId": "workspace_update_member_role", + "parameters": [ + { + "name": "slug", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/UpdateRoleParams" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Update member role" + }, + "401": { + "description": "Unauthorized" + }, + "403": { + "description": "Permission denied" + }, + "404": { + "description": "Workspace or member not found" + } + } + } + }, + "/api/workspaces/{slug}/members/{user_id}": { + "delete": { + "tags": [ + "Workspace" + ], + "operationId": "workspace_remove_member", + "parameters": [ + { + "name": "slug", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "user_id", + "in": "path", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } + } + ], + "responses": { + "200": { + "description": "Remove member" + }, + "401": { + "description": "Unauthorized" + }, + "403": { + "description": "Permission denied" + }, + "404": { + "description": "Member not found" + } + } + } + }, + "/api/workspaces/{slug}/projects": { + "get": { + "tags": [ + "Workspace" + ], + "operationId": "workspace_projects", + "parameters": [ + { + "name": "slug", + "in": "path", 
+ "description": "Workspace slug", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "List workspace projects", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_WorkspaceProjectsResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "403": { + "description": "Not a workspace member" + }, + "404": { + "description": "Workspace not found" + } + } + } + }, + "/api/workspaces/{slug}/stats": { + "get": { + "tags": [ + "Workspace" + ], + "operationId": "workspace_stats", + "parameters": [ + { + "name": "slug", + "in": "path", + "description": "Workspace slug", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Get workspace stats", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiResponse_WorkspaceStatsResponse" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "403": { + "description": "Not a workspace member" + }, + "404": { + "description": "Workspace not found" + } + } + } + } + }, + "components": { + "schemas": { + "AccessKeyListResponse": { + "type": "object", + "required": [ + "access_keys", + "total" + ], + "properties": { + "access_keys": { + "type": "array", + "items": { + "$ref": "#/components/schemas/AccessKeyResponse" + } + }, + "total": { + "type": "integer", + "minimum": 0 + } + } + }, + "AccessKeyResponse": { + "type": "object", + "required": [ + "id", + "name", + "scopes", + "is_revoked", + "created_at" + ], + "properties": { + "id": { + "type": "integer", + "format": "int64" + }, + "name": { + "type": "string" + }, + "access_key": { + "type": [ + "string", + "null" + ] + }, + "scopes": { + "type": "array", + "items": { + "type": "string" + } + }, + "expires_at": { + "type": [ + "string", + "null" + ], + "format": "date-time" + }, + "is_revoked": { + "type": "boolean" + }, + "created_at": { + "type": "string", 
+ "format": "date-time" + } + } + }, + "ActivityLogListResponse": { + "type": "object", + "required": [ + "logs", + "total", + "page", + "per_page" + ], + "properties": { + "logs": { + "type": "array", + "items": { + "$ref": "#/components/schemas/ActivityLogResponse" + } + }, + "total": { + "type": "integer", + "format": "int64", + "minimum": 0 + }, + "page": { + "type": "integer", + "format": "int64", + "minimum": 0 + }, + "per_page": { + "type": "integer", + "format": "int64", + "minimum": 0 + }, + "user_role": { + "oneOf": [ + { + "type": "null" + }, + { + "$ref": "#/components/schemas/MemberRole", + "description": "The current user's role in the project." + } + ] + } + } + }, + "ActivityLogParams": { + "type": "object", + "required": [ + "event_type", + "title", + "is_private" + ], + "properties": { + "event_type": { + "type": "string" + }, + "title": { + "type": "string" + }, + "repo_id": { + "type": [ + "string", + "null" + ], + "format": "uuid" + }, + "content": { + "type": [ + "string", + "null" + ] + }, + "event_id": { + "type": [ + "string", + "null" + ], + "format": "uuid" + }, + "event_sub_id": { + "type": [ + "integer", + "null" + ], + "format": "int64" + }, + "metadata": {}, + "is_private": { + "type": "boolean" + } + } + }, + "ActivityLogResponse": { + "type": "object", + "required": [ + "id", + "project_uid", + "actor_uid", + "event_type", + "title", + "is_private", + "visibility", + "created_at" + ], + "properties": { + "id": { + "type": "integer", + "format": "int64" + }, + "project_uid": { + "type": "string", + "format": "uuid" + }, + "repo_uid": { + "type": "string", + "format": "uuid" + }, + "actor_uid": { + "type": "string", + "format": "uuid" + }, + "event_type": { + "type": "string" + }, + "title": { + "type": "string" + }, + "content": { + "type": "string" + }, + "is_private": { + "type": "boolean" + }, + "visibility": { + "type": "string" + }, + "created_at": { + "type": "string" + } + } + }, + "AddSshKeyParams": { + "type": "object", + 
"required": [ + "title", + "public_key" + ], + "properties": { + "title": { + "type": "string" + }, + "public_key": { + "type": "string" + } + } + }, + "AnswerRequest": { + "type": "object", + "required": [ + "question", + "answer" + ], + "properties": { + "question": { + "type": "string" + }, + "answer": { + "type": "string" + } + } + }, + "ApiError": { + "type": "object", + "required": [ + "code", + "error", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32", + "description": "Error numeric code" + }, + "error": { + "type": "string", + "description": "Error slug identifier" + }, + "message": { + "type": "string", + "description": "Human-readable error message" + } + } + }, + "ApiResponse_AccessKeyListResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "access_keys", + "total" + ], + "properties": { + "access_keys": { + "type": "array", + "items": { + "$ref": "#/components/schemas/AccessKeyResponse" + } + }, + "total": { + "type": "integer", + "minimum": 0 + } + } + } + } + }, + "ApiResponse_AccessKeyResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "id", + "name", + "scopes", + "is_revoked", + "created_at" + ], + "properties": { + "id": { + "type": "integer", + "format": "int64" + }, + "name": { + "type": "string" + }, + "access_key": { + "type": [ + "string", + "null" + ] + }, + "scopes": { + "type": "array", + "items": { + "type": "string" + } + }, + "expires_at": { + "type": [ + "string", + "null" + ], + "format": "date-time" + }, + "is_revoked": { + "type": "boolean" + }, + "created_at": { + "type": "string", + "format": "date-time" + } + } + } + } + 
}, + "ApiResponse_ActivityLogListResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "logs", + "total", + "page", + "per_page" + ], + "properties": { + "logs": { + "type": "array", + "items": { + "$ref": "#/components/schemas/ActivityLogResponse" + } + }, + "total": { + "type": "integer", + "format": "int64", + "minimum": 0 + }, + "page": { + "type": "integer", + "format": "int64", + "minimum": 0 + }, + "per_page": { + "type": "integer", + "format": "int64", + "minimum": 0 + }, + "user_role": { + "oneOf": [ + { + "type": "null" + }, + { + "$ref": "#/components/schemas/MemberRole", + "description": "The current user's role in the project." + } + ] + } + } + } + } + }, + "ApiResponse_ActivityLogResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "id", + "project_uid", + "actor_uid", + "event_type", + "title", + "is_private", + "visibility", + "created_at" + ], + "properties": { + "id": { + "type": "integer", + "format": "int64" + }, + "project_uid": { + "type": "string", + "format": "uuid" + }, + "repo_uid": { + "type": "string", + "format": "uuid" + }, + "actor_uid": { + "type": "string", + "format": "uuid" + }, + "event_type": { + "type": "string" + }, + "title": { + "type": "string" + }, + "content": { + "type": "string" + }, + "is_private": { + "type": "boolean" + }, + "visibility": { + "type": "string" + }, + "created_at": { + "type": "string" + } + } + } + } + }, + "ApiResponse_ApiError": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": 
"object", + "required": [ + "code", + "error", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32", + "description": "Error numeric code" + }, + "error": { + "type": "string", + "description": "Error slug identifier" + }, + "message": { + "type": "string", + "description": "Human-readable error message" + } + } + } + } + }, + "ApiResponse_ApprovalCheckResult": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "enough_approvals", + "approvals", + "required", + "reviewers" + ], + "properties": { + "enough_approvals": { + "type": "boolean" + }, + "approvals": { + "type": "integer", + "format": "int32" + }, + "required": { + "type": "integer", + "format": "int32" + }, + "reviewers": { + "type": "array", + "items": { + "$ref": "#/components/schemas/ReviewerInfo" + } + } + } + } + } + }, + "ApiResponse_ArchiveCachedResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "commit_oid", + "format", + "cached" + ], + "properties": { + "commit_oid": { + "type": "string" + }, + "format": { + "type": "string" + }, + "cached": { + "type": "boolean" + } + } + } + } + }, + "ApiResponse_ArchiveInvalidateAllResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "commit_oid", + "count" + ], + "properties": { + "commit_oid": { + "type": "string" + }, + "count": { + "type": "integer", + "minimum": 0 + } + } + } + } + }, + "ApiResponse_ArchiveInvalidateResponse": { + "type": "object", + "required": [ + "code", 
+ "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "commit_oid", + "format", + "invalidated" + ], + "properties": { + "commit_oid": { + "type": "string" + }, + "format": { + "type": "string" + }, + "invalidated": { + "type": "boolean" + } + } + } + } + }, + "ApiResponse_ArchiveListResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "commit_oid", + "entries", + "total_entries" + ], + "properties": { + "commit_oid": { + "type": "string" + }, + "entries": { + "type": "array", + "items": { + "$ref": "#/components/schemas/ArchiveEntryResponse" + } + }, + "total_entries": { + "type": "integer", + "minimum": 0 + } + } + } + } + }, + "ApiResponse_ArchiveResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "commit_oid", + "format", + "size", + "data" + ], + "properties": { + "commit_oid": { + "type": "string" + }, + "format": { + "type": "string" + }, + "size": { + "type": "integer", + "minimum": 0 + }, + "data": { + "type": "string" + } + } + } + } + }, + "ApiResponse_ArchiveSummaryResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "commit_oid", + "format", + "total_entries", + "total_size" + ], + "properties": { + "commit_oid": { + "type": "string" + }, + "format": { + "type": "string" + }, + "total_entries": { + "type": "integer", + "minimum": 0 + }, + "total_size": { + "type": 
"integer", + "format": "int64", + "minimum": 0 + } + } + } + } + }, + "ApiResponse_AuditLogResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "id", + "project_uid", + "actor_uid", + "action" + ], + "properties": { + "id": { + "type": "integer", + "format": "int64" + }, + "project_uid": { + "type": "string", + "format": "uuid" + }, + "actor_uid": { + "type": "string", + "format": "uuid" + }, + "action": { + "type": "string" + }, + "details": { + "type": "object" + } + } + } + } + }, + "ApiResponse_BlobContentResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "oid", + "size", + "is_binary", + "content" + ], + "properties": { + "oid": { + "type": "string" + }, + "size": { + "type": "integer", + "minimum": 0 + }, + "is_binary": { + "type": "boolean" + }, + "content": { + "type": "string" + } + } + } + } + }, + "ApiResponse_BlobCreateResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "oid", + "size" + ], + "properties": { + "oid": { + "type": "string" + }, + "size": { + "type": "integer", + "minimum": 0 + } + } + } + } + }, + "ApiResponse_BlobExistsResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "oid", + "exists" + ], + "properties": { + "oid": { + "type": "string" + }, + "exists": { + "type": "boolean" + } + } + } + } + 
}, + "ApiResponse_BlobInfoResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "oid", + "size", + "is_binary" + ], + "properties": { + "oid": { + "type": "string" + }, + "size": { + "type": "integer", + "minimum": 0 + }, + "is_binary": { + "type": "boolean" + } + } + } + } + }, + "ApiResponse_BlobIsBinaryResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "oid", + "is_binary" + ], + "properties": { + "oid": { + "type": "string" + }, + "is_binary": { + "type": "boolean" + } + } + } + } + }, + "ApiResponse_BlobSizeResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "oid", + "size" + ], + "properties": { + "oid": { + "type": "string" + }, + "size": { + "type": "integer", + "minimum": 0 + } + } + } + } + }, + "ApiResponse_BoardResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "id", + "project", + "name", + "created_by", + "created_at", + "updated_at" + ], + "properties": { + "id": { + "type": "string", + "format": "uuid" + }, + "project": { + "type": "string", + "format": "uuid" + }, + "name": { + "type": "string" + }, + "description": { + "type": [ + "string", + "null" + ] + }, + "created_by": { + "type": "string", + "format": "uuid" + }, + "created_at": { + "type": "string", + "format": "date-time" + }, + "updated_at": { + 
"type": "string", + "format": "date-time" + } + } + } + } + }, + "ApiResponse_BoardWithColumnsResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "board", + "columns" + ], + "properties": { + "board": { + "$ref": "#/components/schemas/BoardResponse" + }, + "columns": { + "type": "array", + "items": { + "$ref": "#/components/schemas/ColumnWithCardsResponse" + } + } + } + } + } + }, + "ApiResponse_BranchDiffResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "ahead", + "behind", + "diverged" + ], + "properties": { + "ahead": { + "type": "integer", + "minimum": 0 + }, + "behind": { + "type": "integer", + "minimum": 0 + }, + "diverged": { + "type": "boolean" + } + } + } + } + }, + "ApiResponse_BranchExistsResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "name", + "exists" + ], + "properties": { + "name": { + "type": "string" + }, + "exists": { + "type": "boolean" + } + } + } + } + }, + "ApiResponse_BranchFastForwardResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "oid" + ], + "properties": { + "oid": { + "type": "string" + } + } + } + } + }, + "ApiResponse_BranchInfoResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + 
"message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "name", + "oid", + "is_head", + "is_remote", + "is_current" + ], + "properties": { + "name": { + "type": "string" + }, + "oid": { + "type": "string" + }, + "is_head": { + "type": "boolean" + }, + "is_remote": { + "type": "boolean" + }, + "is_current": { + "type": "boolean" + }, + "upstream": { + "type": [ + "string", + "null" + ] + } + } + } + } + }, + "ApiResponse_BranchIsAncestorResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "child", + "ancestor", + "is_ancestor" + ], + "properties": { + "child": { + "type": "string" + }, + "ancestor": { + "type": "string" + }, + "is_ancestor": { + "type": "boolean" + } + } + } + } + }, + "ApiResponse_BranchIsConflictedResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "is_conflicted" + ], + "properties": { + "is_conflicted": { + "type": "boolean" + } + } + } + } + }, + "ApiResponse_BranchIsDetachedResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "is_detached" + ], + "properties": { + "is_detached": { + "type": "boolean" + } + } + } + } + }, + "ApiResponse_BranchIsHeadResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "name", + "is_head" + ], + "properties": { + "name": { + "type": "string" + 
}, + "is_head": { + "type": "boolean" + } + } + } + } + }, + "ApiResponse_BranchIsMergedResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "branch", + "into", + "is_merged" + ], + "properties": { + "branch": { + "type": "string" + }, + "into": { + "type": "string" + }, + "is_merged": { + "type": "boolean" + } + } + } + } + }, + "ApiResponse_BranchMergeBaseResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "branch1", + "branch2", + "base" + ], + "properties": { + "branch1": { + "type": "string" + }, + "branch2": { + "type": "string" + }, + "base": { + "type": "string" + } + } + } + } + }, + "ApiResponse_BranchProtectionResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "id", + "repo", + "branch", + "forbid_push", + "forbid_pull", + "forbid_merge", + "forbid_deletion", + "forbid_force_push", + "forbid_tag_push", + "required_approvals", + "dismiss_stale_reviews", + "require_linear_history", + "allow_fork_syncing" + ], + "properties": { + "id": { + "type": "integer", + "format": "int64" + }, + "repo": { + "type": "string", + "format": "uuid" + }, + "branch": { + "type": "string" + }, + "forbid_push": { + "type": "boolean" + }, + "forbid_pull": { + "type": "boolean" + }, + "forbid_merge": { + "type": "boolean" + }, + "forbid_deletion": { + "type": "boolean" + }, + "forbid_force_push": { + "type": "boolean" + }, + "forbid_tag_push": { + "type": "boolean" + }, + "required_approvals": { + "type": "integer", + 
"format": "int32" + }, + "dismiss_stale_reviews": { + "type": "boolean" + }, + "require_linear_history": { + "type": "boolean" + }, + "allow_fork_syncing": { + "type": "boolean" + } + } + } + } + }, + "ApiResponse_BranchSummaryResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "local_count", + "remote_count", + "all_count" + ], + "properties": { + "local_count": { + "type": "integer", + "minimum": 0 + }, + "remote_count": { + "type": "integer", + "minimum": 0 + }, + "all_count": { + "type": "integer", + "minimum": 0 + } + } + } + } + }, + "ApiResponse_BranchTrackingDiffResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "name", + "ahead", + "behind" + ], + "properties": { + "name": { + "type": "string" + }, + "ahead": { + "type": "integer", + "minimum": 0 + }, + "behind": { + "type": "integer", + "minimum": 0 + } + } + } + } + }, + "ApiResponse_CaptchaResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "base64", + "req" + ], + "properties": { + "base64": { + "type": "string" + }, + "rsa": { + "oneOf": [ + { + "type": "null" + }, + { + "$ref": "#/components/schemas/RsaResponse" + } + ] + }, + "req": { + "$ref": "#/components/schemas/CaptchaQuery" + } + } + } + } + }, + "ApiResponse_CardResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + 
"required": [ + "id", + "column", + "title", + "position", + "created_by", + "created_at", + "updated_at" + ], + "properties": { + "id": { + "type": "string", + "format": "uuid" + }, + "column": { + "type": "string", + "format": "uuid" + }, + "issue_id": { + "type": [ + "integer", + "null" + ], + "format": "int64" + }, + "project": { + "type": [ + "string", + "null" + ], + "format": "uuid" + }, + "title": { + "type": "string" + }, + "description": { + "type": [ + "string", + "null" + ] + }, + "position": { + "type": "integer", + "format": "int32" + }, + "assignee_id": { + "type": [ + "string", + "null" + ], + "format": "uuid" + }, + "due_date": { + "type": [ + "string", + "null" + ], + "format": "date-time" + }, + "priority": { + "type": [ + "string", + "null" + ] + }, + "created_by": { + "type": "string", + "format": "uuid" + }, + "created_at": { + "type": "string", + "format": "date-time" + }, + "updated_at": { + "type": "string", + "format": "date-time" + } + } + } + } + }, + "ApiResponse_ColumnResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "id", + "board", + "name", + "position" + ], + "properties": { + "id": { + "type": "string", + "format": "uuid" + }, + "board": { + "type": "string", + "format": "uuid" + }, + "name": { + "type": "string" + }, + "position": { + "type": "integer", + "format": "int32" + }, + "wip_limit": { + "type": [ + "integer", + "null" + ], + "format": "int32" + }, + "color": { + "type": [ + "string", + "null" + ] + } + } + } + } + }, + "ApiResponse_CommitAuthorResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "oid", + "author" + ], + "properties": { + "oid": { + 
"type": "string" + }, + "author": { + "$ref": "#/components/schemas/CommitSignatureResponse" + } + } + } + } + }, + "ApiResponse_CommitBranchesResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "properties": { + "data": { + "type": "object", + "additionalProperties": { + "type": "array", + "items": { + "type": "string" + } + }, + "propertyNames": { + "type": "string" + } + } + } + } + } + }, + "ApiResponse_CommitCountResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "count" + ], + "properties": { + "count": { + "type": "integer", + "minimum": 0 + } + } + } + } + }, + "ApiResponse_CommitCreateResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "oid" + ], + "properties": { + "oid": { + "type": "string" + } + } + } + } + }, + "ApiResponse_CommitExistsResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "oid", + "exists" + ], + "properties": { + "oid": { + "type": "string" + }, + "exists": { + "type": "boolean" + } + } + } + } + }, + "ApiResponse_CommitGraphReactResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "commits", + "lanes", + "max_parents" + ], 
+ "properties": { + "commits": { + "type": "array", + "items": { + "$ref": "#/components/schemas/CommitGraphReactCommit" + } + }, + "lanes": { + "type": "array", + "items": { + "$ref": "#/components/schemas/LaneInfo" + } + }, + "max_parents": { + "type": "integer", + "minimum": 0 + } + } + } + } + }, + "ApiResponse_CommitGraphResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "lines", + "max_parents" + ], + "properties": { + "lines": { + "type": "array", + "items": { + "$ref": "#/components/schemas/CommitGraphLineResponse" + } + }, + "max_parents": { + "type": "integer", + "minimum": 0 + } + } + } + } + }, + "ApiResponse_CommitIsCommitResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "oid", + "is_commit" + ], + "properties": { + "oid": { + "type": "string" + }, + "is_commit": { + "type": "boolean" + } + } + } + } + }, + "ApiResponse_CommitIsMergeResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "oid", + "is_merge" + ], + "properties": { + "oid": { + "type": "string" + }, + "is_merge": { + "type": "boolean" + } + } + } + } + }, + "ApiResponse_CommitIsTipResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "oid", + "is_tip" + ], + "properties": { + "oid": { + "type": "string" + }, + "is_tip": { + "type": "boolean" + } + } 
+ } + } + }, + "ApiResponse_CommitLogResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "data", + "total", + "page", + "per_page", + "total_pages" + ], + "properties": { + "data": { + "type": "array", + "items": { + "$ref": "#/components/schemas/CommitMetaResponse" + } + }, + "total": { + "type": "integer", + "minimum": 0 + }, + "page": { + "type": "integer", + "minimum": 0 + }, + "per_page": { + "type": "integer", + "minimum": 0 + }, + "total_pages": { + "type": "integer", + "minimum": 0 + } + } + } + } + }, + "ApiResponse_CommitMessageResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "oid", + "message" + ], + "properties": { + "oid": { + "type": "string" + }, + "message": { + "type": "string" + } + } + } + } + }, + "ApiResponse_CommitMetaResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "oid", + "message", + "summary", + "author", + "committer", + "tree_id", + "parent_ids" + ], + "properties": { + "oid": { + "type": "string" + }, + "message": { + "type": "string" + }, + "summary": { + "type": "string" + }, + "author": { + "$ref": "#/components/schemas/CommitSignatureResponse" + }, + "committer": { + "$ref": "#/components/schemas/CommitSignatureResponse" + }, + "tree_id": { + "type": "string" + }, + "parent_ids": { + "type": "array", + "items": { + "type": "string" + } + }, + "encoding": { + "type": [ + "string", + "null" + ] + } + } + } + } + }, + "ApiResponse_CommitParentCountResponse": { + "type": 
"object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "oid", + "parent_count" + ], + "properties": { + "oid": { + "type": "string" + }, + "parent_count": { + "type": "integer", + "minimum": 0 + } + } + } + } + }, + "ApiResponse_CommitParentIdsResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "oid", + "parent_ids" + ], + "properties": { + "oid": { + "type": "string" + }, + "parent_ids": { + "type": "array", + "items": { + "type": "string" + } + } + } + } + } + }, + "ApiResponse_CommitRefCountResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "oid", + "ref_count" + ], + "properties": { + "oid": { + "type": "string" + }, + "ref_count": { + "type": "integer", + "minimum": 0 + } + } + } + } + }, + "ApiResponse_CommitShortIdResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "oid", + "short_id" + ], + "properties": { + "oid": { + "type": "string" + }, + "short_id": { + "type": "string" + } + } + } + } + }, + "ApiResponse_CommitSummaryResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "oid", + "summary" + ], + "properties": { + "oid": { + "type": "string" + }, + 
"summary": { + "type": "string" + } + } + } + } + }, + "ApiResponse_CommitTagsResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "properties": { + "data": { + "type": "object", + "additionalProperties": { + "type": "array", + "items": { + "type": "string" + } + }, + "propertyNames": { + "type": "string" + } + } + } + } + } + }, + "ApiResponse_CommitTreeIdResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "oid", + "tree_id" + ], + "properties": { + "oid": { + "type": "string" + }, + "tree_id": { + "type": "string" + } + } + } + } + }, + "ApiResponse_ConfigBoolResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "key", + "value" + ], + "properties": { + "key": { + "type": "string" + }, + "value": { + "type": "boolean" + } + } + } + } + }, + "ApiResponse_ConfigSnapshotResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "entries" + ], + "properties": { + "entries": { + "type": "array", + "items": { + "$ref": "#/components/schemas/ConfigEntryResponse" + } + } + } + } + } + }, + "ApiResponse_ContextMe": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "uid", + 
"username", + "has_unread_notifications" + ], + "properties": { + "uid": { + "type": "string", + "format": "uuid" + }, + "username": { + "type": "string" + }, + "display_name": { + "type": [ + "string", + "null" + ] + }, + "avatar_url": { + "type": [ + "string", + "null" + ] + }, + "has_unread_notifications": { + "type": "integer", + "format": "int64", + "minimum": 0 + } + } + } + } + }, + "ApiResponse_ContributionHeatmapResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "username", + "total_contributions", + "heatmap", + "start_date", + "end_date" + ], + "properties": { + "username": { + "type": "string" + }, + "total_contributions": { + "type": "integer", + "format": "int64" + }, + "heatmap": { + "type": "array", + "items": { + "$ref": "#/components/schemas/ContributionHeatmapItem" + } + }, + "start_date": { + "type": "string" + }, + "end_date": { + "type": "string" + } + } + } + } + }, + "ApiResponse_ContributorsResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "total", + "contributors" + ], + "properties": { + "total": { + "type": "integer", + "minimum": 0 + }, + "contributors": { + "type": "array", + "items": { + "$ref": "#/components/schemas/ContributorStats" + } + } + } + } + } + }, + "ApiResponse_DeleteSkillResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "deleted", + "slug" + ], + "properties": { + "deleted": { + "type": "boolean" + }, + "slug": { + "type": "string" + } + } + } + } + }, + 
"ApiResponse_DescriptionResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "description" + ], + "properties": { + "description": { + "type": "string" + } + } + } + } + }, + "ApiResponse_DiffPatchIdResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "old_tree", + "new_tree", + "patch_id" + ], + "properties": { + "old_tree": { + "type": "string" + }, + "new_tree": { + "type": "string" + }, + "patch_id": { + "type": "string" + } + } + } + } + }, + "ApiResponse_DiffResultResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "stats", + "deltas" + ], + "properties": { + "stats": { + "$ref": "#/components/schemas/DiffStatsResponse" + }, + "deltas": { + "type": "array", + "items": { + "$ref": "#/components/schemas/DiffDeltaResponse" + } + } + } + } + } + }, + "ApiResponse_DiffStatsResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "files_changed", + "insertions", + "deletions" + ], + "properties": { + "files_changed": { + "type": "integer", + "minimum": 0 + }, + "insertions": { + "type": "integer", + "minimum": 0 + }, + "deletions": { + "type": "integer", + "minimum": 0 + } + } + } + } + }, + "ApiResponse_EmailResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": 
"integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "properties": { + "email": { + "type": [ + "string", + "null" + ] + } + } + } + } + }, + "ApiResponse_GitInitResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "path", + "is_bare" + ], + "properties": { + "path": { + "type": "string" + }, + "is_bare": { + "type": "boolean" + } + } + } + } + }, + "ApiResponse_GitReadmeResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "properties": { + "path": { + "type": [ + "string", + "null" + ] + }, + "content": { + "type": [ + "string", + "null" + ] + }, + "size": { + "type": [ + "integer", + "null" + ], + "minimum": 0 + }, + "encoding": { + "type": [ + "string", + "null" + ] + }, + "truncated": { + "type": "boolean" + }, + "is_binary": { + "type": "boolean" + } + } + } + } + }, + "ApiResponse_InvitationListResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "invitations", + "total", + "page", + "per_page" + ], + "properties": { + "invitations": { + "type": "array", + "items": { + "$ref": "#/components/schemas/InvitationResponse" + } + }, + "total": { + "type": "integer", + "format": "int64", + "minimum": 0 + }, + "page": { + "type": "integer", + "format": "int64", + "minimum": 0 + }, + "per_page": { + "type": "integer", + "format": "int64", + "minimum": 0 + } + } + } + } + }, + "ApiResponse_IsLikeResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + 
"properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "is_like" + ], + "properties": { + "is_like": { + "type": "boolean" + } + } + } + } + }, + "ApiResponse_IsWatchResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "is_watching" + ], + "properties": { + "is_watching": { + "type": "boolean" + } + } + } + } + }, + "ApiResponse_IssueAssigneeResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "issue", + "user_id", + "username", + "assigned_at" + ], + "properties": { + "issue": { + "type": "string", + "format": "uuid" + }, + "user_id": { + "type": "string", + "format": "uuid" + }, + "username": { + "type": "string" + }, + "assigned_at": { + "type": "string", + "format": "date-time" + } + } + } + } + }, + "ApiResponse_IssueCommentListResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "comments", + "total", + "page", + "per_page" + ], + "properties": { + "comments": { + "type": "array", + "items": { + "$ref": "#/components/schemas/IssueCommentResponse" + } + }, + "total": { + "type": "integer", + "format": "int64" + }, + "page": { + "type": "integer", + "format": "int64" + }, + "per_page": { + "type": "integer", + "format": "int64" + } + } + } + } + }, + "ApiResponse_IssueCommentResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + 
"format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "id", + "issue", + "author", + "author_username", + "body", + "created_at", + "updated_at" + ], + "properties": { + "id": { + "type": "integer", + "format": "int64" + }, + "issue": { + "type": "string", + "format": "uuid" + }, + "author": { + "type": "string", + "format": "uuid" + }, + "author_username": { + "type": "string" + }, + "body": { + "type": "string" + }, + "created_at": { + "type": "string", + "format": "date-time" + }, + "updated_at": { + "type": "string", + "format": "date-time" + } + } + } + } + }, + "ApiResponse_IssueLabelResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "issue", + "label_id", + "relation_at" + ], + "properties": { + "issue": { + "type": "string", + "format": "uuid" + }, + "label_id": { + "type": "integer", + "format": "int64" + }, + "label_name": { + "type": [ + "string", + "null" + ] + }, + "label_color": { + "type": [ + "string", + "null" + ] + }, + "relation_at": { + "type": "string", + "format": "date-time" + } + } + } + } + }, + "ApiResponse_IssueListResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "issues", + "total", + "page", + "per_page" + ], + "properties": { + "issues": { + "type": "array", + "items": { + "$ref": "#/components/schemas/IssueResponse" + } + }, + "total": { + "type": "integer", + "format": "int64" + }, + "page": { + "type": "integer", + "format": "int64" + }, + "per_page": { + "type": "integer", + "format": "int64" + } + } + } + } + }, + "ApiResponse_IssuePullRequestResponse": { + "type": "object", + "required": [ + 
"code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "issue", + "repo", + "number", + "relation_at" + ], + "properties": { + "issue": { + "type": "string", + "format": "uuid" + }, + "repo": { + "type": "string", + "format": "uuid" + }, + "number": { + "type": "integer", + "format": "int64" + }, + "relation_at": { + "type": "string", + "format": "date-time" + } + } + } + } + }, + "ApiResponse_IssueRepoResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "issue", + "repo", + "relation_at" + ], + "properties": { + "issue": { + "type": "string", + "format": "uuid" + }, + "repo": { + "type": "string", + "format": "uuid" + }, + "relation_at": { + "type": "string", + "format": "date-time" + } + } + } + } + }, + "ApiResponse_IssueResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "id", + "project", + "number", + "title", + "state", + "author", + "created_at", + "updated_at", + "created_by_ai" + ], + "properties": { + "id": { + "type": "string", + "format": "uuid" + }, + "project": { + "type": "string", + "format": "uuid" + }, + "number": { + "type": "integer", + "format": "int64" + }, + "title": { + "type": "string" + }, + "body": { + "type": [ + "string", + "null" + ] + }, + "state": { + "type": "string" + }, + "author": { + "type": "string", + "format": "uuid" + }, + "author_username": { + "type": [ + "string", + "null" + ] + }, + "milestone": { + "type": [ + "string", + "null" + ] + }, + "created_at": { + "type": "string", + "format": "date-time" + }, + 
"updated_at": { + "type": "string", + "format": "date-time" + }, + "closed_at": { + "type": [ + "string", + "null" + ], + "format": "date-time" + }, + "created_by_ai": { + "type": "boolean" + } + } + } + } + }, + "ApiResponse_IssueSubscriberResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "issue", + "user_id", + "username", + "subscribed", + "created_at" + ], + "properties": { + "issue": { + "type": "string", + "format": "uuid" + }, + "user_id": { + "type": "string", + "format": "uuid" + }, + "username": { + "type": "string" + }, + "subscribed": { + "type": "boolean" + }, + "created_at": { + "type": "string", + "format": "date-time" + } + } + } + } + }, + "ApiResponse_IssueSummaryResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "total", + "open", + "closed" + ], + "properties": { + "total": { + "type": "integer", + "format": "int64" + }, + "open": { + "type": "integer", + "format": "int64" + }, + "closed": { + "type": "integer", + "format": "int64" + } + } + } + } + }, + "ApiResponse_JoinAnswersListResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "request_id", + "project_uid", + "answers" + ], + "properties": { + "request_id": { + "type": "integer", + "format": "int64" + }, + "project_uid": { + "type": "string" + }, + "answers": { + "type": "array", + "items": { + "$ref": "#/components/schemas/JoinAnswerResponse" + } + } + } + } + } + }, + "ApiResponse_JoinRequestListResponse": { + "type": "object", 
+ "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "requests", + "total", + "page", + "per_page" + ], + "properties": { + "requests": { + "type": "array", + "items": { + "$ref": "#/components/schemas/JoinRequestResponse" + } + }, + "total": { + "type": "integer", + "format": "int64", + "minimum": 0 + }, + "page": { + "type": "integer", + "format": "int64", + "minimum": 0 + }, + "per_page": { + "type": "integer", + "format": "int64", + "minimum": 0 + } + } + } + } + }, + "ApiResponse_JoinSettingsResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "project_uid", + "require_approval", + "require_questions", + "questions" + ], + "properties": { + "project_uid": { + "type": "string" + }, + "require_approval": { + "type": "boolean" + }, + "require_questions": { + "type": "boolean" + }, + "questions": {} + } + } + } + }, + "ApiResponse_LabelListResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "labels", + "total" + ], + "properties": { + "labels": { + "type": "array", + "items": { + "$ref": "#/components/schemas/LabelResponse" + } + }, + "total": { + "type": "integer", + "minimum": 0 + } + } + } + } + }, + "ApiResponse_LabelResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "id", + "project_uid", + "name", + "color", + "created_at" + ], + "properties": { + 
"id": { + "type": "integer", + "format": "int64" + }, + "project_uid": { + "type": "string", + "format": "uuid" + }, + "name": { + "type": "string" + }, + "color": { + "type": "string" + }, + "description": { + "type": [ + "string", + "null" + ] + }, + "created_at": { + "type": "string", + "format": "date-time" + } + } + } + } + }, + "ApiResponse_MemberListResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "members", + "total", + "page", + "per_page" + ], + "properties": { + "members": { + "type": "array", + "items": { + "$ref": "#/components/schemas/MemberInfo" + } + }, + "total": { + "type": "integer", + "format": "int64", + "minimum": 0 + }, + "page": { + "type": "integer", + "format": "int64", + "minimum": 0 + }, + "per_page": { + "type": "integer", + "format": "int64", + "minimum": 0 + } + } + } + } + }, + "ApiResponse_MergeAnalysisResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "can_fast_forward", + "is_up_to_date", + "is_normal", + "analysis_flags", + "supported_strategies" + ], + "properties": { + "can_fast_forward": { + "type": "boolean" + }, + "is_up_to_date": { + "type": "boolean" + }, + "is_normal": { + "type": "boolean" + }, + "analysis_flags": { + "type": "array", + "items": { + "type": "string" + } + }, + "supported_strategies": { + "type": "array", + "items": { + "type": "string" + }, + "description": "Strategies supported given the current state of the PR." 
+ } + } + } + } + }, + "ApiResponse_MergeConflictResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "has_conflicts", + "conflicted_files" + ], + "properties": { + "has_conflicts": { + "type": "boolean" + }, + "conflicted_files": { + "type": "array", + "items": { + "$ref": "#/components/schemas/MergeConflictFile" + } + } + } + } + } + }, + "ApiResponse_MergeResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "repo", + "number", + "status", + "merged_by", + "merged_at" + ], + "properties": { + "repo": { + "type": "string", + "format": "uuid" + }, + "number": { + "type": "integer", + "format": "int64" + }, + "status": { + "type": "string" + }, + "merged_by": { + "type": "string", + "format": "uuid" + }, + "merged_at": { + "type": "string", + "format": "date-time" + } + } + } + } + }, + "ApiResponse_MessageEditHistoryResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "message_id", + "history", + "total_edits" + ], + "properties": { + "message_id": { + "type": "string", + "format": "uuid" + }, + "history": { + "type": "array", + "items": { + "$ref": "#/components/schemas/MessageEditHistoryEntry" + } + }, + "total_edits": { + "type": "integer", + "format": "int64" + } + } + } + } + }, + "ApiResponse_MessageSearchResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + 
"type": "object", + "required": [ + "messages", + "total" + ], + "properties": { + "messages": { + "type": "array", + "items": { + "$ref": "#/components/schemas/RoomMessageResponse" + } + }, + "total": { + "type": "integer", + "format": "int64" + } + } + } + } + }, + "ApiResponse_NotificationListResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "notifications", + "total", + "unread_count" + ], + "properties": { + "notifications": { + "type": "array", + "items": { + "$ref": "#/components/schemas/NotificationResponse" + } + }, + "total": { + "type": "integer", + "format": "int64" + }, + "unread_count": { + "type": "integer", + "format": "int64" + } + } + } + } + }, + "ApiResponse_NotificationPreferencesResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "user_id", + "email_enabled", + "in_app_enabled", + "push_enabled", + "digest_mode", + "dnd_enabled", + "marketing_enabled", + "security_enabled", + "product_enabled", + "created_at", + "updated_at" + ], + "properties": { + "user_id": { + "type": "string", + "format": "uuid" + }, + "email_enabled": { + "type": "boolean" + }, + "in_app_enabled": { + "type": "boolean" + }, + "push_enabled": { + "type": "boolean" + }, + "digest_mode": { + "type": "string" + }, + "dnd_enabled": { + "type": "boolean" + }, + "dnd_start_minute": { + "type": [ + "integer", + "null" + ], + "format": "int32" + }, + "dnd_end_minute": { + "type": [ + "integer", + "null" + ], + "format": "int32" + }, + "marketing_enabled": { + "type": "boolean" + }, + "security_enabled": { + "type": "boolean" + }, + "product_enabled": { + "type": "boolean" + }, + "created_at": { + "type": 
"string", + "format": "date-time" + }, + "updated_at": { + "type": "string", + "format": "date-time" + } + } + } + } + }, + "ApiResponse_Option_CommitMetaResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "oneOf": [ + { + "type": "null" + }, + { + "type": "object", + "required": [ + "oid", + "message", + "summary", + "author", + "committer", + "tree_id", + "parent_ids" + ], + "properties": { + "oid": { + "type": "string" + }, + "message": { + "type": "string" + }, + "summary": { + "type": "string" + }, + "author": { + "$ref": "#/components/schemas/CommitSignatureResponse" + }, + "committer": { + "$ref": "#/components/schemas/CommitSignatureResponse" + }, + "tree_id": { + "type": "string" + }, + "parent_ids": { + "type": "array", + "items": { + "type": "string" + } + }, + "encoding": { + "type": [ + "string", + "null" + ] + } + } + } + ] + } + } + }, + "ApiResponse_PrCommitsListResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "commits" + ], + "properties": { + "commits": { + "type": "array", + "items": { + "$ref": "#/components/schemas/PrCommitResponse" + } + } + } + } + } + }, + "ApiResponse_PreferencesResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "language", + "theme", + "timezone", + "email_notifications", + "in_app_notifications", + "created_at", + "updated_at" + ], + "properties": { + "language": { + "type": "string" + }, + "theme": { + "type": "string" + }, + "timezone": { + "type": "string" + }, + "email_notifications": { + 
"type": "boolean" + }, + "in_app_notifications": { + "type": "boolean" + }, + "created_at": { + "type": "string", + "format": "date-time" + }, + "updated_at": { + "type": "string", + "format": "date-time" + } + } + } + } + }, + "ApiResponse_ProfileResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "uid", + "username", + "created_at", + "updated_at" + ], + "properties": { + "uid": { + "type": "string", + "format": "uuid" + }, + "username": { + "type": "string" + }, + "display_name": { + "type": [ + "string", + "null" + ] + }, + "avatar_url": { + "type": [ + "string", + "null" + ] + }, + "website_url": { + "type": [ + "string", + "null" + ] + }, + "organization": { + "type": [ + "string", + "null" + ] + }, + "created_at": { + "type": "string", + "format": "date-time" + }, + "updated_at": { + "type": "string", + "format": "date-time" + }, + "last_sign_in_at": { + "type": [ + "string", + "null" + ], + "format": "date-time" + } + } + } + } + }, + "ApiResponse_ProjectBillingCurrentResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "project_uid", + "currency", + "monthly_quota", + "balance", + "month_used", + "cycle_start_utc", + "cycle_end_utc", + "updated_at", + "created_at" + ], + "properties": { + "project_uid": { + "type": "string", + "format": "uuid" + }, + "currency": { + "type": "string" + }, + "monthly_quota": { + "type": "number", + "format": "double" + }, + "balance": { + "type": "number", + "format": "double" + }, + "month_used": { + "type": "number", + "format": "double" + }, + "cycle_start_utc": { + "type": "string", + "format": "date-time" + }, + "cycle_end_utc": { + "type": "string", 
+ "format": "date-time" + }, + "updated_at": { + "type": "string", + "format": "date-time" + }, + "created_at": { + "type": "string", + "format": "date-time" + } + } + } + } + }, + "ApiResponse_ProjectBillingHistoryResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "page", + "per_page", + "total", + "list" + ], + "properties": { + "page": { + "type": "integer", + "format": "int64", + "minimum": 0 + }, + "per_page": { + "type": "integer", + "format": "int64", + "minimum": 0 + }, + "total": { + "type": "integer", + "format": "int64", + "minimum": 0 + }, + "list": { + "type": "array", + "items": { + "$ref": "#/components/schemas/ProjectBillingHistoryItem" + } + } + } + } + } + }, + "ApiResponse_ProjectInfoRelational": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "uid", + "name", + "display_name", + "is_public", + "created_at", + "updated_at", + "created_by", + "created_username_name", + "member_count", + "like_count", + "watch_count", + "keys", + "labels", + "is_like", + "is_watch" + ], + "properties": { + "uid": { + "type": "string", + "format": "uuid" + }, + "name": { + "type": "string" + }, + "display_name": { + "type": "string" + }, + "avatar_url": { + "type": [ + "string", + "null" + ] + }, + "description": { + "type": [ + "string", + "null" + ] + }, + "is_public": { + "type": "boolean" + }, + "created_at": { + "type": "string", + "format": "date-time" + }, + "updated_at": { + "type": "string", + "format": "date-time" + }, + "created_by": { + "type": "string", + "format": "uuid" + }, + "created_username_name": { + "type": "string" + }, + "created_display_name": { + "type": [ + "string", + "null" 
+ ] + }, + "created_avatar_url": { + "type": [ + "string", + "null" + ] + }, + "member_count": { + "type": "integer", + "format": "int64" + }, + "like_count": { + "type": "integer", + "format": "int64" + }, + "watch_count": { + "type": "integer", + "format": "int64" + }, + "keys": { + "type": "array", + "items": { + "$ref": "#/components/schemas/ProjectInfoKeyValue" + } + }, + "labels": { + "type": "array", + "items": { + "$ref": "#/components/schemas/ProjectInfoLabel" + } + }, + "role": { + "oneOf": [ + { + "type": "null" + }, + { + "$ref": "#/components/schemas/MemberRole" + } + ] + }, + "is_like": { + "type": "boolean" + }, + "is_watch": { + "type": "boolean" + } + } + } + } + }, + "ApiResponse_ProjectInitResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "params", + "project" + ], + "properties": { + "params": { + "$ref": "#/components/schemas/ProjectInitParams" + }, + "project": { + "$ref": "#/components/schemas/ProjectModel" + } + } + } + } + }, + "ApiResponse_ProjectRepoCreateResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "uid", + "repo_name", + "default_branch", + "project_name", + "is_private", + "storage_path", + "created_at" + ], + "properties": { + "uid": { + "type": "string", + "format": "uuid" + }, + "repo_name": { + "type": "string" + }, + "description": { + "type": [ + "string", + "null" + ] + }, + "default_branch": { + "type": "string" + }, + "project_name": { + "type": "string" + }, + "is_private": { + "type": "boolean" + }, + "storage_path": { + "type": "string" + }, + "created_at": { + "type": "string", + "format": "date-time" + } + } + } + } + }, + 
"ApiResponse_ProjectRepositoryPagination": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "items", + "total" + ], + "properties": { + "items": { + "type": "array", + "items": { + "$ref": "#/components/schemas/ProjectRepositoryItem" + } + }, + "cursor": { + "type": [ + "string", + "null" + ] + }, + "total": { + "type": "integer", + "format": "int64", + "minimum": 0 + } + } + } + } + }, + "ApiResponse_PullRequestListResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "pull_requests", + "total", + "page", + "per_page" + ], + "properties": { + "pull_requests": { + "type": "array", + "items": { + "$ref": "#/components/schemas/PullRequestResponse" + } + }, + "total": { + "type": "integer", + "format": "int64", + "minimum": 0 + }, + "page": { + "type": "integer", + "format": "int64" + }, + "per_page": { + "type": "integer", + "format": "int64" + } + } + } + } + }, + "ApiResponse_PullRequestResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "repo", + "number", + "title", + "author", + "base", + "head", + "status", + "created_at", + "updated_at", + "created_by_ai" + ], + "properties": { + "repo": { + "type": "string", + "format": "uuid" + }, + "number": { + "type": "integer", + "format": "int64" + }, + "issue": { + "type": [ + "string", + "null" + ], + "format": "uuid" + }, + "title": { + "type": "string" + }, + "body": { + "type": [ + "string", + "null" + ] + }, + "author": { + "type": "string", + "format": "uuid" + }, + 
"author_username": { + "type": [ + "string", + "null" + ] + }, + "base": { + "type": "string" + }, + "head": { + "type": "string" + }, + "status": { + "type": "string" + }, + "merged_by": { + "type": [ + "string", + "null" + ], + "format": "uuid" + }, + "created_at": { + "type": "string", + "format": "date-time" + }, + "updated_at": { + "type": "string", + "format": "date-time" + }, + "merged_at": { + "type": [ + "string", + "null" + ], + "format": "date-time" + }, + "created_by_ai": { + "type": "boolean" + } + } + } + } + }, + "ApiResponse_PullRequestSummaryResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "total", + "open", + "merged", + "closed" + ], + "properties": { + "total": { + "type": "integer", + "format": "int64", + "minimum": 0 + }, + "open": { + "type": "integer", + "format": "int64", + "minimum": 0 + }, + "merged": { + "type": "integer", + "format": "int64", + "minimum": 0 + }, + "closed": { + "type": "integer", + "format": "int64", + "minimum": 0 + } + } + } + } + }, + "ApiResponse_ReactionListResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "reactions" + ], + "properties": { + "reactions": { + "type": "array", + "items": { + "$ref": "#/components/schemas/ReactionSummary" + } + } + } + } + } + }, + "ApiResponse_ReactionResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "user", + "reaction", + "created_at" + ], + "properties": { + "user": { + "type": "string", + "format": "uuid" + }, + 
"reaction": { + "type": "string" + }, + "created_at": { + "type": "string", + "format": "date-time" + } + } + } + } + }, + "ApiResponse_RefDeleteResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "name", + "oid" + ], + "properties": { + "name": { + "type": "string" + }, + "oid": { + "type": "string" + } + } + } + } + }, + "ApiResponse_RefExistsResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "name", + "exists" + ], + "properties": { + "name": { + "type": "string" + }, + "exists": { + "type": "boolean" + } + } + } + } + }, + "ApiResponse_RefInfoResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "name", + "is_symbolic", + "is_branch", + "is_remote", + "is_tag", + "is_note" + ], + "properties": { + "name": { + "type": "string" + }, + "oid": { + "type": [ + "string", + "null" + ] + }, + "target": { + "type": [ + "string", + "null" + ] + }, + "is_symbolic": { + "type": "boolean" + }, + "is_branch": { + "type": "boolean" + }, + "is_remote": { + "type": "boolean" + }, + "is_tag": { + "type": "boolean" + }, + "is_note": { + "type": "boolean" + } + } + } + } + }, + "ApiResponse_RefTargetResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "name" + ], + "properties": { + "name": { + "type": "string" + }, + "oid": { + "type": [ + 
"string", + "null" + ] + } + } + } + } + }, + "ApiResponse_RefUpdateResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "name" + ], + "properties": { + "name": { + "type": "string" + }, + "old_oid": { + "type": [ + "string", + "null" + ] + }, + "new_oid": { + "type": [ + "string", + "null" + ] + } + } + } + } + }, + "ApiResponse_ReviewCommentListResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "comments", + "threads", + "total" + ], + "properties": { + "comments": { + "type": "array", + "items": { + "$ref": "#/components/schemas/ReviewCommentResponse" + }, + "description": "Flat list of all comments (kept for backward compatibility)." + }, + "threads": { + "type": "array", + "items": { + "$ref": "#/components/schemas/ReviewCommentThread" + }, + "description": "Comments grouped into threads (root comments with their replies)." 
+ }, + "total": { + "type": "integer", + "format": "int64" + } + } + } + } + }, + "ApiResponse_ReviewCommentResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "repo", + "number", + "id", + "body", + "author", + "resolved", + "created_at", + "updated_at" + ], + "properties": { + "repo": { + "type": "string", + "format": "uuid" + }, + "number": { + "type": "integer", + "format": "int64" + }, + "id": { + "type": "integer", + "format": "int64" + }, + "review": { + "type": [ + "string", + "null" + ], + "format": "uuid" + }, + "path": { + "type": [ + "string", + "null" + ] + }, + "side": { + "type": [ + "string", + "null" + ] + }, + "line": { + "type": [ + "integer", + "null" + ], + "format": "int64" + }, + "old_line": { + "type": [ + "integer", + "null" + ], + "format": "int64" + }, + "body": { + "type": "string" + }, + "author": { + "type": "string", + "format": "uuid" + }, + "author_username": { + "type": [ + "string", + "null" + ] + }, + "resolved": { + "type": "boolean" + }, + "in_reply_to": { + "type": [ + "integer", + "null" + ], + "format": "int64" + }, + "created_at": { + "type": "string", + "format": "date-time" + }, + "updated_at": { + "type": "string", + "format": "date-time" + } + } + } + } + }, + "ApiResponse_ReviewListResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "reviews" + ], + "properties": { + "reviews": { + "type": "array", + "items": { + "$ref": "#/components/schemas/ReviewResponse" + } + } + } + } + } + }, + "ApiResponse_ReviewRequestListResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + 
"format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "requests", + "total" + ], + "properties": { + "requests": { + "type": "array", + "items": { + "$ref": "#/components/schemas/ReviewRequestResponse" + } + }, + "total": { + "type": "integer", + "format": "int64" + } + } + } + } + }, + "ApiResponse_ReviewRequestResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "repo", + "number", + "reviewer", + "requested_by", + "requested_at" + ], + "properties": { + "repo": { + "type": "string", + "format": "uuid" + }, + "number": { + "type": "integer", + "format": "int64" + }, + "reviewer": { + "type": "string", + "format": "uuid" + }, + "reviewer_username": { + "type": [ + "string", + "null" + ] + }, + "requested_by": { + "type": "string", + "format": "uuid" + }, + "requested_by_username": { + "type": [ + "string", + "null" + ] + }, + "requested_at": { + "type": "string", + "format": "date-time" + }, + "dismissed_at": { + "type": [ + "string", + "null" + ], + "format": "date-time" + }, + "dismissed_by": { + "type": [ + "string", + "null" + ], + "format": "uuid" + }, + "dismissed_by_username": { + "type": [ + "string", + "null" + ] + } + } + } + } + }, + "ApiResponse_ReviewResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "repo", + "number", + "reviewer", + "state", + "created_at", + "updated_at" + ], + "properties": { + "repo": { + "type": "string", + "format": "uuid" + }, + "number": { + "type": "integer", + "format": "int64" + }, + "reviewer": { + "type": "string", + "format": "uuid" + }, + "reviewer_username": { + "type": [ + "string", + 
"null" + ] + }, + "state": { + "type": "string" + }, + "body": { + "type": [ + "string", + "null" + ] + }, + "submitted_at": { + "type": [ + "string", + "null" + ], + "format": "date-time" + }, + "created_at": { + "type": "string", + "format": "date-time" + }, + "updated_at": { + "type": "string", + "format": "date-time" + } + } + } + } + }, + "ApiResponse_RoomAiResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "room", + "model", + "call_count", + "use_exact", + "think", + "stream", + "created_at", + "updated_at" + ], + "properties": { + "room": { + "type": "string", + "format": "uuid" + }, + "model": { + "type": "string", + "format": "uuid" + }, + "version": { + "type": [ + "string", + "null" + ], + "format": "uuid" + }, + "call_count": { + "type": "integer", + "format": "int64" + }, + "last_call_at": { + "type": [ + "string", + "null" + ], + "format": "date-time" + }, + "history_limit": { + "type": [ + "integer", + "null" + ], + "format": "int64" + }, + "system_prompt": { + "type": [ + "string", + "null" + ] + }, + "temperature": { + "type": [ + "number", + "null" + ], + "format": "double" + }, + "max_tokens": { + "type": [ + "integer", + "null" + ], + "format": "int64" + }, + "use_exact": { + "type": "boolean" + }, + "think": { + "type": "boolean" + }, + "stream": { + "type": "boolean" + }, + "min_score": { + "type": [ + "number", + "null" + ], + "format": "float" + }, + "created_at": { + "type": "string", + "format": "date-time" + }, + "updated_at": { + "type": "string", + "format": "date-time" + } + } + } + } + }, + "ApiResponse_RoomCategoryResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": 
[ + "id", + "project", + "name", + "position", + "created_by", + "created_at" + ], + "properties": { + "id": { + "type": "string", + "format": "uuid" + }, + "project": { + "type": "string", + "format": "uuid" + }, + "name": { + "type": "string" + }, + "position": { + "type": "integer", + "format": "int32" + }, + "created_by": { + "type": "string", + "format": "uuid" + }, + "created_at": { + "type": "string", + "format": "date-time" + } + } + } + } + }, + "ApiResponse_RoomMemberResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "room", + "user", + "role", + "do_not_disturb" + ], + "properties": { + "room": { + "type": "string", + "format": "uuid" + }, + "user": { + "type": "string", + "format": "uuid" + }, + "user_info": { + "oneOf": [ + { + "type": "null" + }, + { + "$ref": "#/components/schemas/UserInfo" + } + ] + }, + "role": { + "type": "string" + }, + "first_msg_in": { + "type": [ + "string", + "null" + ], + "format": "date-time" + }, + "joined_at": { + "type": [ + "string", + "null" + ], + "format": "date-time" + }, + "last_read_seq": { + "type": [ + "integer", + "null" + ], + "format": "int64" + }, + "do_not_disturb": { + "type": "boolean" + }, + "dnd_start_hour": { + "type": [ + "integer", + "null" + ], + "format": "int32" + }, + "dnd_end_hour": { + "type": [ + "integer", + "null" + ], + "format": "int32" + } + } + } + } + }, + "ApiResponse_RoomMessageListResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "messages", + "total" + ], + "properties": { + "messages": { + "type": "array", + "items": { + "$ref": "#/components/schemas/RoomMessageResponse" + } + }, + "total": { + "type": 
"integer", + "format": "int64" + } + } + } + } + }, + "ApiResponse_RoomMessageResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "id", + "seq", + "room", + "sender_type", + "content", + "content_type", + "send_at" + ], + "properties": { + "id": { + "type": "string", + "format": "uuid" + }, + "seq": { + "type": "integer", + "format": "int64" + }, + "room": { + "type": "string", + "format": "uuid" + }, + "sender_type": { + "type": "string" + }, + "sender_id": { + "type": [ + "string", + "null" + ], + "format": "uuid" + }, + "display_name": { + "type": [ + "string", + "null" + ] + }, + "thread": { + "type": [ + "string", + "null" + ], + "format": "uuid" + }, + "in_reply_to": { + "type": [ + "string", + "null" + ], + "format": "uuid" + }, + "content": { + "type": "string" + }, + "content_type": { + "type": "string" + }, + "edited_at": { + "type": [ + "string", + "null" + ], + "format": "date-time" + }, + "send_at": { + "type": "string", + "format": "date-time" + }, + "revoked": { + "type": [ + "string", + "null" + ], + "format": "date-time" + }, + "revoked_by": { + "type": [ + "string", + "null" + ], + "format": "uuid" + } + } + } + } + }, + "ApiResponse_RoomPinResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "room", + "message", + "pinned_by", + "pinned_at" + ], + "properties": { + "room": { + "type": "string", + "format": "uuid" + }, + "message": { + "type": "string", + "format": "uuid" + }, + "pinned_by": { + "type": "string", + "format": "uuid" + }, + "pinned_at": { + "type": "string", + "format": "date-time" + } + } + } + } + }, + "ApiResponse_RoomResponse": { + "type": "object", + 
"required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "id", + "project", + "room_name", + "public", + "created_by", + "created_at", + "last_msg_at" + ], + "properties": { + "id": { + "type": "string", + "format": "uuid" + }, + "project": { + "type": "string", + "format": "uuid" + }, + "room_name": { + "type": "string" + }, + "public": { + "type": "boolean" + }, + "category": { + "type": [ + "string", + "null" + ], + "format": "uuid" + }, + "created_by": { + "type": "string", + "format": "uuid" + }, + "created_at": { + "type": "string", + "format": "date-time" + }, + "last_msg_at": { + "type": "string", + "format": "date-time" + }, + "unread_count": { + "type": "integer", + "format": "int64" + } + } + } + } + }, + "ApiResponse_RoomThreadResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "id", + "room", + "parent", + "created_by", + "participants", + "last_message_at", + "created_at", + "updated_at" + ], + "properties": { + "id": { + "type": "string", + "format": "uuid" + }, + "room": { + "type": "string", + "format": "uuid" + }, + "parent": { + "type": "integer", + "format": "int64" + }, + "created_by": { + "type": "string", + "format": "uuid" + }, + "participants": {}, + "last_message_at": { + "type": "string", + "format": "date-time" + }, + "last_message_preview": { + "type": [ + "string", + "null" + ] + }, + "created_at": { + "type": "string", + "format": "date-time" + }, + "updated_at": { + "type": "string", + "format": "date-time" + } + } + } + } + }, + "ApiResponse_ScanResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + 
"message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "discovered", + "created", + "updated", + "removed" + ], + "properties": { + "discovered": { + "type": "integer", + "format": "int64" + }, + "created": { + "type": "integer", + "format": "int64" + }, + "updated": { + "type": "integer", + "format": "int64" + }, + "removed": { + "type": "integer", + "format": "int64" + } + } + } + } + }, + "ApiResponse_SearchResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "query" + ], + "properties": { + "query": { + "type": "string" + }, + "projects": { + "oneOf": [ + { + "type": "null" + }, + { + "$ref": "#/components/schemas/SearchResultSet_ProjectSearchItem" + } + ] + }, + "repos": { + "oneOf": [ + { + "type": "null" + }, + { + "$ref": "#/components/schemas/SearchResultSet_RepoSearchItem" + } + ] + }, + "issues": { + "oneOf": [ + { + "type": "null" + }, + { + "$ref": "#/components/schemas/SearchResultSet_IssueSearchItem" + } + ] + }, + "users": { + "oneOf": [ + { + "type": "null" + }, + { + "$ref": "#/components/schemas/SearchResultSet_UserSearchItem" + } + ] + } + } + } + } + }, + "ApiResponse_SideBySideDiffResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "files", + "total_additions", + "total_deletions" + ], + "properties": { + "files": { + "type": "array", + "items": { + "$ref": "#/components/schemas/SideBySideFileResponse" + } + }, + "total_additions": { + "type": "integer", + "minimum": 0 + }, + "total_deletions": { + "type": "integer", + "minimum": 0 + } + } + } + } + }, + "ApiResponse_SkillResponse": { + "type": "object", + "required": [ + "code", + "message" 
+ ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "id", + "project_uuid", + "slug", + "name", + "source", + "content", + "metadata", + "enabled", + "created_at", + "updated_at" + ], + "properties": { + "id": { + "type": "integer", + "format": "int64" + }, + "project_uuid": { + "type": "string" + }, + "slug": { + "type": "string" + }, + "name": { + "type": "string" + }, + "description": { + "type": [ + "string", + "null" + ] + }, + "source": { + "type": "string" + }, + "repo_id": { + "type": [ + "string", + "null" + ] + }, + "commit_sha": { + "type": [ + "string", + "null" + ] + }, + "blob_hash": { + "type": [ + "string", + "null" + ] + }, + "content": { + "type": "string" + }, + "metadata": {}, + "enabled": { + "type": "boolean" + }, + "created_by": { + "type": [ + "string", + "null" + ] + }, + "created_at": { + "type": "string", + "format": "date-time" + }, + "updated_at": { + "type": "string", + "format": "date-time" + } + } + } + } + }, + "ApiResponse_SshKeyListResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "keys", + "total" + ], + "properties": { + "keys": { + "type": "array", + "items": { + "$ref": "#/components/schemas/SshKeyResponse" + } + }, + "total": { + "type": "integer", + "minimum": 0 + } + } + } + } + }, + "ApiResponse_SshKeyResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "id", + "user_uid", + "title", + "fingerprint", + "key_type", + "is_verified", + "is_revoked", + "created_at", + "updated_at" + ], + "properties": { + "id": { + "type": "integer", + 
"format": "int64" + }, + "user_uid": { + "type": "string", + "format": "uuid" + }, + "title": { + "type": "string" + }, + "fingerprint": { + "type": "string" + }, + "key_type": { + "type": "string" + }, + "key_bits": { + "type": [ + "integer", + "null" + ], + "format": "int32" + }, + "is_verified": { + "type": "boolean" + }, + "last_used_at": { + "type": [ + "string", + "null" + ], + "format": "date-time" + }, + "expires_at": { + "type": [ + "string", + "null" + ], + "format": "date-time" + }, + "is_revoked": { + "type": "boolean" + }, + "created_at": { + "type": "string", + "format": "date-time" + }, + "updated_at": { + "type": "string", + "format": "date-time" + } + } + } + } + }, + "ApiResponse_StarCountResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "count" + ], + "properties": { + "count": { + "type": "integer", + "format": "int64" + } + } + } + } + }, + "ApiResponse_StarUserListResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "users" + ], + "properties": { + "users": { + "type": "array", + "items": { + "$ref": "#/components/schemas/StarUserInfo" + } + } + } + } + } + }, + "ApiResponse_String": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "string" + } + } + }, + "ApiResponse_TagCountResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "count" + 
], + "properties": { + "count": { + "type": "integer", + "minimum": 0 + } + } + } + } + }, + "ApiResponse_TagExistsResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "name", + "exists" + ], + "properties": { + "name": { + "type": "string" + }, + "exists": { + "type": "boolean" + } + } + } + } + }, + "ApiResponse_TagInfoResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "name", + "oid", + "target", + "is_annotated" + ], + "properties": { + "name": { + "type": "string" + }, + "oid": { + "type": "string" + }, + "target": { + "type": "string" + }, + "is_annotated": { + "type": "boolean" + }, + "message": { + "type": [ + "string", + "null" + ] + }, + "tagger": { + "type": [ + "string", + "null" + ] + }, + "tagger_email": { + "type": [ + "string", + "null" + ] + } + } + } + } + }, + "ApiResponse_TagIsAnnotatedResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "name", + "is_annotated" + ], + "properties": { + "name": { + "type": "string" + }, + "is_annotated": { + "type": "boolean" + } + } + } + } + }, + "ApiResponse_TagMessageResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "name" + ], + "properties": { + "name": { + "type": "string" + }, + "message": { + "type": [ + "string", + "null" + ] + } + } + } + } + }, + 
"ApiResponse_TagSummaryResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "total_count" + ], + "properties": { + "total_count": { + "type": "integer", + "minimum": 0 + } + } + } + } + }, + "ApiResponse_TagTaggerResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "name" + ], + "properties": { + "name": { + "type": "string" + }, + "tagger": { + "type": [ + "string", + "null" + ] + }, + "tagger_email": { + "type": [ + "string", + "null" + ] + } + } + } + } + }, + "ApiResponse_TagTargetResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "name" + ], + "properties": { + "name": { + "type": "string" + }, + "target": { + "type": [ + "string", + "null" + ] + } + } + } + } + }, + "ApiResponse_TransferRepoResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "repo_id", + "old_project_name", + "new_project_name", + "repo_name" + ], + "properties": { + "repo_id": { + "type": "string", + "format": "uuid" + }, + "old_project_name": { + "type": "string" + }, + "new_project_name": { + "type": "string" + }, + "repo_name": { + "type": "string" + } + } + } + } + }, + "ApiResponse_TreeDiffStatsResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + 
"message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "old_tree", + "new_tree", + "files_changed", + "insertions", + "deletions" + ], + "properties": { + "old_tree": { + "type": "string" + }, + "new_tree": { + "type": "string" + }, + "files_changed": { + "type": "integer", + "minimum": 0 + }, + "insertions": { + "type": "integer", + "minimum": 0 + }, + "deletions": { + "type": "integer", + "minimum": 0 + } + } + } + } + }, + "ApiResponse_TreeEntryCountResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "oid", + "count" + ], + "properties": { + "oid": { + "type": "string" + }, + "count": { + "type": "integer", + "minimum": 0 + } + } + } + } + }, + "ApiResponse_TreeEntryResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "name", + "oid", + "kind", + "filemode", + "is_binary" + ], + "properties": { + "name": { + "type": "string" + }, + "oid": { + "type": "string" + }, + "kind": { + "type": "string" + }, + "filemode": { + "type": "integer", + "format": "int32", + "minimum": 0 + }, + "is_binary": { + "type": "boolean" + } + } + } + } + }, + "ApiResponse_TreeExistsResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "oid", + "exists" + ], + "properties": { + "oid": { + "type": "string" + }, + "exists": { + "type": "boolean" + } + } + } + } + }, + "ApiResponse_TreeInfoResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": 
"integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "oid", + "entry_count", + "is_empty" + ], + "properties": { + "oid": { + "type": "string" + }, + "entry_count": { + "type": "integer", + "minimum": 0 + }, + "is_empty": { + "type": "boolean" + } + } + } + } + }, + "ApiResponse_TreeIsEmptyResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "oid", + "is_empty" + ], + "properties": { + "oid": { + "type": "string" + }, + "is_empty": { + "type": "boolean" + } + } + } + } + }, + "ApiResponse_UserInfoExternal": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "user_uid", + "username", + "display_name", + "timezone", + "language", + "is_owner", + "is_subscribe", + "total_projects", + "total_repos" + ], + "properties": { + "user_uid": { + "type": "string", + "format": "uuid" + }, + "username": { + "type": "string" + }, + "display_name": { + "type": "string" + }, + "avatar_url": { + "type": [ + "string", + "null" + ] + }, + "master_email": { + "type": [ + "string", + "null" + ] + }, + "timezone": { + "type": "string" + }, + "language": { + "type": "string" + }, + "website_url": { + "type": [ + "string", + "null" + ] + }, + "organization": { + "type": [ + "string", + "null" + ] + }, + "last_sign_in_at": { + "type": [ + "string", + "null" + ], + "format": "date-time" + }, + "is_owner": { + "type": "boolean" + }, + "is_subscribe": { + "type": "boolean" + }, + "total_projects": { + "type": "integer", + "format": "int64", + "minimum": 0 + }, + "total_repos": { + "type": "integer", + "format": "int64", + "minimum": 0 + } + } + } + } + }, + 
"ApiResponse_UserProjectsResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "username", + "projects", + "total_count" + ], + "properties": { + "username": { + "type": "string" + }, + "projects": { + "type": "array", + "items": { + "$ref": "#/components/schemas/UserProjectInfo" + } + }, + "total_count": { + "type": "integer", + "format": "int64", + "minimum": 0 + } + } + } + } + }, + "ApiResponse_UserReposResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "username", + "repos", + "total_count" + ], + "properties": { + "username": { + "type": "string" + }, + "repos": { + "type": "array", + "items": { + "$ref": "#/components/schemas/UserRepoInfo" + } + }, + "total_count": { + "type": "integer", + "format": "int64", + "minimum": 0 + } + } + } + } + }, + "ApiResponse_Value": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": {} + } + }, + "ApiResponse_Vec_AuditLogResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "array", + "items": { + "type": "object", + "required": [ + "id", + "project_uid", + "actor_uid", + "action" + ], + "properties": { + "id": { + "type": "integer", + "format": "int64" + }, + "project_uid": { + "type": "string", + "format": "uuid" + }, + "actor_uid": { + "type": "string", + "format": "uuid" + }, + "action": { + "type": "string" + }, + "details": { + "type": "object" + } + } + } 
+ } + } + }, + "ApiResponse_Vec_BoardResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "array", + "items": { + "type": "object", + "required": [ + "id", + "project", + "name", + "created_by", + "created_at", + "updated_at" + ], + "properties": { + "id": { + "type": "string", + "format": "uuid" + }, + "project": { + "type": "string", + "format": "uuid" + }, + "name": { + "type": "string" + }, + "description": { + "type": [ + "string", + "null" + ] + }, + "created_by": { + "type": "string", + "format": "uuid" + }, + "created_at": { + "type": "string", + "format": "date-time" + }, + "updated_at": { + "type": "string", + "format": "date-time" + } + } + } + } + } + }, + "ApiResponse_Vec_BranchInfoResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "array", + "items": { + "type": "object", + "required": [ + "name", + "oid", + "is_head", + "is_remote", + "is_current" + ], + "properties": { + "name": { + "type": "string" + }, + "oid": { + "type": "string" + }, + "is_head": { + "type": "boolean" + }, + "is_remote": { + "type": "boolean" + }, + "is_current": { + "type": "boolean" + }, + "upstream": { + "type": [ + "string", + "null" + ] + } + } + } + } + } + }, + "ApiResponse_Vec_BranchProtectionResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "array", + "items": { + "type": "object", + "required": [ + "id", + "repo", + "branch", + "forbid_push", + "forbid_pull", + "forbid_merge", + "forbid_deletion", + "forbid_force_push", + "forbid_tag_push", + "required_approvals", + "dismiss_stale_reviews", + 
"require_linear_history", + "allow_fork_syncing" + ], + "properties": { + "id": { + "type": "integer", + "format": "int64" + }, + "repo": { + "type": "string", + "format": "uuid" + }, + "branch": { + "type": "string" + }, + "forbid_push": { + "type": "boolean" + }, + "forbid_pull": { + "type": "boolean" + }, + "forbid_merge": { + "type": "boolean" + }, + "forbid_deletion": { + "type": "boolean" + }, + "forbid_force_push": { + "type": "boolean" + }, + "forbid_tag_push": { + "type": "boolean" + }, + "required_approvals": { + "type": "integer", + "format": "int32" + }, + "dismiss_stale_reviews": { + "type": "boolean" + }, + "require_linear_history": { + "type": "boolean" + }, + "allow_fork_syncing": { + "type": "boolean" + } + } + } + } + } + }, + "ApiResponse_Vec_CommitMetaResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "array", + "items": { + "type": "object", + "required": [ + "oid", + "message", + "summary", + "author", + "committer", + "tree_id", + "parent_ids" + ], + "properties": { + "oid": { + "type": "string" + }, + "message": { + "type": "string" + }, + "summary": { + "type": "string" + }, + "author": { + "$ref": "#/components/schemas/CommitSignatureResponse" + }, + "committer": { + "$ref": "#/components/schemas/CommitSignatureResponse" + }, + "tree_id": { + "type": "string" + }, + "parent_ids": { + "type": "array", + "items": { + "type": "string" + } + }, + "encoding": { + "type": [ + "string", + "null" + ] + } + } + } + } + } + }, + "ApiResponse_Vec_CommitRefInfoResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "array", + "items": { + "type": "object", + "required": [ + "name", + "target", + "is_remote", + "is_tag" + ], + 
"properties": { + "name": { + "type": "string" + }, + "target": { + "type": "string" + }, + "is_remote": { + "type": "boolean" + }, + "is_tag": { + "type": "boolean" + } + } + } + } + } + }, + "ApiResponse_Vec_CommitReflogEntryResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "array", + "items": { + "type": "object", + "required": [ + "new_sha", + "old_sha", + "committer_name", + "committer_email", + "time_secs", + "ref_name" + ], + "properties": { + "new_sha": { + "type": "string" + }, + "old_sha": { + "type": "string" + }, + "committer_name": { + "type": "string" + }, + "committer_email": { + "type": "string" + }, + "time_secs": { + "type": "integer", + "format": "int64" + }, + "message": { + "type": [ + "string", + "null" + ] + }, + "ref_name": { + "type": "string" + } + } + } + } + } + }, + "ApiResponse_Vec_IssueAssigneeResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "array", + "items": { + "type": "object", + "required": [ + "issue", + "user_id", + "username", + "assigned_at" + ], + "properties": { + "issue": { + "type": "string", + "format": "uuid" + }, + "user_id": { + "type": "string", + "format": "uuid" + }, + "username": { + "type": "string" + }, + "assigned_at": { + "type": "string", + "format": "date-time" + } + } + } + } + } + }, + "ApiResponse_Vec_IssueLabelResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "array", + "items": { + "type": "object", + "required": [ + "issue", + "label_id", + "relation_at" + ], + "properties": { + "issue": { + "type": "string", + "format": 
"uuid" + }, + "label_id": { + "type": "integer", + "format": "int64" + }, + "label_name": { + "type": [ + "string", + "null" + ] + }, + "label_color": { + "type": [ + "string", + "null" + ] + }, + "relation_at": { + "type": "string", + "format": "date-time" + } + } + } + } + } + }, + "ApiResponse_Vec_IssuePullRequestResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "array", + "items": { + "type": "object", + "required": [ + "issue", + "repo", + "number", + "relation_at" + ], + "properties": { + "issue": { + "type": "string", + "format": "uuid" + }, + "repo": { + "type": "string", + "format": "uuid" + }, + "number": { + "type": "integer", + "format": "int64" + }, + "relation_at": { + "type": "string", + "format": "date-time" + } + } + } + } + } + }, + "ApiResponse_Vec_IssueRepoResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "array", + "items": { + "type": "object", + "required": [ + "issue", + "repo", + "relation_at" + ], + "properties": { + "issue": { + "type": "string", + "format": "uuid" + }, + "repo": { + "type": "string", + "format": "uuid" + }, + "relation_at": { + "type": "string", + "format": "date-time" + } + } + } + } + } + }, + "ApiResponse_Vec_IssueSubscriberResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "array", + "items": { + "type": "object", + "required": [ + "issue", + "user_id", + "username", + "subscribed", + "created_at" + ], + "properties": { + "issue": { + "type": "string", + "format": "uuid" + }, + "user_id": { + "type": "string", + "format": "uuid" + }, + 
"username": { + "type": "string" + }, + "subscribed": { + "type": "boolean" + }, + "created_at": { + "type": "string", + "format": "date-time" + } + } + } + } + } + }, + "ApiResponse_Vec_LabelResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "array", + "items": { + "type": "object", + "required": [ + "id", + "project", + "name", + "color" + ], + "properties": { + "id": { + "type": "integer", + "format": "int64" + }, + "project": { + "type": "string", + "format": "uuid" + }, + "name": { + "type": "string" + }, + "color": { + "type": "string" + } + } + } + } + } + }, + "ApiResponse_Vec_LikeUserInfo": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "array", + "items": { + "type": "object", + "required": [ + "uid", + "username", + "avatar_url" + ], + "properties": { + "uid": { + "type": "string", + "format": "uuid" + }, + "username": { + "type": "string" + }, + "avatar_url": { + "type": "string" + } + } + } + } + } + }, + "ApiResponse_Vec_MentionNotificationResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "array", + "items": { + "type": "object", + "required": [ + "message_id", + "mentioned_by", + "mentioned_by_name", + "content_preview", + "room_id", + "room_name", + "created_at" + ], + "properties": { + "message_id": { + "type": "string", + "format": "uuid" + }, + "mentioned_by": { + "type": "string", + "format": "uuid" + }, + "mentioned_by_name": { + "type": "string" + }, + "content_preview": { + "type": "string" + }, + "room_id": { + "type": "string", + "format": "uuid" + }, + "room_name": { + 
"type": "string" + }, + "created_at": { + "type": "string", + "format": "date-time" + } + } + } + } + } + }, + "ApiResponse_Vec_MergeheadInfoResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "array", + "items": { + "type": "object", + "required": [ + "oid" + ], + "properties": { + "oid": { + "type": "string" + } + } + } + } + } + }, + "ApiResponse_Vec_PendingInvitationInfo": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "array", + "items": { + "type": "object", + "required": [ + "user_id", + "username", + "role", + "invited_at" + ], + "properties": { + "user_id": { + "type": "string", + "format": "uuid" + }, + "username": { + "type": "string" + }, + "display_name": { + "type": [ + "string", + "null" + ] + }, + "avatar_url": { + "type": [ + "string", + "null" + ] + }, + "email": { + "type": [ + "string", + "null" + ] + }, + "role": { + "type": "string" + }, + "invited_by_username": { + "type": [ + "string", + "null" + ] + }, + "invited_at": { + "type": "string", + "format": "date-time" + }, + "expires_at": { + "type": [ + "string", + "null" + ], + "format": "date-time" + } + } + } + } + } + }, + "ApiResponse_Vec_RefInfoResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "array", + "items": { + "type": "object", + "required": [ + "name", + "is_symbolic", + "is_branch", + "is_remote", + "is_tag", + "is_note" + ], + "properties": { + "name": { + "type": "string" + }, + "oid": { + "type": [ + "string", + "null" + ] + }, + "target": { + "type": [ + "string", + "null" + ] + }, + "is_symbolic": { + 
"type": "boolean" + }, + "is_branch": { + "type": "boolean" + }, + "is_remote": { + "type": "boolean" + }, + "is_tag": { + "type": "boolean" + }, + "is_note": { + "type": "boolean" + } + } + } + } + } + }, + "ApiResponse_Vec_RoomAiResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "array", + "items": { + "type": "object", + "required": [ + "room", + "model", + "call_count", + "use_exact", + "think", + "stream", + "created_at", + "updated_at" + ], + "properties": { + "room": { + "type": "string", + "format": "uuid" + }, + "model": { + "type": "string", + "format": "uuid" + }, + "version": { + "type": [ + "string", + "null" + ], + "format": "uuid" + }, + "call_count": { + "type": "integer", + "format": "int64" + }, + "last_call_at": { + "type": [ + "string", + "null" + ], + "format": "date-time" + }, + "history_limit": { + "type": [ + "integer", + "null" + ], + "format": "int64" + }, + "system_prompt": { + "type": [ + "string", + "null" + ] + }, + "temperature": { + "type": [ + "number", + "null" + ], + "format": "double" + }, + "max_tokens": { + "type": [ + "integer", + "null" + ], + "format": "int64" + }, + "use_exact": { + "type": "boolean" + }, + "think": { + "type": "boolean" + }, + "stream": { + "type": "boolean" + }, + "min_score": { + "type": [ + "number", + "null" + ], + "format": "float" + }, + "created_at": { + "type": "string", + "format": "date-time" + }, + "updated_at": { + "type": "string", + "format": "date-time" + } + } + } + } + } + }, + "ApiResponse_Vec_RoomCategoryResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "array", + "items": { + "type": "object", + "required": [ + "id", + "project", + "name", + "position", + 
"created_by", + "created_at" + ], + "properties": { + "id": { + "type": "string", + "format": "uuid" + }, + "project": { + "type": "string", + "format": "uuid" + }, + "name": { + "type": "string" + }, + "position": { + "type": "integer", + "format": "int32" + }, + "created_by": { + "type": "string", + "format": "uuid" + }, + "created_at": { + "type": "string", + "format": "date-time" + } + } + } + } + } + }, + "ApiResponse_Vec_RoomMemberResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "array", + "items": { + "type": "object", + "required": [ + "room", + "user", + "role", + "do_not_disturb" + ], + "properties": { + "room": { + "type": "string", + "format": "uuid" + }, + "user": { + "type": "string", + "format": "uuid" + }, + "user_info": { + "oneOf": [ + { + "type": "null" + }, + { + "$ref": "#/components/schemas/UserInfo" + } + ] + }, + "role": { + "type": "string" + }, + "first_msg_in": { + "type": [ + "string", + "null" + ], + "format": "date-time" + }, + "joined_at": { + "type": [ + "string", + "null" + ], + "format": "date-time" + }, + "last_read_seq": { + "type": [ + "integer", + "null" + ], + "format": "int64" + }, + "do_not_disturb": { + "type": "boolean" + }, + "dnd_start_hour": { + "type": [ + "integer", + "null" + ], + "format": "int32" + }, + "dnd_end_hour": { + "type": [ + "integer", + "null" + ], + "format": "int32" + } + } + } + } + } + }, + "ApiResponse_Vec_RoomPinResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "array", + "items": { + "type": "object", + "required": [ + "room", + "message", + "pinned_by", + "pinned_at" + ], + "properties": { + "room": { + "type": "string", + "format": "uuid" + }, + "message": { + "type": 
"string", + "format": "uuid" + }, + "pinned_by": { + "type": "string", + "format": "uuid" + }, + "pinned_at": { + "type": "string", + "format": "date-time" + } + } + } + } + } + }, + "ApiResponse_Vec_RoomResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "array", + "items": { + "type": "object", + "required": [ + "id", + "project", + "room_name", + "public", + "created_by", + "created_at", + "last_msg_at" + ], + "properties": { + "id": { + "type": "string", + "format": "uuid" + }, + "project": { + "type": "string", + "format": "uuid" + }, + "room_name": { + "type": "string" + }, + "public": { + "type": "boolean" + }, + "category": { + "type": [ + "string", + "null" + ], + "format": "uuid" + }, + "created_by": { + "type": "string", + "format": "uuid" + }, + "created_at": { + "type": "string", + "format": "date-time" + }, + "last_msg_at": { + "type": "string", + "format": "date-time" + }, + "unread_count": { + "type": "integer", + "format": "int64" + } + } + } + } + } + }, + "ApiResponse_Vec_RoomThreadResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "array", + "items": { + "type": "object", + "required": [ + "id", + "room", + "parent", + "created_by", + "participants", + "last_message_at", + "created_at", + "updated_at" + ], + "properties": { + "id": { + "type": "string", + "format": "uuid" + }, + "room": { + "type": "string", + "format": "uuid" + }, + "parent": { + "type": "integer", + "format": "int64" + }, + "created_by": { + "type": "string", + "format": "uuid" + }, + "participants": {}, + "last_message_at": { + "type": "string", + "format": "date-time" + }, + "last_message_preview": { + "type": [ + "string", + "null" + ] + }, + "created_at": 
{ + "type": "string", + "format": "date-time" + }, + "updated_at": { + "type": "string", + "format": "date-time" + } + } + } + } + } + }, + "ApiResponse_Vec_SkillResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "array", + "items": { + "type": "object", + "required": [ + "id", + "project_uuid", + "slug", + "name", + "source", + "content", + "metadata", + "enabled", + "created_at", + "updated_at" + ], + "properties": { + "id": { + "type": "integer", + "format": "int64" + }, + "project_uuid": { + "type": "string" + }, + "slug": { + "type": "string" + }, + "name": { + "type": "string" + }, + "description": { + "type": [ + "string", + "null" + ] + }, + "source": { + "type": "string" + }, + "repo_id": { + "type": [ + "string", + "null" + ] + }, + "commit_sha": { + "type": [ + "string", + "null" + ] + }, + "blob_hash": { + "type": [ + "string", + "null" + ] + }, + "content": { + "type": "string" + }, + "metadata": {}, + "enabled": { + "type": "boolean" + }, + "created_by": { + "type": [ + "string", + "null" + ] + }, + "created_at": { + "type": "string", + "format": "date-time" + }, + "updated_at": { + "type": "string", + "format": "date-time" + } + } + } + } + } + }, + "ApiResponse_Vec_String": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "array", + "items": { + "type": "string" + } + } + } + }, + "ApiResponse_Vec_SubscriptionInfo": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "array", + "items": { + "type": "object", + "required": [ + "id", + "user_uid", + "target_uid", + "subscribed_at", + "is_active" 
+ ], + "properties": { + "id": { + "type": "integer", + "format": "int64" + }, + "user_uid": { + "type": "string", + "format": "uuid" + }, + "target_uid": { + "type": "string", + "format": "uuid" + }, + "subscribed_at": { + "type": "string", + "format": "date-time" + }, + "is_active": { + "type": "boolean" + } + } + } + } + } + }, + "ApiResponse_Vec_TagInfoResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "array", + "items": { + "type": "object", + "required": [ + "name", + "oid", + "target", + "is_annotated" + ], + "properties": { + "name": { + "type": "string" + }, + "oid": { + "type": "string" + }, + "target": { + "type": "string" + }, + "is_annotated": { + "type": "boolean" + }, + "message": { + "type": [ + "string", + "null" + ] + }, + "tagger": { + "type": [ + "string", + "null" + ] + }, + "tagger_email": { + "type": [ + "string", + "null" + ] + } + } + } + } + } + }, + "ApiResponse_Vec_TreeEntryResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "array", + "items": { + "type": "object", + "required": [ + "name", + "oid", + "kind", + "filemode", + "is_binary" + ], + "properties": { + "name": { + "type": "string" + }, + "oid": { + "type": "string" + }, + "kind": { + "type": "string" + }, + "filemode": { + "type": "integer", + "format": "int32", + "minimum": 0 + }, + "is_binary": { + "type": "boolean" + } + } + } + } + } + }, + "ApiResponse_Vec_WatchUserInfo": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "array", + "items": { + "type": "object", + "required": [ + "uid", + "username", + 
"avatar_url" + ], + "properties": { + "uid": { + "type": "string", + "format": "uuid" + }, + "username": { + "type": "string" + }, + "avatar_url": { + "type": "string" + } + } + } + } + } + }, + "ApiResponse_WatchCountResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "count" + ], + "properties": { + "count": { + "type": "integer", + "format": "int64" + } + } + } + } + }, + "ApiResponse_WatchUserListResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "users" + ], + "properties": { + "users": { + "type": "array", + "items": { + "$ref": "#/components/schemas/WatchUserInfo" + } + } + } + } + } + }, + "ApiResponse_WebhookListResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "webhooks", + "total" + ], + "properties": { + "webhooks": { + "type": "array", + "items": { + "$ref": "#/components/schemas/WebhookResponse" + } + }, + "total": { + "type": "integer", + "minimum": 0 + } + } + } + } + }, + "ApiResponse_WebhookResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "id", + "repo_uuid", + "url", + "content_type", + "events", + "active", + "created_at", + "touch_count" + ], + "properties": { + "id": { + "type": "integer", + "format": "int64" + }, + "repo_uuid": { + "type": "string" + }, + "url": { + "type": "string" + }, + 
"content_type": { + "type": "string" + }, + "secret": { + "type": [ + "string", + "null" + ] + }, + "events": { + "$ref": "#/components/schemas/WebhookEvent" + }, + "active": { + "type": "boolean" + }, + "created_at": { + "type": "string", + "format": "date-time" + }, + "last_delivered_at": { + "type": [ + "string", + "null" + ], + "format": "date-time" + }, + "touch_count": { + "type": "integer", + "format": "int64" + } + } + } + } + }, + "ApiResponse_WorkspaceBillingCurrentResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "workspace_id", + "currency", + "monthly_quota", + "balance", + "total_spent", + "month_used", + "cycle_start_utc", + "cycle_end_utc", + "updated_at", + "created_at" + ], + "properties": { + "workspace_id": { + "type": "string", + "format": "uuid" + }, + "currency": { + "type": "string" + }, + "monthly_quota": { + "type": "number", + "format": "double" + }, + "balance": { + "type": "number", + "format": "double" + }, + "total_spent": { + "type": "number", + "format": "double" + }, + "month_used": { + "type": "number", + "format": "double" + }, + "cycle_start_utc": { + "type": "string", + "format": "date-time" + }, + "cycle_end_utc": { + "type": "string", + "format": "date-time" + }, + "updated_at": { + "type": "string", + "format": "date-time" + }, + "created_at": { + "type": "string", + "format": "date-time" + } + } + } + } + }, + "ApiResponse_WorkspaceBillingHistoryResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "page", + "per_page", + "total", + "list" + ], + "properties": { + "page": { + "type": "integer", + "format": "int64", + "minimum": 0 + }, + "per_page": { + 
"type": "integer", + "format": "int64", + "minimum": 0 + }, + "total": { + "type": "integer", + "format": "int64", + "minimum": 0 + }, + "list": { + "type": "array", + "items": { + "$ref": "#/components/schemas/WorkspaceBillingHistoryItem" + } + } + } + } + } + }, + "ApiResponse_WorkspaceInfoResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "id", + "slug", + "name", + "plan", + "member_count", + "created_at", + "updated_at" + ], + "properties": { + "id": { + "type": "string", + "format": "uuid" + }, + "slug": { + "type": "string" + }, + "name": { + "type": "string" + }, + "description": { + "type": [ + "string", + "null" + ] + }, + "avatar_url": { + "type": [ + "string", + "null" + ] + }, + "plan": { + "type": "string" + }, + "billing_email": { + "type": [ + "string", + "null" + ] + }, + "member_count": { + "type": "integer", + "format": "int64" + }, + "my_role": { + "type": [ + "string", + "null" + ] + }, + "created_at": { + "type": "string", + "format": "date-time" + }, + "updated_at": { + "type": "string", + "format": "date-time" + } + } + } + } + }, + "ApiResponse_WorkspaceListResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "workspaces", + "total" + ], + "properties": { + "workspaces": { + "type": "array", + "items": { + "$ref": "#/components/schemas/WorkspaceListItem" + } + }, + "total": { + "type": "integer", + "format": "int64", + "minimum": 0 + } + } + } + } + }, + "ApiResponse_WorkspaceMembersResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, 
+ "data": { + "type": "object", + "required": [ + "members", + "total", + "page", + "per_page" + ], + "properties": { + "members": { + "type": "array", + "items": { + "$ref": "#/components/schemas/WorkspaceMemberInfo" + } + }, + "total": { + "type": "integer", + "format": "int64", + "minimum": 0 + }, + "page": { + "type": "integer", + "format": "int64", + "minimum": 0 + }, + "per_page": { + "type": "integer", + "format": "int64", + "minimum": 0 + } + } + } + } + }, + "ApiResponse_WorkspaceProjectsResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "projects", + "total", + "page", + "per_page" + ], + "properties": { + "projects": { + "type": "array", + "items": { + "$ref": "#/components/schemas/WorkspaceProjectItem" + } + }, + "total": { + "type": "integer", + "format": "int64", + "minimum": 0 + }, + "page": { + "type": "integer", + "format": "int64", + "minimum": 0 + }, + "per_page": { + "type": "integer", + "format": "int64", + "minimum": 0 + } + } + } + } + }, + "ApiResponse_WorkspaceStatsResponse": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "project_count", + "member_count", + "recent_activities" + ], + "properties": { + "project_count": { + "type": "integer", + "format": "int64" + }, + "member_count": { + "type": "integer", + "format": "int64" + }, + "my_role": { + "type": [ + "string", + "null" + ] + }, + "recent_activities": { + "type": "array", + "items": { + "$ref": "#/components/schemas/WorkspaceActivityItem" + } + } + } + } + } + }, + "ApiResponse_bool": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": 
"int32" + }, + "message": { + "type": "string" + }, + "data": { + "type": "boolean" + } + } + }, + "ApprovalCheckResult": { + "type": "object", + "required": [ + "enough_approvals", + "approvals", + "required", + "reviewers" + ], + "properties": { + "enough_approvals": { + "type": "boolean" + }, + "approvals": { + "type": "integer", + "format": "int32" + }, + "required": { + "type": "integer", + "format": "int32" + }, + "reviewers": { + "type": "array", + "items": { + "$ref": "#/components/schemas/ReviewerInfo" + } + } + } + }, + "ArchiveCachedResponse": { + "type": "object", + "required": [ + "commit_oid", + "format", + "cached" + ], + "properties": { + "commit_oid": { + "type": "string" + }, + "format": { + "type": "string" + }, + "cached": { + "type": "boolean" + } + } + }, + "ArchiveEntryResponse": { + "type": "object", + "required": [ + "path", + "oid", + "size", + "mode" + ], + "properties": { + "path": { + "type": "string" + }, + "oid": { + "type": "string" + }, + "size": { + "type": "integer", + "format": "int64", + "minimum": 0 + }, + "mode": { + "type": "integer", + "format": "int32", + "minimum": 0 + } + } + }, + "ArchiveInvalidateAllResponse": { + "type": "object", + "required": [ + "commit_oid", + "count" + ], + "properties": { + "commit_oid": { + "type": "string" + }, + "count": { + "type": "integer", + "minimum": 0 + } + } + }, + "ArchiveInvalidateResponse": { + "type": "object", + "required": [ + "commit_oid", + "format", + "invalidated" + ], + "properties": { + "commit_oid": { + "type": "string" + }, + "format": { + "type": "string" + }, + "invalidated": { + "type": "boolean" + } + } + }, + "ArchiveListResponse": { + "type": "object", + "required": [ + "commit_oid", + "entries", + "total_entries" + ], + "properties": { + "commit_oid": { + "type": "string" + }, + "entries": { + "type": "array", + "items": { + "$ref": "#/components/schemas/ArchiveEntryResponse" + } + }, + "total_entries": { + "type": "integer", + "minimum": 0 + } + } + }, + 
"ArchiveResponse": { + "type": "object", + "required": [ + "commit_oid", + "format", + "size", + "data" + ], + "properties": { + "commit_oid": { + "type": "string" + }, + "format": { + "type": "string" + }, + "size": { + "type": "integer", + "minimum": 0 + }, + "data": { + "type": "string" + } + } + }, + "ArchiveSummaryResponse": { + "type": "object", + "required": [ + "commit_oid", + "format", + "total_entries", + "total_size" + ], + "properties": { + "commit_oid": { + "type": "string" + }, + "format": { + "type": "string" + }, + "total_entries": { + "type": "integer", + "minimum": 0 + }, + "total_size": { + "type": "integer", + "format": "int64", + "minimum": 0 + } + } + }, + "AuditLogParams": { + "type": "object", + "required": [ + "action" + ], + "properties": { + "action": { + "type": "string" + }, + "details": {} + } + }, + "AuditLogResponse": { + "type": "object", + "required": [ + "id", + "project_uid", + "actor_uid", + "action" + ], + "properties": { + "id": { + "type": "integer", + "format": "int64" + }, + "project_uid": { + "type": "string", + "format": "uuid" + }, + "actor_uid": { + "type": "string", + "format": "uuid" + }, + "action": { + "type": "string" + }, + "details": { + "type": "object" + } + } + }, + "BillingRecord": { + "type": "object", + "description": "Breakdown of a billing record.", + "required": [ + "cost", + "currency", + "input_tokens", + "output_tokens" + ], + "properties": { + "cost": { + "type": "number", + "format": "double", + "description": "Total cost in the billing currency." 
+ }, + "currency": { + "type": "string" + }, + "input_tokens": { + "type": "integer", + "format": "int64" + }, + "output_tokens": { + "type": "integer", + "format": "int64" + } + } + }, + "BlameHunkResponse": { + "type": "object", + "required": [ + "commit_oid", + "final_start_line", + "final_lines", + "orig_start_line", + "orig_lines", + "boundary" + ], + "properties": { + "commit_oid": { + "type": "string" + }, + "final_start_line": { + "type": "integer", + "format": "int32", + "minimum": 0 + }, + "final_lines": { + "type": "integer", + "format": "int32", + "minimum": 0 + }, + "orig_start_line": { + "type": "integer", + "format": "int32", + "minimum": 0 + }, + "orig_lines": { + "type": "integer", + "format": "int32", + "minimum": 0 + }, + "boundary": { + "type": "boolean" + }, + "orig_path": { + "type": [ + "string", + "null" + ] + } + } + }, + "BlobContentResponse": { + "type": "object", + "required": [ + "oid", + "size", + "is_binary", + "content" + ], + "properties": { + "oid": { + "type": "string" + }, + "size": { + "type": "integer", + "minimum": 0 + }, + "is_binary": { + "type": "boolean" + }, + "content": { + "type": "string" + } + } + }, + "BlobCreateRequest": { + "type": "object", + "required": [ + "data" + ], + "properties": { + "data": { + "type": "string" + } + } + }, + "BlobCreateResponse": { + "type": "object", + "required": [ + "oid", + "size" + ], + "properties": { + "oid": { + "type": "string" + }, + "size": { + "type": "integer", + "minimum": 0 + } + } + }, + "BlobExistsResponse": { + "type": "object", + "required": [ + "oid", + "exists" + ], + "properties": { + "oid": { + "type": "string" + }, + "exists": { + "type": "boolean" + } + } + }, + "BlobInfoResponse": { + "type": "object", + "required": [ + "oid", + "size", + "is_binary" + ], + "properties": { + "oid": { + "type": "string" + }, + "size": { + "type": "integer", + "minimum": 0 + }, + "is_binary": { + "type": "boolean" + } + } + }, + "BlobIsBinaryResponse": { + "type": "object", + 
"required": [ + "oid", + "is_binary" + ], + "properties": { + "oid": { + "type": "string" + }, + "is_binary": { + "type": "boolean" + } + } + }, + "BlobSizeResponse": { + "type": "object", + "required": [ + "oid", + "size" + ], + "properties": { + "oid": { + "type": "string" + }, + "size": { + "type": "integer", + "minimum": 0 + } + } + }, + "BoardResponse": { + "type": "object", + "required": [ + "id", + "project", + "name", + "created_by", + "created_at", + "updated_at" + ], + "properties": { + "id": { + "type": "string", + "format": "uuid" + }, + "project": { + "type": "string", + "format": "uuid" + }, + "name": { + "type": "string" + }, + "description": { + "type": [ + "string", + "null" + ] + }, + "created_by": { + "type": "string", + "format": "uuid" + }, + "created_at": { + "type": "string", + "format": "date-time" + }, + "updated_at": { + "type": "string", + "format": "date-time" + } + } + }, + "BoardWithColumnsResponse": { + "type": "object", + "required": [ + "board", + "columns" + ], + "properties": { + "board": { + "$ref": "#/components/schemas/BoardResponse" + }, + "columns": { + "type": "array", + "items": { + "$ref": "#/components/schemas/ColumnWithCardsResponse" + } + } + } + }, + "BranchCreateRequest": { + "type": "object", + "required": [ + "name" + ], + "properties": { + "name": { + "type": "string" + }, + "oid": { + "type": [ + "string", + "null" + ] + }, + "force": { + "type": "boolean" + } + } + }, + "BranchDiffResponse": { + "type": "object", + "required": [ + "ahead", + "behind", + "diverged" + ], + "properties": { + "ahead": { + "type": "integer", + "minimum": 0 + }, + "behind": { + "type": "integer", + "minimum": 0 + }, + "diverged": { + "type": "boolean" + } + } + }, + "BranchExistsResponse": { + "type": "object", + "required": [ + "name", + "exists" + ], + "properties": { + "name": { + "type": "string" + }, + "exists": { + "type": "boolean" + } + } + }, + "BranchFastForwardResponse": { + "type": "object", + "required": [ + "oid" + ], + 
"properties": { + "oid": { + "type": "string" + } + } + }, + "BranchInfoResponse": { + "type": "object", + "required": [ + "name", + "oid", + "is_head", + "is_remote", + "is_current" + ], + "properties": { + "name": { + "type": "string" + }, + "oid": { + "type": "string" + }, + "is_head": { + "type": "boolean" + }, + "is_remote": { + "type": "boolean" + }, + "is_current": { + "type": "boolean" + }, + "upstream": { + "type": [ + "string", + "null" + ] + } + } + }, + "BranchIsAncestorResponse": { + "type": "object", + "required": [ + "child", + "ancestor", + "is_ancestor" + ], + "properties": { + "child": { + "type": "string" + }, + "ancestor": { + "type": "string" + }, + "is_ancestor": { + "type": "boolean" + } + } + }, + "BranchIsConflictedResponse": { + "type": "object", + "required": [ + "is_conflicted" + ], + "properties": { + "is_conflicted": { + "type": "boolean" + } + } + }, + "BranchIsDetachedResponse": { + "type": "object", + "required": [ + "is_detached" + ], + "properties": { + "is_detached": { + "type": "boolean" + } + } + }, + "BranchIsHeadResponse": { + "type": "object", + "required": [ + "name", + "is_head" + ], + "properties": { + "name": { + "type": "string" + }, + "is_head": { + "type": "boolean" + } + } + }, + "BranchIsMergedResponse": { + "type": "object", + "required": [ + "branch", + "into", + "is_merged" + ], + "properties": { + "branch": { + "type": "string" + }, + "into": { + "type": "string" + }, + "is_merged": { + "type": "boolean" + } + } + }, + "BranchMergeBaseResponse": { + "type": "object", + "required": [ + "branch1", + "branch2", + "base" + ], + "properties": { + "branch1": { + "type": "string" + }, + "branch2": { + "type": "string" + }, + "base": { + "type": "string" + } + } + }, + "BranchMoveRequest": { + "type": "object", + "required": [ + "name", + "new_name" + ], + "properties": { + "name": { + "type": "string" + }, + "new_name": { + "type": "string" + }, + "force": { + "type": "boolean" + } + } + }, + 
"BranchProtectionCreateRequest": { + "type": "object", + "required": [ + "branch" + ], + "properties": { + "branch": { + "type": "string" + }, + "forbid_push": { + "type": "boolean" + }, + "forbid_pull": { + "type": "boolean" + }, + "forbid_merge": { + "type": "boolean" + }, + "forbid_deletion": { + "type": "boolean" + }, + "forbid_force_push": { + "type": "boolean" + }, + "forbid_tag_push": { + "type": "boolean" + }, + "required_approvals": { + "type": "integer", + "format": "int32" + }, + "dismiss_stale_reviews": { + "type": "boolean" + }, + "require_linear_history": { + "type": "boolean" + }, + "allow_fork_syncing": { + "type": "boolean" + } + } + }, + "BranchProtectionResponse": { + "type": "object", + "required": [ + "id", + "repo", + "branch", + "forbid_push", + "forbid_pull", + "forbid_merge", + "forbid_deletion", + "forbid_force_push", + "forbid_tag_push", + "required_approvals", + "dismiss_stale_reviews", + "require_linear_history", + "allow_fork_syncing" + ], + "properties": { + "id": { + "type": "integer", + "format": "int64" + }, + "repo": { + "type": "string", + "format": "uuid" + }, + "branch": { + "type": "string" + }, + "forbid_push": { + "type": "boolean" + }, + "forbid_pull": { + "type": "boolean" + }, + "forbid_merge": { + "type": "boolean" + }, + "forbid_deletion": { + "type": "boolean" + }, + "forbid_force_push": { + "type": "boolean" + }, + "forbid_tag_push": { + "type": "boolean" + }, + "required_approvals": { + "type": "integer", + "format": "int32" + }, + "dismiss_stale_reviews": { + "type": "boolean" + }, + "require_linear_history": { + "type": "boolean" + }, + "allow_fork_syncing": { + "type": "boolean" + } + } + }, + "BranchProtectionUpdateRequest": { + "type": "object", + "properties": { + "branch": { + "type": [ + "string", + "null" + ] + }, + "forbid_push": { + "type": [ + "boolean", + "null" + ] + }, + "forbid_pull": { + "type": [ + "boolean", + "null" + ] + }, + "forbid_merge": { + "type": [ + "boolean", + "null" + ] + }, + 
"forbid_deletion": { + "type": [ + "boolean", + "null" + ] + }, + "forbid_force_push": { + "type": [ + "boolean", + "null" + ] + }, + "forbid_tag_push": { + "type": [ + "boolean", + "null" + ] + }, + "required_approvals": { + "type": [ + "integer", + "null" + ], + "format": "int32" + }, + "dismiss_stale_reviews": { + "type": [ + "boolean", + "null" + ] + }, + "require_linear_history": { + "type": [ + "boolean", + "null" + ] + }, + "allow_fork_syncing": { + "type": [ + "boolean", + "null" + ] + } + } + }, + "BranchRenameRequest": { + "type": "object", + "required": [ + "old_name", + "new_name" + ], + "properties": { + "old_name": { + "type": "string" + }, + "new_name": { + "type": "string" + } + } + }, + "BranchSetUpstreamRequest": { + "type": "object", + "required": [ + "name" + ], + "properties": { + "name": { + "type": "string" + }, + "upstream": { + "type": [ + "string", + "null" + ] + } + } + }, + "BranchSummaryResponse": { + "type": "object", + "required": [ + "local_count", + "remote_count", + "all_count" + ], + "properties": { + "local_count": { + "type": "integer", + "minimum": 0 + }, + "remote_count": { + "type": "integer", + "minimum": 0 + }, + "all_count": { + "type": "integer", + "minimum": 0 + } + } + }, + "BranchTrackingDiffResponse": { + "type": "object", + "required": [ + "name", + "ahead", + "behind" + ], + "properties": { + "name": { + "type": "string" + }, + "ahead": { + "type": "integer", + "minimum": 0 + }, + "behind": { + "type": "integer", + "minimum": 0 + } + } + }, + "CaptchaQuery": { + "type": "object", + "required": [ + "w", + "h", + "dark", + "rsa" + ], + "properties": { + "w": { + "type": "integer", + "format": "int32", + "minimum": 0 + }, + "h": { + "type": "integer", + "format": "int32", + "minimum": 0 + }, + "dark": { + "type": "boolean" + }, + "rsa": { + "type": "boolean" + } + } + }, + "CaptchaResponse": { + "type": "object", + "required": [ + "base64", + "req" + ], + "properties": { + "base64": { + "type": "string" + }, + "rsa": { 
+ "oneOf": [ + { + "type": "null" + }, + { + "$ref": "#/components/schemas/RsaResponse" + } + ] + }, + "req": { + "$ref": "#/components/schemas/CaptchaQuery" + } + } + }, + "CardResponse": { + "type": "object", + "required": [ + "id", + "column", + "title", + "position", + "created_by", + "created_at", + "updated_at" + ], + "properties": { + "id": { + "type": "string", + "format": "uuid" + }, + "column": { + "type": "string", + "format": "uuid" + }, + "issue_id": { + "type": [ + "integer", + "null" + ], + "format": "int64" + }, + "project": { + "type": [ + "string", + "null" + ], + "format": "uuid" + }, + "title": { + "type": "string" + }, + "description": { + "type": [ + "string", + "null" + ] + }, + "position": { + "type": "integer", + "format": "int32" + }, + "assignee_id": { + "type": [ + "string", + "null" + ], + "format": "uuid" + }, + "due_date": { + "type": [ + "string", + "null" + ], + "format": "date-time" + }, + "priority": { + "type": [ + "string", + "null" + ] + }, + "created_by": { + "type": "string", + "format": "uuid" + }, + "created_at": { + "type": "string", + "format": "date-time" + }, + "updated_at": { + "type": "string", + "format": "date-time" + } + } + }, + "ChangePasswordParams": { + "type": "object", + "required": [ + "old_password", + "new_password" + ], + "properties": { + "old_password": { + "type": "string" + }, + "new_password": { + "type": "string" + } + } + }, + "ColumnResponse": { + "type": "object", + "required": [ + "id", + "board", + "name", + "position" + ], + "properties": { + "id": { + "type": "string", + "format": "uuid" + }, + "board": { + "type": "string", + "format": "uuid" + }, + "name": { + "type": "string" + }, + "position": { + "type": "integer", + "format": "int32" + }, + "wip_limit": { + "type": [ + "integer", + "null" + ], + "format": "int32" + }, + "color": { + "type": [ + "string", + "null" + ] + } + } + }, + "ColumnWithCardsResponse": { + "type": "object", + "required": [ + "column", + "cards" + ], + 
"properties": { + "column": { + "$ref": "#/components/schemas/ColumnResponse" + }, + "cards": { + "type": "array", + "items": { + "$ref": "#/components/schemas/CardResponse" + } + } + } + }, + "CommentCreated": { + "type": "object", + "required": [ + "path", + "severity" + ], + "properties": { + "path": { + "type": "string" + }, + "line": { + "type": [ + "integer", + "null" + ], + "format": "int64" + }, + "severity": { + "type": "string" + } + } + }, + "CommitAmendRequest": { + "type": "object", + "required": [ + "oid" + ], + "properties": { + "oid": { + "type": "string" + }, + "author_name": { + "type": [ + "string", + "null" + ] + }, + "author_email": { + "type": [ + "string", + "null" + ] + }, + "committer_name": { + "type": [ + "string", + "null" + ] + }, + "committer_email": { + "type": [ + "string", + "null" + ] + }, + "message": { + "type": [ + "string", + "null" + ] + }, + "message_encoding": { + "type": [ + "string", + "null" + ] + }, + "tree_id": { + "type": [ + "string", + "null" + ] + }, + "update_ref": { + "type": [ + "string", + "null" + ] + } + } + }, + "CommitAuthorResponse": { + "type": "object", + "required": [ + "oid", + "author" + ], + "properties": { + "oid": { + "type": "string" + }, + "author": { + "$ref": "#/components/schemas/CommitSignatureResponse" + } + } + }, + "CommitBranchesResponse": { + "type": "object", + "properties": { + "data": { + "type": "object", + "additionalProperties": { + "type": "array", + "items": { + "type": "string" + } + }, + "propertyNames": { + "type": "string" + } + } + } + }, + "CommitCherryPickAbortRequest": { + "type": "object", + "properties": { + "reset_type": { + "type": [ + "string", + "null" + ] + } + } + }, + "CommitCherryPickRequest": { + "type": "object", + "required": [ + "cherrypick_oid", + "author_name", + "author_email", + "committer_name", + "committer_email" + ], + "properties": { + "cherrypick_oid": { + "type": "string" + }, + "author_name": { + "type": "string" + }, + "author_email": { + "type": 
"string" + }, + "committer_name": { + "type": "string" + }, + "committer_email": { + "type": "string" + }, + "message": { + "type": [ + "string", + "null" + ] + }, + "mainline": { + "type": [ + "integer", + "null" + ], + "format": "int32", + "minimum": 0 + }, + "update_ref": { + "type": [ + "string", + "null" + ] + } + } + }, + "CommitCountResponse": { + "type": "object", + "required": [ + "count" + ], + "properties": { + "count": { + "type": "integer", + "minimum": 0 + } + } + }, + "CommitCreateRequest": { + "type": "object", + "required": [ + "author_name", + "author_email", + "committer_name", + "committer_email", + "message", + "tree_id", + "parent_ids" + ], + "properties": { + "author_name": { + "type": "string" + }, + "author_email": { + "type": "string" + }, + "committer_name": { + "type": "string" + }, + "committer_email": { + "type": "string" + }, + "message": { + "type": "string" + }, + "tree_id": { + "type": "string" + }, + "parent_ids": { + "type": "array", + "items": { + "type": "string" + } + }, + "update_ref": { + "type": [ + "string", + "null" + ] + } + } + }, + "CommitCreateResponse": { + "type": "object", + "required": [ + "oid" + ], + "properties": { + "oid": { + "type": "string" + } + } + }, + "CommitExistsResponse": { + "type": "object", + "required": [ + "oid", + "exists" + ], + "properties": { + "oid": { + "type": "string" + }, + "exists": { + "type": "boolean" + } + } + }, + "CommitGraphLineResponse": { + "type": "object", + "required": [ + "oid", + "graph_chars", + "refs", + "short_message" + ], + "properties": { + "oid": { + "type": "string" + }, + "graph_chars": { + "type": "string" + }, + "refs": { + "type": "string" + }, + "short_message": { + "type": "string" + } + } + }, + "CommitGraphReactCommit": { + "type": "object", + "required": [ + "oid", + "hash_abbrev", + "subject", + "author_name", + "author_email", + "author_timestamp", + "author_time_offset", + "committer_name", + "committer_email", + "committer_timestamp", + 
"committer_time_offset", + "parent_hashes", + "lane_index", + "graph_chars", + "refs", + "tags", + "branches" + ], + "properties": { + "oid": { + "type": "string" + }, + "hash_abbrev": { + "type": "string" + }, + "subject": { + "type": "string" + }, + "body": { + "type": [ + "string", + "null" + ] + }, + "author_name": { + "type": "string" + }, + "author_email": { + "type": "string" + }, + "author_timestamp": { + "type": "integer", + "format": "int64" + }, + "author_time_offset": { + "type": "integer", + "format": "int32" + }, + "committer_name": { + "type": "string" + }, + "committer_email": { + "type": "string" + }, + "committer_timestamp": { + "type": "integer", + "format": "int64" + }, + "committer_time_offset": { + "type": "integer", + "format": "int32" + }, + "parent_hashes": { + "type": "array", + "items": { + "type": "string" + } + }, + "encoding": { + "type": [ + "string", + "null" + ] + }, + "lane_index": { + "type": "integer", + "description": "0-based lane index used to assign branch color.", + "minimum": 0 + }, + "graph_chars": { + "type": "string", + "description": "Raw ASCII graph characters for supplementary rendering." + }, + "refs": { + "type": "string", + "description": "Parsed refs string (e.g. \"main, v1.0.0\")." + }, + "tags": { + "type": "array", + "items": { + "type": "string" + }, + "description": "Tag names present on this commit." + }, + "branches": { + "type": "array", + "items": { + "type": "string" + }, + "description": "Branch names this commit is the tip of." 
+ } + } + }, + "CommitGraphReactResponse": { + "type": "object", + "required": [ + "commits", + "lanes", + "max_parents" + ], + "properties": { + "commits": { + "type": "array", + "items": { + "$ref": "#/components/schemas/CommitGraphReactCommit" + } + }, + "lanes": { + "type": "array", + "items": { + "$ref": "#/components/schemas/LaneInfo" + } + }, + "max_parents": { + "type": "integer", + "minimum": 0 + } + } + }, + "CommitGraphResponse": { + "type": "object", + "required": [ + "lines", + "max_parents" + ], + "properties": { + "lines": { + "type": "array", + "items": { + "$ref": "#/components/schemas/CommitGraphLineResponse" + } + }, + "max_parents": { + "type": "integer", + "minimum": 0 + } + } + }, + "CommitIsCommitResponse": { + "type": "object", + "required": [ + "oid", + "is_commit" + ], + "properties": { + "oid": { + "type": "string" + }, + "is_commit": { + "type": "boolean" + } + } + }, + "CommitIsMergeResponse": { + "type": "object", + "required": [ + "oid", + "is_merge" + ], + "properties": { + "oid": { + "type": "string" + }, + "is_merge": { + "type": "boolean" + } + } + }, + "CommitIsTipResponse": { + "type": "object", + "required": [ + "oid", + "is_tip" + ], + "properties": { + "oid": { + "type": "string" + }, + "is_tip": { + "type": "boolean" + } + } + }, + "CommitLogResponse": { + "type": "object", + "required": [ + "data", + "total", + "page", + "per_page", + "total_pages" + ], + "properties": { + "data": { + "type": "array", + "items": { + "$ref": "#/components/schemas/CommitMetaResponse" + } + }, + "total": { + "type": "integer", + "minimum": 0 + }, + "page": { + "type": "integer", + "minimum": 0 + }, + "per_page": { + "type": "integer", + "minimum": 0 + }, + "total_pages": { + "type": "integer", + "minimum": 0 + } + } + }, + "CommitMessageResponse": { + "type": "object", + "required": [ + "oid", + "message" + ], + "properties": { + "oid": { + "type": "string" + }, + "message": { + "type": "string" + } + } + }, + "CommitMetaResponse": { + "type": 
"object", + "required": [ + "oid", + "message", + "summary", + "author", + "committer", + "tree_id", + "parent_ids" + ], + "properties": { + "oid": { + "type": "string" + }, + "message": { + "type": "string" + }, + "summary": { + "type": "string" + }, + "author": { + "$ref": "#/components/schemas/CommitSignatureResponse" + }, + "committer": { + "$ref": "#/components/schemas/CommitSignatureResponse" + }, + "tree_id": { + "type": "string" + }, + "parent_ids": { + "type": "array", + "items": { + "type": "string" + } + }, + "encoding": { + "type": [ + "string", + "null" + ] + } + } + }, + "CommitParentCountResponse": { + "type": "object", + "required": [ + "oid", + "parent_count" + ], + "properties": { + "oid": { + "type": "string" + }, + "parent_count": { + "type": "integer", + "minimum": 0 + } + } + }, + "CommitParentIdsResponse": { + "type": "object", + "required": [ + "oid", + "parent_ids" + ], + "properties": { + "oid": { + "type": "string" + }, + "parent_ids": { + "type": "array", + "items": { + "type": "string" + } + } + } + }, + "CommitRefCountResponse": { + "type": "object", + "required": [ + "oid", + "ref_count" + ], + "properties": { + "oid": { + "type": "string" + }, + "ref_count": { + "type": "integer", + "minimum": 0 + } + } + }, + "CommitRefInfoResponse": { + "type": "object", + "required": [ + "name", + "target", + "is_remote", + "is_tag" + ], + "properties": { + "name": { + "type": "string" + }, + "target": { + "type": "string" + }, + "is_remote": { + "type": "boolean" + }, + "is_tag": { + "type": "boolean" + } + } + }, + "CommitReflogEntryResponse": { + "type": "object", + "required": [ + "new_sha", + "old_sha", + "committer_name", + "committer_email", + "time_secs", + "ref_name" + ], + "properties": { + "new_sha": { + "type": "string" + }, + "old_sha": { + "type": "string" + }, + "committer_name": { + "type": "string" + }, + "committer_email": { + "type": "string" + }, + "time_secs": { + "type": "integer", + "format": "int64" + }, + "message": { + 
"type": [ + "string", + "null" + ] + }, + "ref_name": { + "type": "string" + } + } + }, + "CommitRevertAbortRequest": { + "type": "object", + "properties": { + "reset_type": { + "type": [ + "string", + "null" + ] + } + } + }, + "CommitRevertRequest": { + "type": "object", + "required": [ + "revert_oid", + "author_name", + "author_email", + "committer_name", + "committer_email" + ], + "properties": { + "revert_oid": { + "type": "string" + }, + "author_name": { + "type": "string" + }, + "author_email": { + "type": "string" + }, + "committer_name": { + "type": "string" + }, + "committer_email": { + "type": "string" + }, + "message": { + "type": [ + "string", + "null" + ] + }, + "mainline": { + "type": [ + "integer", + "null" + ], + "format": "int32", + "minimum": 0 + }, + "update_ref": { + "type": [ + "string", + "null" + ] + } + } + }, + "CommitShortIdResponse": { + "type": "object", + "required": [ + "oid", + "short_id" + ], + "properties": { + "oid": { + "type": "string" + }, + "short_id": { + "type": "string" + } + } + }, + "CommitSignatureResponse": { + "type": "object", + "required": [ + "name", + "email", + "time_secs", + "offset_minutes" + ], + "properties": { + "name": { + "type": "string" + }, + "email": { + "type": "string" + }, + "time_secs": { + "type": "integer", + "format": "int64" + }, + "offset_minutes": { + "type": "integer", + "format": "int32" + } + } + }, + "CommitSummaryResponse": { + "type": "object", + "required": [ + "oid", + "summary" + ], + "properties": { + "oid": { + "type": "string" + }, + "summary": { + "type": "string" + } + } + }, + "CommitTagsResponse": { + "type": "object", + "properties": { + "data": { + "type": "object", + "additionalProperties": { + "type": "array", + "items": { + "type": "string" + } + }, + "propertyNames": { + "type": "string" + } + } + } + }, + "CommitTreeIdResponse": { + "type": "object", + "required": [ + "oid", + "tree_id" + ], + "properties": { + "oid": { + "type": "string" + }, + "tree_id": { + "type": 
"string" + } + } + }, + "ConfigBoolResponse": { + "type": "object", + "required": [ + "key", + "value" + ], + "properties": { + "key": { + "type": "string" + }, + "value": { + "type": "boolean" + } + } + }, + "ConfigEntryResponse": { + "type": "object", + "required": [ + "name", + "value" + ], + "properties": { + "name": { + "type": "string" + }, + "value": { + "type": "string" + } + } + }, + "ConfigSetRequest": { + "type": "object", + "required": [ + "key", + "value" + ], + "properties": { + "key": { + "type": "string" + }, + "value": { + "type": "string" + } + } + }, + "ConfigSnapshotResponse": { + "type": "object", + "required": [ + "entries" + ], + "properties": { + "entries": { + "type": "array", + "items": { + "$ref": "#/components/schemas/ConfigEntryResponse" + } + } + } + }, + "ContextMe": { + "type": "object", + "required": [ + "uid", + "username", + "has_unread_notifications" + ], + "properties": { + "uid": { + "type": "string", + "format": "uuid" + }, + "username": { + "type": "string" + }, + "display_name": { + "type": [ + "string", + "null" + ] + }, + "avatar_url": { + "type": [ + "string", + "null" + ] + }, + "has_unread_notifications": { + "type": "integer", + "format": "int64", + "minimum": 0 + } + } + }, + "ContributionHeatmapItem": { + "type": "object", + "required": [ + "date", + "count" + ], + "properties": { + "date": { + "type": "string" + }, + "count": { + "type": "integer", + "format": "int32" + } + } + }, + "ContributionHeatmapQuery": { + "type": "object", + "properties": { + "start_date": { + "type": [ + "string", + "null" + ] + }, + "end_date": { + "type": [ + "string", + "null" + ] + } + } + }, + "ContributionHeatmapResponse": { + "type": "object", + "required": [ + "username", + "total_contributions", + "heatmap", + "start_date", + "end_date" + ], + "properties": { + "username": { + "type": "string" + }, + "total_contributions": { + "type": "integer", + "format": "int64" + }, + "heatmap": { + "type": "array", + "items": { + "$ref": 
"#/components/schemas/ContributionHeatmapItem" + } + }, + "start_date": { + "type": "string" + }, + "end_date": { + "type": "string" + } + } + }, + "ContributorStats": { + "type": "object", + "required": [ + "name", + "email", + "commits" + ], + "properties": { + "name": { + "type": "string" + }, + "email": { + "type": "string" + }, + "commits": { + "type": "integer", + "minimum": 0 + }, + "first_commit_at": { + "type": [ + "integer", + "null" + ], + "format": "int64" + }, + "last_commit_at": { + "type": [ + "integer", + "null" + ], + "format": "int64" + } + } + }, + "ContributorsResponse": { + "type": "object", + "required": [ + "total", + "contributors" + ], + "properties": { + "total": { + "type": "integer", + "minimum": 0 + }, + "contributors": { + "type": "array", + "items": { + "$ref": "#/components/schemas/ContributorStats" + } + } + } + }, + "CreateAccessKeyParams": { + "type": "object", + "required": [ + "name", + "scopes" + ], + "properties": { + "name": { + "type": "string" + }, + "scopes": { + "type": "array", + "items": { + "type": "string" + } + }, + "expires_at": { + "type": [ + "string", + "null" + ], + "format": "date-time" + } + } + }, + "CreateBoardParams": { + "type": "object", + "required": [ + "name" + ], + "properties": { + "name": { + "type": "string" + }, + "description": { + "type": [ + "string", + "null" + ] + } + } + }, + "CreateCardParams": { + "type": "object", + "required": [ + "column_id", + "title" + ], + "properties": { + "column_id": { + "type": "string", + "format": "uuid" + }, + "title": { + "type": "string" + }, + "description": { + "type": [ + "string", + "null" + ] + }, + "issue_id": { + "type": [ + "integer", + "null" + ], + "format": "int64" + }, + "assignee_id": { + "type": [ + "string", + "null" + ], + "format": "uuid" + }, + "due_date": { + "type": [ + "string", + "null" + ], + "format": "date-time" + }, + "priority": { + "type": [ + "string", + "null" + ] + } + } + }, + "CreateColumnParams": { + "type": "object", + 
"required": [ + "name" + ], + "properties": { + "name": { + "type": "string" + }, + "position": { + "type": "integer", + "format": "int32" + }, + "wip_limit": { + "type": [ + "integer", + "null" + ], + "format": "int32" + }, + "color": { + "type": [ + "string", + "null" + ] + } + } + }, + "CreateLabelParams": { + "type": "object", + "required": [ + "name", + "color" + ], + "properties": { + "name": { + "type": "string" + }, + "color": { + "type": "string" + }, + "description": { + "type": [ + "string", + "null" + ] + } + } + }, + "CreateLabelRequest": { + "type": "object", + "required": [ + "name", + "color" + ], + "properties": { + "name": { + "type": "string" + }, + "color": { + "type": "string" + } + } + }, + "CreateModelCapabilityRequest": { + "type": "object", + "required": [ + "model_version_id", + "capability" + ], + "properties": { + "model_version_id": { + "type": "integer", + "format": "int64" + }, + "capability": { + "type": "string" + }, + "is_supported": { + "type": "boolean" + } + } + }, + "CreateModelParameterProfileRequest": { + "type": "object", + "required": [ + "model_version_id", + "temperature_min", + "temperature_max", + "top_p_min", + "top_p_max" + ], + "properties": { + "model_version_id": { + "type": "string", + "format": "uuid" + }, + "temperature_min": { + "type": "number", + "format": "double" + }, + "temperature_max": { + "type": "number", + "format": "double" + }, + "top_p_min": { + "type": "number", + "format": "double" + }, + "top_p_max": { + "type": "number", + "format": "double" + }, + "frequency_penalty_supported": { + "type": "boolean" + }, + "presence_penalty_supported": { + "type": "boolean" + } + } + }, + "CreateModelPricingRequest": { + "type": "object", + "required": [ + "model_version_id", + "input_price_per_1k_tokens", + "output_price_per_1k_tokens", + "currency", + "effective_from" + ], + "properties": { + "model_version_id": { + "type": "string", + "format": "uuid" + }, + "input_price_per_1k_tokens": { + "type": "string" 
+ }, + "output_price_per_1k_tokens": { + "type": "string" + }, + "currency": { + "type": "string" + }, + "effective_from": { + "type": "string", + "format": "date-time" + } + } + }, + "CreateModelRequest": { + "type": "object", + "required": [ + "provider_id", + "name", + "modality", + "capability", + "context_length" + ], + "properties": { + "provider_id": { + "type": "string", + "format": "uuid" + }, + "name": { + "type": "string" + }, + "modality": { + "type": "string" + }, + "capability": { + "type": "string" + }, + "context_length": { + "type": "integer", + "format": "int64" + }, + "max_output_tokens": { + "type": [ + "integer", + "null" + ], + "format": "int64" + }, + "training_cutoff": { + "type": [ + "string", + "null" + ], + "format": "date-time" + }, + "is_open_source": { + "type": "boolean" + } + } + }, + "CreateModelVersionRequest": { + "type": "object", + "required": [ + "model_id", + "version" + ], + "properties": { + "model_id": { + "type": "string", + "format": "uuid" + }, + "version": { + "type": "string" + }, + "release_date": { + "type": [ + "string", + "null" + ], + "format": "date-time" + }, + "change_log": { + "type": [ + "string", + "null" + ] + }, + "is_default": { + "type": "boolean" + } + } + }, + "CreateProviderRequest": { + "type": "object", + "required": [ + "name", + "display_name" + ], + "properties": { + "name": { + "type": "string" + }, + "display_name": { + "type": "string" + }, + "website": { + "type": [ + "string", + "null" + ] + } + } + }, + "CreateSkillRequest": { + "type": "object", + "required": [ + "slug", + "content" + ], + "properties": { + "slug": { + "type": "string" + }, + "name": { + "type": [ + "string", + "null" + ] + }, + "description": { + "type": [ + "string", + "null" + ] + }, + "content": { + "type": "string" + }, + "metadata": {} + } + }, + "CreateWebhookParams": { + "type": "object", + "required": [ + "url" + ], + "properties": { + "url": { + "type": "string" + }, + "content_type": { + "type": [ + "string", + 
"null" + ] + }, + "secret": { + "type": [ + "string", + "null" + ] + }, + "insecure_ssl": { + "type": [ + "boolean", + "null" + ] + }, + "events": { + "$ref": "#/components/schemas/WebhookEvent" + }, + "active": { + "type": "boolean" + } + } + }, + "DeleteSkillResponse": { + "type": "object", + "required": [ + "deleted", + "slug" + ], + "properties": { + "deleted": { + "type": "boolean" + }, + "slug": { + "type": "string" + } + } + }, + "DescriptionQuery": { + "type": "object", + "required": [ + "description" + ], + "properties": { + "description": { + "type": "string" + } + } + }, + "DescriptionResponse": { + "type": "object", + "required": [ + "description" + ], + "properties": { + "description": { + "type": "string" + } + } + }, + "DiffDeltaResponse": { + "type": "object", + "required": [ + "status", + "old_file", + "new_file", + "nfiles", + "hunks", + "lines" + ], + "properties": { + "status": { + "type": "string" + }, + "old_file": { + "$ref": "#/components/schemas/DiffFileResponse" + }, + "new_file": { + "$ref": "#/components/schemas/DiffFileResponse" + }, + "nfiles": { + "type": "integer", + "format": "int32", + "minimum": 0 + }, + "hunks": { + "type": "array", + "items": { + "$ref": "#/components/schemas/DiffHunkResponse" + } + }, + "lines": { + "type": "array", + "items": { + "$ref": "#/components/schemas/DiffLineResponse" + } + } + } + }, + "DiffFileResponse": { + "type": "object", + "required": [ + "size", + "is_binary" + ], + "properties": { + "oid": { + "type": [ + "string", + "null" + ] + }, + "path": { + "type": [ + "string", + "null" + ] + }, + "size": { + "type": "integer", + "format": "int64", + "minimum": 0 + }, + "is_binary": { + "type": "boolean" + } + } + }, + "DiffHunkResponse": { + "type": "object", + "required": [ + "old_start", + "old_lines", + "new_start", + "new_lines", + "header" + ], + "properties": { + "old_start": { + "type": "integer", + "format": "int32", + "minimum": 0 + }, + "old_lines": { + "type": "integer", + "format": 
"int32", + "minimum": 0 + }, + "new_start": { + "type": "integer", + "format": "int32", + "minimum": 0 + }, + "new_lines": { + "type": "integer", + "format": "int32", + "minimum": 0 + }, + "header": { + "type": "string" + } + } + }, + "DiffLineResponse": { + "type": "object", + "required": [ + "content", + "origin", + "num_lines", + "content_offset" + ], + "properties": { + "content": { + "type": "string" + }, + "origin": { + "type": "string" + }, + "old_lineno": { + "type": [ + "integer", + "null" + ], + "format": "int32", + "minimum": 0 + }, + "new_lineno": { + "type": [ + "integer", + "null" + ], + "format": "int32", + "minimum": 0 + }, + "num_lines": { + "type": "integer", + "format": "int32", + "minimum": 0 + }, + "content_offset": { + "type": "integer", + "format": "int64" + } + } + }, + "DiffPatchIdResponse": { + "type": "object", + "required": [ + "old_tree", + "new_tree", + "patch_id" + ], + "properties": { + "old_tree": { + "type": "string" + }, + "new_tree": { + "type": "string" + }, + "patch_id": { + "type": "string" + } + } + }, + "DiffResultResponse": { + "type": "object", + "required": [ + "stats", + "deltas" + ], + "properties": { + "stats": { + "$ref": "#/components/schemas/DiffStatsResponse" + }, + "deltas": { + "type": "array", + "items": { + "$ref": "#/components/schemas/DiffDeltaResponse" + } + } + } + }, + "DiffStatsResponse": { + "type": "object", + "required": [ + "files_changed", + "insertions", + "deletions" + ], + "properties": { + "files_changed": { + "type": "integer", + "minimum": 0 + }, + "insertions": { + "type": "integer", + "minimum": 0 + }, + "deletions": { + "type": "integer", + "minimum": 0 + } + } + }, + "Disable2FAParams": { + "type": "object", + "required": [ + "code", + "password" + ], + "properties": { + "code": { + "type": "string" + }, + "password": { + "type": "string" + } + } + }, + "EmailChangeRequest": { + "type": "object", + "required": [ + "new_email", + "password" + ], + "properties": { + "new_email": { + "type": 
"string" + }, + "password": { + "type": "string" + } + } + }, + "EmailResponse": { + "type": "object", + "properties": { + "email": { + "type": [ + "string", + "null" + ] + } + } + }, + "EmailVerifyRequest": { + "type": "object", + "required": [ + "token" + ], + "properties": { + "token": { + "type": "string" + } + } + }, + "Enable2FAResponse": { + "type": "object", + "required": [ + "secret", + "qr_code", + "backup_codes" + ], + "properties": { + "secret": { + "type": "string" + }, + "qr_code": { + "type": "string" + }, + "backup_codes": { + "type": "array", + "items": { + "type": "string" + } + } + } + }, + "ExchangeProjectName": { + "type": "object", + "required": [ + "name" + ], + "properties": { + "name": { + "type": "string" + } + } + }, + "ExchangeProjectTitle": { + "type": "object", + "properties": { + "display_name": { + "type": [ + "string", + "null" + ] + }, + "description": { + "type": [ + "string", + "null" + ] + } + } + }, + "ExchangeProjectVisibility": { + "type": "object", + "required": [ + "is_public" + ], + "properties": { + "is_public": { + "type": "boolean" + } + } + }, + "GeneratePrDescriptionRequest": { + "type": "object", + "description": "Request body for generating a PR description.", + "properties": { + "pr_number": { + "type": [ + "integer", + "null" + ], + "format": "int64", + "description": "PR number to generate description for." + }, + "model_id": { + "type": [ + "string", + "null" + ], + "format": "uuid", + "description": "Override the default AI model for this generation." + } + } + }, + "GeneratePrDescriptionResponse": { + "type": "object", + "description": "Response from the AI description generation endpoint.", + "required": [ + "description", + "markdown_body" + ], + "properties": { + "description": { + "$ref": "#/components/schemas/PrDescription" + }, + "markdown_body": { + "type": "string", + "description": "Markdown-formatted description ready to paste into the PR body." 
+ }, + "billing": { + "oneOf": [ + { + "type": "null" + }, + { + "$ref": "#/components/schemas/BillingRecord" + } + ] + } + } + }, + "Get2FAStatusResponse": { + "type": "object", + "required": [ + "is_enabled", + "has_backup_codes" + ], + "properties": { + "is_enabled": { + "type": "boolean" + }, + "method": { + "type": [ + "string", + "null" + ] + }, + "has_backup_codes": { + "type": "boolean" + } + } + }, + "GitInitRequest": { + "type": "object", + "required": [ + "path" + ], + "properties": { + "path": { + "type": "string" + }, + "bare": { + "type": "boolean" + }, + "initial_branch": { + "type": [ + "string", + "null" + ] + } + } + }, + "GitInitResponse": { + "type": "object", + "required": [ + "path", + "is_bare" + ], + "properties": { + "path": { + "type": "string" + }, + "is_bare": { + "type": "boolean" + } + } + }, + "GitReadmeQuery": { + "type": "object", + "properties": { + "ref": { + "type": [ + "string", + "null" + ] + } + } + }, + "GitReadmeResponse": { + "type": "object", + "properties": { + "path": { + "type": [ + "string", + "null" + ] + }, + "content": { + "type": [ + "string", + "null" + ] + }, + "size": { + "type": [ + "integer", + "null" + ], + "minimum": 0 + }, + "encoding": { + "type": [ + "string", + "null" + ] + }, + "truncated": { + "type": "boolean" + }, + "is_binary": { + "type": "boolean" + } + } + }, + "GitUpdateRepoRequest": { + "type": "object", + "properties": { + "default_branch": { + "type": [ + "string", + "null" + ] + } + } + }, + "GitWatchRequest": { + "type": "object", + "properties": { + "show_dashboard": { + "type": "boolean" + }, + "notify_email": { + "type": "boolean" + } + } + }, + "InvitationListResponse": { + "type": "object", + "required": [ + "invitations", + "total", + "page", + "per_page" + ], + "properties": { + "invitations": { + "type": "array", + "items": { + "$ref": "#/components/schemas/InvitationResponse" + } + }, + "total": { + "type": "integer", + "format": "int64", + "minimum": 0 + }, + "page": { + "type": 
"integer", + "format": "int64", + "minimum": 0 + }, + "per_page": { + "type": "integer", + "format": "int64", + "minimum": 0 + } + } + }, + "InvitationResponse": { + "type": "object", + "required": [ + "project_uid", + "user_uid", + "invited_by", + "scope", + "accepted", + "rejected", + "created_at" + ], + "properties": { + "project_uid": { + "type": "string", + "format": "uuid" + }, + "user_uid": { + "type": "string", + "format": "uuid" + }, + "invited_by": { + "type": "string", + "format": "uuid" + }, + "scope": { + "type": "string" + }, + "accepted": { + "type": "boolean" + }, + "accepted_at": { + "type": [ + "string", + "null" + ], + "format": "date-time" + }, + "rejected": { + "type": "boolean" + }, + "rejected_at": { + "type": [ + "string", + "null" + ], + "format": "date-time" + }, + "created_at": { + "type": "string", + "format": "date-time" + } + } + }, + "InviteUserRequest": { + "type": "object", + "required": [ + "email", + "scope" + ], + "properties": { + "email": { + "type": "string" + }, + "scope": { + "$ref": "#/components/schemas/MemberRole" + } + } + }, + "IsLikeResponse": { + "type": "object", + "required": [ + "is_like" + ], + "properties": { + "is_like": { + "type": "boolean" + } + } + }, + "IsWatchResponse": { + "type": "object", + "required": [ + "is_watching" + ], + "properties": { + "is_watching": { + "type": "boolean" + } + } + }, + "IssueAddLabelRequest": { + "type": "object", + "required": [ + "label_id" + ], + "properties": { + "label_id": { + "type": "integer", + "format": "int64" + } + } + }, + "IssueAssignUserRequest": { + "type": "object", + "required": [ + "user_id" + ], + "properties": { + "user_id": { + "type": "string", + "format": "uuid" + } + } + }, + "IssueAssigneeResponse": { + "type": "object", + "required": [ + "issue", + "user_id", + "username", + "assigned_at" + ], + "properties": { + "issue": { + "type": "string", + "format": "uuid" + }, + "user_id": { + "type": "string", + "format": "uuid" + }, + "username": { + "type": 
"string" + }, + "assigned_at": { + "type": "string", + "format": "date-time" + } + } + }, + "IssueCommentCreateRequest": { + "type": "object", + "required": [ + "body" + ], + "properties": { + "body": { + "type": "string" + } + } + }, + "IssueCommentListResponse": { + "type": "object", + "required": [ + "comments", + "total", + "page", + "per_page" + ], + "properties": { + "comments": { + "type": "array", + "items": { + "$ref": "#/components/schemas/IssueCommentResponse" + } + }, + "total": { + "type": "integer", + "format": "int64" + }, + "page": { + "type": "integer", + "format": "int64" + }, + "per_page": { + "type": "integer", + "format": "int64" + } + } + }, + "IssueCommentResponse": { + "type": "object", + "required": [ + "id", + "issue", + "author", + "author_username", + "body", + "created_at", + "updated_at" + ], + "properties": { + "id": { + "type": "integer", + "format": "int64" + }, + "issue": { + "type": "string", + "format": "uuid" + }, + "author": { + "type": "string", + "format": "uuid" + }, + "author_username": { + "type": "string" + }, + "body": { + "type": "string" + }, + "created_at": { + "type": "string", + "format": "date-time" + }, + "updated_at": { + "type": "string", + "format": "date-time" + } + } + }, + "IssueCommentUpdateRequest": { + "type": "object", + "required": [ + "body" + ], + "properties": { + "body": { + "type": "string" + } + } + }, + "IssueCreateRequest": { + "type": "object", + "required": [ + "title" + ], + "properties": { + "title": { + "type": "string" + }, + "body": { + "type": [ + "string", + "null" + ] + }, + "milestone": { + "type": [ + "string", + "null" + ] + } + } + }, + "IssueLabelResponse": { + "type": "object", + "required": [ + "issue", + "label_id", + "relation_at" + ], + "properties": { + "issue": { + "type": "string", + "format": "uuid" + }, + "label_id": { + "type": "integer", + "format": "int64" + }, + "label_name": { + "type": [ + "string", + "null" + ] + }, + "label_color": { + "type": [ + "string", + 
"null" + ] + }, + "relation_at": { + "type": "string", + "format": "date-time" + } + } + }, + "IssueLinkPullRequestRequest": { + "type": "object", + "required": [ + "repo", + "number" + ], + "properties": { + "repo": { + "type": "string", + "format": "uuid" + }, + "number": { + "type": "integer", + "format": "int64" + } + } + }, + "IssueLinkRepoRequest": { + "type": "object", + "required": [ + "repo" + ], + "properties": { + "repo": { + "type": "string", + "format": "uuid" + } + } + }, + "IssueListResponse": { + "type": "object", + "required": [ + "issues", + "total", + "page", + "per_page" + ], + "properties": { + "issues": { + "type": "array", + "items": { + "$ref": "#/components/schemas/IssueResponse" + } + }, + "total": { + "type": "integer", + "format": "int64" + }, + "page": { + "type": "integer", + "format": "int64" + }, + "per_page": { + "type": "integer", + "format": "int64" + } + } + }, + "IssuePullRequestResponse": { + "type": "object", + "required": [ + "issue", + "repo", + "number", + "relation_at" + ], + "properties": { + "issue": { + "type": "string", + "format": "uuid" + }, + "repo": { + "type": "string", + "format": "uuid" + }, + "number": { + "type": "integer", + "format": "int64" + }, + "relation_at": { + "type": "string", + "format": "date-time" + } + } + }, + "IssueRepoResponse": { + "type": "object", + "required": [ + "issue", + "repo", + "relation_at" + ], + "properties": { + "issue": { + "type": "string", + "format": "uuid" + }, + "repo": { + "type": "string", + "format": "uuid" + }, + "relation_at": { + "type": "string", + "format": "date-time" + } + } + }, + "IssueResponse": { + "type": "object", + "required": [ + "id", + "project", + "number", + "title", + "state", + "author", + "created_at", + "updated_at", + "created_by_ai" + ], + "properties": { + "id": { + "type": "string", + "format": "uuid" + }, + "project": { + "type": "string", + "format": "uuid" + }, + "number": { + "type": "integer", + "format": "int64" + }, + "title": { + 
"type": "string" + }, + "body": { + "type": [ + "string", + "null" + ] + }, + "state": { + "type": "string" + }, + "author": { + "type": "string", + "format": "uuid" + }, + "author_username": { + "type": [ + "string", + "null" + ] + }, + "milestone": { + "type": [ + "string", + "null" + ] + }, + "created_at": { + "type": "string", + "format": "date-time" + }, + "updated_at": { + "type": "string", + "format": "date-time" + }, + "closed_at": { + "type": [ + "string", + "null" + ], + "format": "date-time" + }, + "created_by_ai": { + "type": "boolean" + } + } + }, + "IssueSearchItem": { + "type": "object", + "required": [ + "uid", + "number", + "title", + "state", + "project_uid", + "project_name", + "created_at", + "updated_at" + ], + "properties": { + "uid": { + "type": "string", + "format": "uuid" + }, + "number": { + "type": "integer", + "format": "int64" + }, + "title": { + "type": "string" + }, + "body": { + "type": [ + "string", + "null" + ] + }, + "state": { + "type": "string" + }, + "project_uid": { + "type": "string", + "format": "uuid" + }, + "project_name": { + "type": "string" + }, + "created_at": { + "type": "string", + "format": "date-time" + }, + "updated_at": { + "type": "string", + "format": "date-time" + } + } + }, + "IssueSubscriberResponse": { + "type": "object", + "required": [ + "issue", + "user_id", + "username", + "subscribed", + "created_at" + ], + "properties": { + "issue": { + "type": "string", + "format": "uuid" + }, + "user_id": { + "type": "string", + "format": "uuid" + }, + "username": { + "type": "string" + }, + "subscribed": { + "type": "boolean" + }, + "created_at": { + "type": "string", + "format": "date-time" + } + } + }, + "IssueSummaryResponse": { + "type": "object", + "required": [ + "total", + "open", + "closed" + ], + "properties": { + "total": { + "type": "integer", + "format": "int64" + }, + "open": { + "type": "integer", + "format": "int64" + }, + "closed": { + "type": "integer", + "format": "int64" + } + } + }, + 
"IssueUpdateRequest": { + "type": "object", + "properties": { + "title": { + "type": [ + "string", + "null" + ] + }, + "body": { + "type": [ + "string", + "null" + ] + }, + "milestone": { + "type": [ + "string", + "null" + ] + }, + "state": { + "type": [ + "string", + "null" + ] + } + } + }, + "JoinAnswerResponse": { + "type": "object", + "required": [ + "question", + "answer", + "created_at" + ], + "properties": { + "question": { + "type": "string" + }, + "answer": { + "type": "string" + }, + "created_at": { + "type": "string", + "format": "date-time" + } + } + }, + "JoinAnswersListResponse": { + "type": "object", + "required": [ + "request_id", + "project_uid", + "answers" + ], + "properties": { + "request_id": { + "type": "integer", + "format": "int64" + }, + "project_uid": { + "type": "string" + }, + "answers": { + "type": "array", + "items": { + "$ref": "#/components/schemas/JoinAnswerResponse" + } + } + } + }, + "JoinRequestListResponse": { + "type": "object", + "required": [ + "requests", + "total", + "page", + "per_page" + ], + "properties": { + "requests": { + "type": "array", + "items": { + "$ref": "#/components/schemas/JoinRequestResponse" + } + }, + "total": { + "type": "integer", + "format": "int64", + "minimum": 0 + }, + "page": { + "type": "integer", + "format": "int64", + "minimum": 0 + }, + "per_page": { + "type": "integer", + "format": "int64", + "minimum": 0 + } + } + }, + "JoinRequestResponse": { + "type": "object", + "required": [ + "id", + "project_uid", + "user_uid", + "username", + "status", + "created_at" + ], + "properties": { + "id": { + "type": "integer", + "format": "int64" + }, + "project_uid": { + "type": "string", + "format": "uuid" + }, + "user_uid": { + "type": "string", + "format": "uuid" + }, + "username": { + "type": "string" + }, + "status": { + "type": "string" + }, + "message": { + "type": [ + "string", + "null" + ] + }, + "processed_by": { + "type": [ + "string", + "null" + ], + "format": "uuid" + }, + "processed_at": { + 
"type": [ + "string", + "null" + ], + "format": "date-time" + }, + "reject_reason": { + "type": [ + "string", + "null" + ] + }, + "created_at": { + "type": "string", + "format": "date-time" + } + } + }, + "JoinSettingsResponse": { + "type": "object", + "required": [ + "project_uid", + "require_approval", + "require_questions", + "questions" + ], + "properties": { + "project_uid": { + "type": "string" + }, + "require_approval": { + "type": "boolean" + }, + "require_questions": { + "type": "boolean" + }, + "questions": {} + } + }, + "LabelListResponse": { + "type": "object", + "required": [ + "labels", + "total" + ], + "properties": { + "labels": { + "type": "array", + "items": { + "$ref": "#/components/schemas/LabelResponse" + } + }, + "total": { + "type": "integer", + "minimum": 0 + } + } + }, + "LabelResponse": { + "type": "object", + "required": [ + "id", + "project_uid", + "name", + "color", + "created_at" + ], + "properties": { + "id": { + "type": "integer", + "format": "int64" + }, + "project_uid": { + "type": "string", + "format": "uuid" + }, + "name": { + "type": "string" + }, + "color": { + "type": "string" + }, + "description": { + "type": [ + "string", + "null" + ] + }, + "created_at": { + "type": "string", + "format": "date-time" + } + } + }, + "LaneInfo": { + "type": "object", + "description": "Response for the gitgraph-react compatible API endpoint.", + "required": [ + "lane_index", + "start_oid" + ], + "properties": { + "lane_index": { + "type": "integer", + "description": "0-based lane index.", + "minimum": 0 + }, + "branch_name": { + "type": [ + "string", + "null" + ], + "description": "Branch name if this lane has a branch tip (None for unnamed lanes)." + }, + "start_oid": { + "type": "string", + "description": "SHA of the commit where this lane/branch starts." + }, + "end_oid": { + "type": [ + "string", + "null" + ], + "description": "SHA of the commit where this lane ends (None if lane continues to last commit)." 
+ } + } + }, + "LikeUserInfo": { + "type": "object", + "required": [ + "uid", + "username", + "avatar_url" + ], + "properties": { + "uid": { + "type": "string", + "format": "uuid" + }, + "username": { + "type": "string" + }, + "avatar_url": { + "type": "string" + } + } + }, + "LoginParams": { + "type": "object", + "required": [ + "username", + "password", + "captcha" + ], + "properties": { + "username": { + "type": "string" + }, + "password": { + "type": "string" + }, + "captcha": { + "type": "string" + }, + "totp_code": { + "type": [ + "string", + "null" + ] + } + } + }, + "MemberInfo": { + "type": "object", + "required": [ + "user_id", + "username", + "scope", + "joined_at" + ], + "properties": { + "user_id": { + "type": "string", + "format": "uuid" + }, + "username": { + "type": "string" + }, + "display_name": { + "type": [ + "string", + "null" + ] + }, + "avatar_url": { + "type": [ + "string", + "null" + ] + }, + "scope": { + "$ref": "#/components/schemas/MemberRole" + }, + "joined_at": { + "type": "string", + "format": "date-time" + } + } + }, + "MemberListResponse": { + "type": "object", + "required": [ + "members", + "total", + "page", + "per_page" + ], + "properties": { + "members": { + "type": "array", + "items": { + "$ref": "#/components/schemas/MemberInfo" + } + }, + "total": { + "type": "integer", + "format": "int64", + "minimum": 0 + }, + "page": { + "type": "integer", + "format": "int64", + "minimum": 0 + }, + "per_page": { + "type": "integer", + "format": "int64", + "minimum": 0 + } + } + }, + "MemberRole": { + "type": "string", + "description": "Project member role. 
Stored as `\"owner\"`, `\"admin\"`, or `\"member\"` in the database.", + "enum": [ + "Owner", + "Admin", + "Member" + ] + }, + "MentionNotificationResponse": { + "type": "object", + "required": [ + "message_id", + "mentioned_by", + "mentioned_by_name", + "content_preview", + "room_id", + "room_name", + "created_at" + ], + "properties": { + "message_id": { + "type": "string", + "format": "uuid" + }, + "mentioned_by": { + "type": "string", + "format": "uuid" + }, + "mentioned_by_name": { + "type": "string" + }, + "content_preview": { + "type": "string" + }, + "room_id": { + "type": "string", + "format": "uuid" + }, + "room_name": { + "type": "string" + }, + "created_at": { + "type": "string", + "format": "date-time" + } + } + }, + "MergeAnalysisResponse": { + "type": "object", + "required": [ + "can_fast_forward", + "is_up_to_date", + "is_normal", + "analysis_flags", + "supported_strategies" + ], + "properties": { + "can_fast_forward": { + "type": "boolean" + }, + "is_up_to_date": { + "type": "boolean" + }, + "is_normal": { + "type": "boolean" + }, + "analysis_flags": { + "type": "array", + "items": { + "type": "string" + } + }, + "supported_strategies": { + "type": "array", + "items": { + "type": "string" + }, + "description": "Strategies supported given the current state of the PR." 
+ } + } + }, + "MergeAnalysisResultInner": { + "type": "object", + "required": [ + "is_none", + "is_normal", + "is_up_to_date", + "is_fast_forward", + "is_unborn" + ], + "properties": { + "is_none": { + "type": "boolean" + }, + "is_normal": { + "type": "boolean" + }, + "is_up_to_date": { + "type": "boolean" + }, + "is_fast_forward": { + "type": "boolean" + }, + "is_unborn": { + "type": "boolean" + } + } + }, + "MergeCommitsRequest": { + "type": "object", + "required": [ + "local_oid", + "remote_oid" + ], + "properties": { + "local_oid": { + "type": "string" + }, + "remote_oid": { + "type": "string" + }, + "find_renames": { + "type": "boolean" + }, + "fail_on_conflict": { + "type": "boolean" + }, + "skip_reuc": { + "type": "boolean" + }, + "no_recursive": { + "type": "boolean" + }, + "rename_threshold": { + "type": "integer", + "format": "int32", + "minimum": 0 + }, + "target_limit": { + "type": "integer", + "format": "int32", + "minimum": 0 + }, + "recursion_limit": { + "type": "integer", + "format": "int32", + "minimum": 0 + } + } + }, + "MergeConflictFile": { + "type": "object", + "required": [ + "path", + "status" + ], + "properties": { + "path": { + "type": "string" + }, + "status": { + "type": "string" + } + } + }, + "MergeConflictResponse": { + "type": "object", + "required": [ + "has_conflicts", + "conflicted_files" + ], + "properties": { + "has_conflicts": { + "type": "boolean" + }, + "conflicted_files": { + "type": "array", + "items": { + "$ref": "#/components/schemas/MergeConflictFile" + } + } + } + }, + "MergePreferenceResultInner": { + "type": "object", + "required": [ + "is_none", + "is_no_fast_forward", + "is_fastforward_only" + ], + "properties": { + "is_none": { + "type": "boolean" + }, + "is_no_fast_forward": { + "type": "boolean" + }, + "is_fastforward_only": { + "type": "boolean" + } + } + }, + "MergeRequest": { + "type": "object", + "properties": { + "fast_forward": { + "type": "boolean" + }, + "strategy": { + "$ref": 
"#/components/schemas/MergeStrategy" + }, + "message": { + "type": "string" + } + } + }, + "MergeResponse": { + "type": "object", + "required": [ + "repo", + "number", + "status", + "merged_by", + "merged_at" + ], + "properties": { + "repo": { + "type": "string", + "format": "uuid" + }, + "number": { + "type": "integer", + "format": "int64" + }, + "status": { + "type": "string" + }, + "merged_by": { + "type": "string", + "format": "uuid" + }, + "merged_at": { + "type": "string", + "format": "date-time" + } + } + }, + "MergeStrategy": { + "type": "string", + "enum": [ + "mergecommit", + "squash", + "rebase" + ] + }, + "MergeTreesRequest": { + "type": "object", + "required": [ + "ancestor_oid", + "our_oid", + "their_oid" + ], + "properties": { + "ancestor_oid": { + "type": "string" + }, + "our_oid": { + "type": "string" + }, + "their_oid": { + "type": "string" + }, + "find_renames": { + "type": "boolean" + }, + "fail_on_conflict": { + "type": "boolean" + }, + "skip_reuc": { + "type": "boolean" + }, + "no_recursive": { + "type": "boolean" + }, + "rename_threshold": { + "type": "integer", + "format": "int32", + "minimum": 0 + }, + "target_limit": { + "type": "integer", + "format": "int32", + "minimum": 0 + }, + "recursion_limit": { + "type": "integer", + "format": "int32", + "minimum": 0 + } + } + }, + "MergeheadInfoResponse": { + "type": "object", + "required": [ + "oid" + ], + "properties": { + "oid": { + "type": "string" + } + } + }, + "MessageEditHistoryEntry": { + "type": "object", + "required": [ + "old_content", + "new_content", + "edited_at" + ], + "properties": { + "old_content": { + "type": "string" + }, + "new_content": { + "type": "string" + }, + "edited_at": { + "type": "string", + "format": "date-time" + } + } + }, + "MessageEditHistoryResponse": { + "type": "object", + "required": [ + "message_id", + "history", + "total_edits" + ], + "properties": { + "message_id": { + "type": "string", + "format": "uuid" + }, + "history": { + "type": "array", + "items": 
{ + "$ref": "#/components/schemas/MessageEditHistoryEntry" + } + }, + "total_edits": { + "type": "integer", + "format": "int64" + } + } + }, + "MessageSearchResponse": { + "type": "object", + "required": [ + "messages", + "total" + ], + "properties": { + "messages": { + "type": "array", + "items": { + "$ref": "#/components/schemas/RoomMessageResponse" + } + }, + "total": { + "type": "integer", + "format": "int64" + } + } + }, + "ModelCapabilityResponse": { + "type": "object", + "required": [ + "id", + "model_version_id", + "capability", + "is_supported", + "created_at" + ], + "properties": { + "id": { + "type": "integer", + "format": "int64" + }, + "model_version_id": { + "type": "integer", + "format": "int64" + }, + "capability": { + "type": "string" + }, + "is_supported": { + "type": "boolean" + }, + "created_at": { + "type": "string", + "format": "date-time" + } + } + }, + "ModelParameterProfileResponse": { + "type": "object", + "required": [ + "id", + "model_version_id", + "temperature_min", + "temperature_max", + "top_p_min", + "top_p_max", + "frequency_penalty_supported", + "presence_penalty_supported" + ], + "properties": { + "id": { + "type": "integer", + "format": "int64" + }, + "model_version_id": { + "type": "string", + "format": "uuid" + }, + "temperature_min": { + "type": "number", + "format": "double" + }, + "temperature_max": { + "type": "number", + "format": "double" + }, + "top_p_min": { + "type": "number", + "format": "double" + }, + "top_p_max": { + "type": "number", + "format": "double" + }, + "frequency_penalty_supported": { + "type": "boolean" + }, + "presence_penalty_supported": { + "type": "boolean" + } + } + }, + "ModelPricingResponse": { + "type": "object", + "required": [ + "id", + "model_version_id", + "input_price_per_1k_tokens", + "output_price_per_1k_tokens", + "currency", + "effective_from" + ], + "properties": { + "id": { + "type": "integer", + "format": "int64" + }, + "model_version_id": { + "type": "string", + "format": "uuid" + 
}, + "input_price_per_1k_tokens": { + "type": "string" + }, + "output_price_per_1k_tokens": { + "type": "string" + }, + "currency": { + "type": "string" + }, + "effective_from": { + "type": "string", + "format": "date-time" + } + } + }, + "ModelResponse": { + "type": "object", + "required": [ + "id", + "provider_id", + "name", + "modality", + "capability", + "context_length", + "is_open_source", + "status", + "created_at", + "updated_at" + ], + "properties": { + "id": { + "type": "string", + "format": "uuid" + }, + "provider_id": { + "type": "string", + "format": "uuid" + }, + "name": { + "type": "string" + }, + "modality": { + "type": "string" + }, + "capability": { + "type": "string" + }, + "context_length": { + "type": "integer", + "format": "int64" + }, + "max_output_tokens": { + "type": [ + "integer", + "null" + ], + "format": "int64" + }, + "training_cutoff": { + "type": [ + "string", + "null" + ], + "format": "date-time" + }, + "is_open_source": { + "type": "boolean" + }, + "status": { + "type": "string" + }, + "created_at": { + "type": "string", + "format": "date-time" + }, + "updated_at": { + "type": "string", + "format": "date-time" + } + } + }, + "ModelVersionResponse": { + "type": "object", + "required": [ + "id", + "model_id", + "version", + "is_default", + "status", + "created_at" + ], + "properties": { + "id": { + "type": "string", + "format": "uuid" + }, + "model_id": { + "type": "string", + "format": "uuid" + }, + "version": { + "type": "string" + }, + "release_date": { + "type": [ + "string", + "null" + ], + "format": "date-time" + }, + "change_log": { + "type": [ + "string", + "null" + ] + }, + "is_default": { + "type": "boolean" + }, + "status": { + "type": "string" + }, + "created_at": { + "type": "string", + "format": "date-time" + } + } + }, + "MoveCardParams": { + "type": "object", + "required": [ + "target_column_id", + "position" + ], + "properties": { + "target_column_id": { + "type": "string", + "format": "uuid" + }, + "position": { + 
"type": "integer", + "format": "int32" + } + } + }, + "NotificationListResponse": { + "type": "object", + "required": [ + "notifications", + "total", + "unread_count" + ], + "properties": { + "notifications": { + "type": "array", + "items": { + "$ref": "#/components/schemas/NotificationResponse" + } + }, + "total": { + "type": "integer", + "format": "int64" + }, + "unread_count": { + "type": "integer", + "format": "int64" + } + } + }, + "NotificationPreferencesParams": { + "type": "object", + "properties": { + "email_enabled": { + "type": [ + "boolean", + "null" + ] + }, + "in_app_enabled": { + "type": [ + "boolean", + "null" + ] + }, + "push_enabled": { + "type": [ + "boolean", + "null" + ] + }, + "digest_mode": { + "type": [ + "string", + "null" + ] + }, + "dnd_enabled": { + "type": [ + "boolean", + "null" + ] + }, + "dnd_start_minute": { + "type": [ + "integer", + "null" + ], + "format": "int32" + }, + "dnd_end_minute": { + "type": [ + "integer", + "null" + ], + "format": "int32" + }, + "marketing_enabled": { + "type": [ + "boolean", + "null" + ] + }, + "security_enabled": { + "type": [ + "boolean", + "null" + ] + }, + "product_enabled": { + "type": [ + "boolean", + "null" + ] + } + } + }, + "NotificationPreferencesResponse": { + "type": "object", + "required": [ + "user_id", + "email_enabled", + "in_app_enabled", + "push_enabled", + "digest_mode", + "dnd_enabled", + "marketing_enabled", + "security_enabled", + "product_enabled", + "created_at", + "updated_at" + ], + "properties": { + "user_id": { + "type": "string", + "format": "uuid" + }, + "email_enabled": { + "type": "boolean" + }, + "in_app_enabled": { + "type": "boolean" + }, + "push_enabled": { + "type": "boolean" + }, + "digest_mode": { + "type": "string" + }, + "dnd_enabled": { + "type": "boolean" + }, + "dnd_start_minute": { + "type": [ + "integer", + "null" + ], + "format": "int32" + }, + "dnd_end_minute": { + "type": [ + "integer", + "null" + ], + "format": "int32" + }, + "marketing_enabled": { + 
"type": "boolean" + }, + "security_enabled": { + "type": "boolean" + }, + "product_enabled": { + "type": "boolean" + }, + "created_at": { + "type": "string", + "format": "date-time" + }, + "updated_at": { + "type": "string", + "format": "date-time" + } + } + }, + "NotificationResponse": { + "type": "object", + "required": [ + "id", + "notification_type", + "title", + "metadata", + "is_read", + "is_archived", + "created_at" + ], + "properties": { + "id": { + "type": "string", + "format": "uuid" + }, + "room": { + "type": [ + "string", + "null" + ], + "format": "uuid" + }, + "project": { + "type": [ + "string", + "null" + ], + "format": "uuid" + }, + "user_id": { + "type": [ + "string", + "null" + ], + "format": "uuid" + }, + "user_info": { + "oneOf": [ + { + "type": "null" + }, + { + "$ref": "#/components/schemas/UserInfo" + } + ] + }, + "notification_type": { + "type": "string" + }, + "title": { + "type": "string" + }, + "content": { + "type": [ + "string", + "null" + ] + }, + "related_message_id": { + "type": [ + "string", + "null" + ], + "format": "uuid" + }, + "related_user_id": { + "type": [ + "string", + "null" + ], + "format": "uuid" + }, + "related_room_id": { + "type": [ + "string", + "null" + ], + "format": "uuid" + }, + "metadata": {}, + "is_read": { + "type": "boolean" + }, + "is_archived": { + "type": "boolean" + }, + "created_at": { + "type": "string", + "format": "date-time" + }, + "read_at": { + "type": [ + "string", + "null" + ], + "format": "date-time" + }, + "expires_at": { + "type": [ + "string", + "null" + ], + "format": "date-time" + } + } + }, + "NotificationType": { + "type": "string", + "enum": [ + "mention", + "invitation", + "role_change", + "room_created", + "room_deleted", + "system_announcement" + ] + }, + "Pager": { + "type": "object", + "required": [ + "page", + "par_page" + ], + "properties": { + "page": { + "type": "integer", + "format": "int64" + }, + "par_page": { + "type": "integer", + "format": "int64" + } + } + }, + 
"PendingInvitationInfo": { + "type": "object", + "required": [ + "user_id", + "username", + "role", + "invited_at" + ], + "properties": { + "user_id": { + "type": "string", + "format": "uuid" + }, + "username": { + "type": "string" + }, + "display_name": { + "type": [ + "string", + "null" + ] + }, + "avatar_url": { + "type": [ + "string", + "null" + ] + }, + "email": { + "type": [ + "string", + "null" + ] + }, + "role": { + "type": "string" + }, + "invited_by_username": { + "type": [ + "string", + "null" + ] + }, + "invited_at": { + "type": "string", + "format": "date-time" + }, + "expires_at": { + "type": [ + "string", + "null" + ], + "format": "date-time" + } + } + }, + "PrCommitResponse": { + "type": "object", + "required": [ + "oid", + "short_oid", + "message", + "summary", + "author_name", + "author_email", + "authored_at", + "committer_name", + "committer_email", + "committed_at" + ], + "properties": { + "oid": { + "type": "string" + }, + "short_oid": { + "type": "string" + }, + "message": { + "type": "string" + }, + "summary": { + "type": "string" + }, + "author_name": { + "type": "string" + }, + "author_email": { + "type": "string" + }, + "authored_at": { + "type": "string", + "format": "date-time" + }, + "committer_name": { + "type": "string" + }, + "committer_email": { + "type": "string" + }, + "committed_at": { + "type": "string", + "format": "date-time" + } + } + }, + "PrCommitsListResponse": { + "type": "object", + "required": [ + "commits" + ], + "properties": { + "commits": { + "type": "array", + "items": { + "$ref": "#/components/schemas/PrCommitResponse" + } + } + } + }, + "PrDescription": { + "type": "object", + "description": "Structured PR description generated by AI.", + "required": [ + "summary", + "changes" + ], + "properties": { + "summary": { + "type": "string", + "description": "3-5 line summary of what this PR does." + }, + "changes": { + "type": "array", + "items": { + "type": "string" + }, + "description": "Key changes made in this PR." 
+ }, + "risks": { + "type": "array", + "items": { + "type": "string" + }, + "description": "Potential risks or things to watch out for." + }, + "tests": { + "type": "array", + "items": { + "type": "string" + }, + "description": "Suggested test scenarios." + } + } + }, + "PreferencesParams": { + "type": "object", + "properties": { + "language": { + "type": [ + "string", + "null" + ] + }, + "theme": { + "type": [ + "string", + "null" + ] + }, + "timezone": { + "type": [ + "string", + "null" + ] + }, + "email_notifications": { + "type": [ + "boolean", + "null" + ] + }, + "in_app_notifications": { + "type": [ + "boolean", + "null" + ] + } + } + }, + "PreferencesResponse": { + "type": "object", + "required": [ + "language", + "theme", + "timezone", + "email_notifications", + "in_app_notifications", + "created_at", + "updated_at" + ], + "properties": { + "language": { + "type": "string" + }, + "theme": { + "type": "string" + }, + "timezone": { + "type": "string" + }, + "email_notifications": { + "type": "boolean" + }, + "in_app_notifications": { + "type": "boolean" + }, + "created_at": { + "type": "string", + "format": "date-time" + }, + "updated_at": { + "type": "string", + "format": "date-time" + } + } + }, + "ProcessJoinRequest": { + "type": "object", + "required": [ + "approve", + "scope" + ], + "properties": { + "approve": { + "type": "boolean" + }, + "scope": { + "$ref": "#/components/schemas/MemberRole" + }, + "reject_reason": { + "type": [ + "string", + "null" + ] + } + } + }, + "ProfileResponse": { + "type": "object", + "required": [ + "uid", + "username", + "created_at", + "updated_at" + ], + "properties": { + "uid": { + "type": "string", + "format": "uuid" + }, + "username": { + "type": "string" + }, + "display_name": { + "type": [ + "string", + "null" + ] + }, + "avatar_url": { + "type": [ + "string", + "null" + ] + }, + "website_url": { + "type": [ + "string", + "null" + ] + }, + "organization": { + "type": [ + "string", + "null" + ] + }, + "created_at": { + 
"type": "string", + "format": "date-time" + }, + "updated_at": { + "type": "string", + "format": "date-time" + }, + "last_sign_in_at": { + "type": [ + "string", + "null" + ], + "format": "date-time" + } + } + }, + "ProjectBillingCurrentResponse": { + "type": "object", + "required": [ + "project_uid", + "currency", + "monthly_quota", + "balance", + "month_used", + "cycle_start_utc", + "cycle_end_utc", + "updated_at", + "created_at" + ], + "properties": { + "project_uid": { + "type": "string", + "format": "uuid" + }, + "currency": { + "type": "string" + }, + "monthly_quota": { + "type": "number", + "format": "double" + }, + "balance": { + "type": "number", + "format": "double" + }, + "month_used": { + "type": "number", + "format": "double" + }, + "cycle_start_utc": { + "type": "string", + "format": "date-time" + }, + "cycle_end_utc": { + "type": "string", + "format": "date-time" + }, + "updated_at": { + "type": "string", + "format": "date-time" + }, + "created_at": { + "type": "string", + "format": "date-time" + } + } + }, + "ProjectBillingHistoryItem": { + "type": "object", + "required": [ + "uid", + "project_uid", + "amount", + "currency", + "reason", + "created_at" + ], + "properties": { + "uid": { + "type": "string", + "format": "uuid" + }, + "project_uid": { + "type": "string", + "format": "uuid" + }, + "user_uid": { + "type": [ + "string", + "null" + ], + "format": "uuid" + }, + "amount": { + "type": "number", + "format": "double" + }, + "currency": { + "type": "string" + }, + "reason": { + "type": "string" + }, + "extra": {}, + "created_at": { + "type": "string", + "format": "date-time" + } + } + }, + "ProjectBillingHistoryQuery": { + "type": "object", + "properties": { + "page": { + "type": [ + "integer", + "null" + ], + "format": "int64", + "minimum": 0 + }, + "per_page": { + "type": [ + "integer", + "null" + ], + "format": "int64", + "minimum": 0 + } + } + }, + "ProjectBillingHistoryResponse": { + "type": "object", + "required": [ + "page", + "per_page", + 
"total", + "list" + ], + "properties": { + "page": { + "type": "integer", + "format": "int64", + "minimum": 0 + }, + "per_page": { + "type": "integer", + "format": "int64", + "minimum": 0 + }, + "total": { + "type": "integer", + "format": "int64", + "minimum": 0 + }, + "list": { + "type": "array", + "items": { + "$ref": "#/components/schemas/ProjectBillingHistoryItem" + } + } + } + }, + "ProjectInfoKeyValue": { + "type": "object", + "required": [ + "key", + "value" + ], + "properties": { + "key": { + "type": "string" + }, + "value": { + "type": "string" + } + } + }, + "ProjectInfoLabel": { + "type": "object", + "required": [ + "name", + "color" + ], + "properties": { + "name": { + "type": "string" + }, + "color": { + "type": "string" + } + } + }, + "ProjectInfoRelational": { + "type": "object", + "required": [ + "uid", + "name", + "display_name", + "is_public", + "created_at", + "updated_at", + "created_by", + "created_username_name", + "member_count", + "like_count", + "watch_count", + "keys", + "labels", + "is_like", + "is_watch" + ], + "properties": { + "uid": { + "type": "string", + "format": "uuid" + }, + "name": { + "type": "string" + }, + "display_name": { + "type": "string" + }, + "avatar_url": { + "type": [ + "string", + "null" + ] + }, + "description": { + "type": [ + "string", + "null" + ] + }, + "is_public": { + "type": "boolean" + }, + "created_at": { + "type": "string", + "format": "date-time" + }, + "updated_at": { + "type": "string", + "format": "date-time" + }, + "created_by": { + "type": "string", + "format": "uuid" + }, + "created_username_name": { + "type": "string" + }, + "created_display_name": { + "type": [ + "string", + "null" + ] + }, + "created_avatar_url": { + "type": [ + "string", + "null" + ] + }, + "member_count": { + "type": "integer", + "format": "int64" + }, + "like_count": { + "type": "integer", + "format": "int64" + }, + "watch_count": { + "type": "integer", + "format": "int64" + }, + "keys": { + "type": "array", + "items": { + 
"$ref": "#/components/schemas/ProjectInfoKeyValue" + } + }, + "labels": { + "type": "array", + "items": { + "$ref": "#/components/schemas/ProjectInfoLabel" + } + }, + "role": { + "oneOf": [ + { + "type": "null" + }, + { + "$ref": "#/components/schemas/MemberRole" + } + ] + }, + "is_like": { + "type": "boolean" + }, + "is_watch": { + "type": "boolean" + } + } + }, + "ProjectInitParams": { + "type": "object", + "required": [ + "name", + "is_public" + ], + "properties": { + "name": { + "type": "string" + }, + "description": { + "type": [ + "string", + "null" + ] + }, + "is_public": { + "type": "boolean" + }, + "workspace_slug": { + "type": [ + "string", + "null" + ], + "description": "Optional workspace slug to associate this project with." + } + } + }, + "ProjectInitResponse": { + "type": "object", + "required": [ + "params", + "project" + ], + "properties": { + "params": { + "$ref": "#/components/schemas/ProjectInitParams" + }, + "project": { + "$ref": "#/components/schemas/ProjectModel" + } + } + }, + "ProjectModel": { + "type": "object", + "required": [ + "uid", + "name", + "display_name", + "is_public", + "created_by", + "created_at", + "updated_at" + ], + "properties": { + "uid": { + "type": "string", + "format": "uuid" + }, + "name": { + "type": "string" + }, + "display_name": { + "type": "string" + }, + "avatar_url": { + "type": [ + "string", + "null" + ] + }, + "description": { + "type": [ + "string", + "null" + ] + }, + "is_public": { + "type": "boolean" + }, + "workspace_id": { + "type": [ + "string", + "null" + ], + "format": "uuid" + }, + "created_by": { + "type": "string", + "format": "uuid" + }, + "created_at": { + "type": "string", + "format": "date-time" + }, + "updated_at": { + "type": "string", + "format": "date-time" + } + } + }, + "ProjectRepoCreateParams": { + "type": "object", + "required": [ + "repo_name" + ], + "properties": { + "repo_name": { + "type": "string" + }, + "description": { + "type": [ + "string", + "null" + ] + }, + "init_repo": { 
+ "type": "boolean", + "description": "Default: true. When false, skips bare git init and leaves default_branch empty;\nthe branch will be auto-detected and set on first push." + }, + "default_branch": { + "type": "string", + "description": "Only used when init_repo is true." + }, + "is_private": { + "type": "boolean" + } + } + }, + "ProjectRepoCreateResponse": { + "type": "object", + "required": [ + "uid", + "repo_name", + "default_branch", + "project_name", + "is_private", + "storage_path", + "created_at" + ], + "properties": { + "uid": { + "type": "string", + "format": "uuid" + }, + "repo_name": { + "type": "string" + }, + "description": { + "type": [ + "string", + "null" + ] + }, + "default_branch": { + "type": "string" + }, + "project_name": { + "type": "string" + }, + "is_private": { + "type": "boolean" + }, + "storage_path": { + "type": "string" + }, + "created_at": { + "type": "string", + "format": "date-time" + } + } + }, + "ProjectRepositoryItem": { + "type": "object", + "required": [ + "uid", + "repo_name", + "default_branch", + "project_name", + "is_private", + "commit_count", + "branch_count", + "tag_count", + "star_count", + "watch_count" + ], + "properties": { + "uid": { + "type": "string", + "format": "uuid" + }, + "repo_name": { + "type": "string" + }, + "description": { + "type": [ + "string", + "null" + ] + }, + "default_branch": { + "type": "string" + }, + "project_name": { + "type": "string" + }, + "is_private": { + "type": "boolean" + }, + "commit_count": { + "type": "integer", + "format": "int64" + }, + "branch_count": { + "type": "integer", + "format": "int64" + }, + "tag_count": { + "type": "integer", + "format": "int64" + }, + "star_count": { + "type": "integer", + "format": "int64" + }, + "watch_count": { + "type": "integer", + "format": "int64" + }, + "last_commit_at": { + "type": [ + "string", + "null" + ], + "format": "date-time" + } + } + }, + "ProjectRepositoryPagination": { + "type": "object", + "required": [ + "items", + "total" + 
], + "properties": { + "items": { + "type": "array", + "items": { + "$ref": "#/components/schemas/ProjectRepositoryItem" + } + }, + "cursor": { + "type": [ + "string", + "null" + ] + }, + "total": { + "type": "integer", + "format": "int64", + "minimum": 0 + } + } + }, + "ProjectSearchItem": { + "type": "object", + "required": [ + "uid", + "name", + "display_name", + "is_public", + "created_at", + "updated_at" + ], + "properties": { + "uid": { + "type": "string", + "format": "uuid" + }, + "name": { + "type": "string" + }, + "display_name": { + "type": "string" + }, + "description": { + "type": [ + "string", + "null" + ] + }, + "avatar_url": { + "type": [ + "string", + "null" + ] + }, + "is_public": { + "type": "boolean" + }, + "created_at": { + "type": "string", + "format": "date-time" + }, + "updated_at": { + "type": "string", + "format": "date-time" + } + } + }, + "ProviderResponse": { + "type": "object", + "required": [ + "id", + "name", + "display_name", + "status", + "created_at", + "updated_at" + ], + "properties": { + "id": { + "type": "string", + "format": "uuid" + }, + "name": { + "type": "string" + }, + "display_name": { + "type": "string" + }, + "website": { + "type": [ + "string", + "null" + ] + }, + "status": { + "type": "string" + }, + "created_at": { + "type": "string", + "format": "date-time" + }, + "updated_at": { + "type": "string", + "format": "date-time" + } + } + }, + "PullRequestCreateRequest": { + "type": "object", + "required": [ + "title", + "base", + "head" + ], + "properties": { + "title": { + "type": "string" + }, + "body": { + "type": [ + "string", + "null" + ] + }, + "base": { + "type": "string" + }, + "head": { + "type": "string" + }, + "draft": { + "type": "boolean" + } + } + }, + "PullRequestListResponse": { + "type": "object", + "required": [ + "pull_requests", + "total", + "page", + "per_page" + ], + "properties": { + "pull_requests": { + "type": "array", + "items": { + "$ref": "#/components/schemas/PullRequestResponse" + } + }, + 
"total": { + "type": "integer", + "format": "int64", + "minimum": 0 + }, + "page": { + "type": "integer", + "format": "int64" + }, + "per_page": { + "type": "integer", + "format": "int64" + } + } + }, + "PullRequestResponse": { + "type": "object", + "required": [ + "repo", + "number", + "title", + "author", + "base", + "head", + "status", + "created_at", + "updated_at", + "created_by_ai" + ], + "properties": { + "repo": { + "type": "string", + "format": "uuid" + }, + "number": { + "type": "integer", + "format": "int64" + }, + "issue": { + "type": [ + "string", + "null" + ], + "format": "uuid" + }, + "title": { + "type": "string" + }, + "body": { + "type": [ + "string", + "null" + ] + }, + "author": { + "type": "string", + "format": "uuid" + }, + "author_username": { + "type": [ + "string", + "null" + ] + }, + "base": { + "type": "string" + }, + "head": { + "type": "string" + }, + "status": { + "type": "string" + }, + "merged_by": { + "type": [ + "string", + "null" + ], + "format": "uuid" + }, + "created_at": { + "type": "string", + "format": "date-time" + }, + "updated_at": { + "type": "string", + "format": "date-time" + }, + "merged_at": { + "type": [ + "string", + "null" + ], + "format": "date-time" + }, + "created_by_ai": { + "type": "boolean" + } + } + }, + "PullRequestSummaryResponse": { + "type": "object", + "required": [ + "total", + "open", + "merged", + "closed" + ], + "properties": { + "total": { + "type": "integer", + "format": "int64", + "minimum": 0 + }, + "open": { + "type": "integer", + "format": "int64", + "minimum": 0 + }, + "merged": { + "type": "integer", + "format": "int64", + "minimum": 0 + }, + "closed": { + "type": "integer", + "format": "int64", + "minimum": 0 + } + } + }, + "PullRequestUpdateRequest": { + "type": "object", + "properties": { + "title": { + "type": [ + "string", + "null" + ] + }, + "body": { + "type": [ + "string", + "null" + ] + }, + "base": { + "type": [ + "string", + "null" + ] + }, + "draft": { + "type": [ + "boolean", + 
"null" + ] + } + } + }, + "QuestionSchema": { + "type": "object", + "required": [ + "question" + ], + "properties": { + "question": { + "type": "string" + } + } + }, + "ReactionAddRequest": { + "type": "object", + "required": [ + "reaction" + ], + "properties": { + "reaction": { + "type": "string" + } + } + }, + "ReactionListResponse": { + "type": "object", + "required": [ + "reactions" + ], + "properties": { + "reactions": { + "type": "array", + "items": { + "$ref": "#/components/schemas/ReactionSummary" + } + } + } + }, + "ReactionResponse": { + "type": "object", + "required": [ + "user", + "reaction", + "created_at" + ], + "properties": { + "user": { + "type": "string", + "format": "uuid" + }, + "reaction": { + "type": "string" + }, + "created_at": { + "type": "string", + "format": "date-time" + } + } + }, + "ReactionSummary": { + "type": "object", + "required": [ + "reaction", + "count", + "users" + ], + "properties": { + "reaction": { + "type": "string" + }, + "count": { + "type": "integer", + "format": "int64" + }, + "users": { + "type": "array", + "items": { + "type": "string", + "format": "uuid" + } + } + } + }, + "RefCreateRequest": { + "type": "object", + "required": [ + "name", + "oid" + ], + "properties": { + "name": { + "type": "string" + }, + "oid": { + "type": "string" + }, + "force": { + "type": "boolean" + }, + "message": { + "type": [ + "string", + "null" + ] + } + } + }, + "RefDeleteResponse": { + "type": "object", + "required": [ + "name", + "oid" + ], + "properties": { + "name": { + "type": "string" + }, + "oid": { + "type": "string" + } + } + }, + "RefExistsResponse": { + "type": "object", + "required": [ + "name", + "exists" + ], + "properties": { + "name": { + "type": "string" + }, + "exists": { + "type": "boolean" + } + } + }, + "RefInfoResponse": { + "type": "object", + "required": [ + "name", + "is_symbolic", + "is_branch", + "is_remote", + "is_tag", + "is_note" + ], + "properties": { + "name": { + "type": "string" + }, + "oid": { + 
"type": [ + "string", + "null" + ] + }, + "target": { + "type": [ + "string", + "null" + ] + }, + "is_symbolic": { + "type": "boolean" + }, + "is_branch": { + "type": "boolean" + }, + "is_remote": { + "type": "boolean" + }, + "is_tag": { + "type": "boolean" + }, + "is_note": { + "type": "boolean" + } + } + }, + "RefTargetResponse": { + "type": "object", + "required": [ + "name" + ], + "properties": { + "name": { + "type": "string" + }, + "oid": { + "type": [ + "string", + "null" + ] + } + } + }, + "RefUpdateRequest": { + "type": "object", + "required": [ + "name", + "new_oid" + ], + "properties": { + "name": { + "type": "string" + }, + "new_oid": { + "type": "string" + }, + "expected_oid": { + "type": [ + "string", + "null" + ] + }, + "message": { + "type": [ + "string", + "null" + ] + } + } + }, + "RefUpdateResponse": { + "type": "object", + "required": [ + "name" + ], + "properties": { + "name": { + "type": "string" + }, + "old_oid": { + "type": [ + "string", + "null" + ] + }, + "new_oid": { + "type": [ + "string", + "null" + ] + } + } + }, + "RegisterParams": { + "type": "object", + "required": [ + "username", + "email", + "password", + "captcha" + ], + "properties": { + "username": { + "type": "string" + }, + "email": { + "type": "string" + }, + "password": { + "type": "string" + }, + "captcha": { + "type": "string" + } + } + }, + "RepoSearchItem": { + "type": "object", + "required": [ + "uid", + "name", + "project_uid", + "project_name", + "is_private", + "created_at" + ], + "properties": { + "uid": { + "type": "string", + "format": "uuid" + }, + "name": { + "type": "string" + }, + "description": { + "type": [ + "string", + "null" + ] + }, + "project_uid": { + "type": "string", + "format": "uuid" + }, + "project_name": { + "type": "string" + }, + "is_private": { + "type": "boolean" + }, + "created_at": { + "type": "string", + "format": "date-time" + } + } + }, + "ResetPasswordParams": { + "type": "object", + "required": [ + "email" + ], + "properties": { + 
"email": { + "type": "string" + } + } + }, + "ReviewCommentCreateRequest": { + "type": "object", + "required": [ + "body" + ], + "properties": { + "body": { + "type": "string" + }, + "review": { + "type": [ + "string", + "null" + ], + "format": "uuid" + }, + "path": { + "type": [ + "string", + "null" + ] + }, + "side": { + "type": [ + "string", + "null" + ] + }, + "line": { + "type": [ + "integer", + "null" + ], + "format": "int64" + }, + "old_line": { + "type": [ + "integer", + "null" + ], + "format": "int64" + }, + "in_reply_to": { + "type": [ + "integer", + "null" + ], + "format": "int64", + "description": "ID of the parent comment to reply to (null = root comment)." + } + } + }, + "ReviewCommentListQuery": { + "type": "object", + "properties": { + "path": { + "type": [ + "string", + "null" + ], + "description": "Filter comments by file path (e.g. \"src/main.rs\")." + }, + "resolved": { + "type": [ + "boolean", + "null" + ], + "description": "Filter by resolved status. Omit to return all comments." + }, + "file_only": { + "type": [ + "boolean", + "null" + ], + "description": "If true, only return inline comments (those with a `path` set).\nIf false, only return general comments (no path).\nOmit to return all comments." + } + } + }, + "ReviewCommentListResponse": { + "type": "object", + "required": [ + "comments", + "threads", + "total" + ], + "properties": { + "comments": { + "type": "array", + "items": { + "$ref": "#/components/schemas/ReviewCommentResponse" + }, + "description": "Flat list of all comments (kept for backward compatibility)." + }, + "threads": { + "type": "array", + "items": { + "$ref": "#/components/schemas/ReviewCommentThread" + }, + "description": "Comments grouped into threads (root comments with their replies)." 
+ }, + "total": { + "type": "integer", + "format": "int64" + } + } + }, + "ReviewCommentReplyRequest": { + "type": "object", + "description": "Body for replying to an existing review comment thread.", + "required": [ + "body" + ], + "properties": { + "body": { + "type": "string" + } + } + }, + "ReviewCommentResponse": { + "type": "object", + "required": [ + "repo", + "number", + "id", + "body", + "author", + "resolved", + "created_at", + "updated_at" + ], + "properties": { + "repo": { + "type": "string", + "format": "uuid" + }, + "number": { + "type": "integer", + "format": "int64" + }, + "id": { + "type": "integer", + "format": "int64" + }, + "review": { + "type": [ + "string", + "null" + ], + "format": "uuid" + }, + "path": { + "type": [ + "string", + "null" + ] + }, + "side": { + "type": [ + "string", + "null" + ] + }, + "line": { + "type": [ + "integer", + "null" + ], + "format": "int64" + }, + "old_line": { + "type": [ + "integer", + "null" + ], + "format": "int64" + }, + "body": { + "type": "string" + }, + "author": { + "type": "string", + "format": "uuid" + }, + "author_username": { + "type": [ + "string", + "null" + ] + }, + "resolved": { + "type": "boolean" + }, + "in_reply_to": { + "type": [ + "integer", + "null" + ], + "format": "int64" + }, + "created_at": { + "type": "string", + "format": "date-time" + }, + "updated_at": { + "type": "string", + "format": "date-time" + } + } + }, + "ReviewCommentThread": { + "type": "object", + "description": "A review comment thread: one root comment plus all its replies.", + "required": [ + "root", + "replies" + ], + "properties": { + "root": { + "$ref": "#/components/schemas/ReviewCommentResponse" + }, + "replies": { + "type": "array", + "items": { + "$ref": "#/components/schemas/ReviewCommentResponse" + } + } + } + }, + "ReviewCommentUpdateRequest": { + "type": "object", + "required": [ + "body" + ], + "properties": { + "body": { + "type": "string" + } + } + }, + "ReviewListResponse": { + "type": "object", + 
"required": [ + "reviews" + ], + "properties": { + "reviews": { + "type": "array", + "items": { + "$ref": "#/components/schemas/ReviewResponse" + } + } + } + }, + "ReviewRequestCreateRequest": { + "type": "object", + "required": [ + "reviewer" + ], + "properties": { + "reviewer": { + "type": "string", + "format": "uuid", + "description": "User ID of the reviewer to request." + } + } + }, + "ReviewRequestListResponse": { + "type": "object", + "required": [ + "requests", + "total" + ], + "properties": { + "requests": { + "type": "array", + "items": { + "$ref": "#/components/schemas/ReviewRequestResponse" + } + }, + "total": { + "type": "integer", + "format": "int64" + } + } + }, + "ReviewRequestResponse": { + "type": "object", + "required": [ + "repo", + "number", + "reviewer", + "requested_by", + "requested_at" + ], + "properties": { + "repo": { + "type": "string", + "format": "uuid" + }, + "number": { + "type": "integer", + "format": "int64" + }, + "reviewer": { + "type": "string", + "format": "uuid" + }, + "reviewer_username": { + "type": [ + "string", + "null" + ] + }, + "requested_by": { + "type": "string", + "format": "uuid" + }, + "requested_by_username": { + "type": [ + "string", + "null" + ] + }, + "requested_at": { + "type": "string", + "format": "date-time" + }, + "dismissed_at": { + "type": [ + "string", + "null" + ], + "format": "date-time" + }, + "dismissed_by": { + "type": [ + "string", + "null" + ], + "format": "uuid" + }, + "dismissed_by_username": { + "type": [ + "string", + "null" + ] + } + } + }, + "ReviewResponse": { + "type": "object", + "required": [ + "repo", + "number", + "reviewer", + "state", + "created_at", + "updated_at" + ], + "properties": { + "repo": { + "type": "string", + "format": "uuid" + }, + "number": { + "type": "integer", + "format": "int64" + }, + "reviewer": { + "type": "string", + "format": "uuid" + }, + "reviewer_username": { + "type": [ + "string", + "null" + ] + }, + "state": { + "type": "string" + }, + "body": { + 
"type": [ + "string", + "null" + ] + }, + "submitted_at": { + "type": [ + "string", + "null" + ], + "format": "date-time" + }, + "created_at": { + "type": "string", + "format": "date-time" + }, + "updated_at": { + "type": "string", + "format": "date-time" + } + } + }, + "ReviewSubmitRequest": { + "type": "object", + "required": [ + "state" + ], + "properties": { + "body": { + "type": [ + "string", + "null" + ] + }, + "state": { + "type": "string" + } + } + }, + "ReviewUpdateRequest": { + "type": "object", + "properties": { + "body": { + "type": [ + "string", + "null" + ] + } + } + }, + "ReviewerInfo": { + "type": "object", + "required": [ + "reviewer", + "state" + ], + "properties": { + "reviewer": { + "type": "string", + "format": "uuid" + }, + "state": { + "type": "string" + }, + "submitted_at": { + "type": [ + "string", + "null" + ], + "format": "date-time" + } + } + }, + "RoomAiResponse": { + "type": "object", + "required": [ + "room", + "model", + "call_count", + "use_exact", + "think", + "stream", + "created_at", + "updated_at" + ], + "properties": { + "room": { + "type": "string", + "format": "uuid" + }, + "model": { + "type": "string", + "format": "uuid" + }, + "version": { + "type": [ + "string", + "null" + ], + "format": "uuid" + }, + "call_count": { + "type": "integer", + "format": "int64" + }, + "last_call_at": { + "type": [ + "string", + "null" + ], + "format": "date-time" + }, + "history_limit": { + "type": [ + "integer", + "null" + ], + "format": "int64" + }, + "system_prompt": { + "type": [ + "string", + "null" + ] + }, + "temperature": { + "type": [ + "number", + "null" + ], + "format": "double" + }, + "max_tokens": { + "type": [ + "integer", + "null" + ], + "format": "int64" + }, + "use_exact": { + "type": "boolean" + }, + "think": { + "type": "boolean" + }, + "stream": { + "type": "boolean" + }, + "min_score": { + "type": [ + "number", + "null" + ], + "format": "float" + }, + "created_at": { + "type": "string", + "format": "date-time" + }, + 
"updated_at": { + "type": "string", + "format": "date-time" + } + } + }, + "RoomAiUpsertRequest": { + "type": "object", + "required": [ + "model" + ], + "properties": { + "model": { + "type": "string", + "format": "uuid" + }, + "version": { + "type": [ + "string", + "null" + ], + "format": "uuid" + }, + "history_limit": { + "type": [ + "integer", + "null" + ], + "format": "int64" + }, + "system_prompt": { + "type": [ + "string", + "null" + ] + }, + "temperature": { + "type": [ + "number", + "null" + ], + "format": "double" + }, + "max_tokens": { + "type": [ + "integer", + "null" + ], + "format": "int64" + }, + "use_exact": { + "type": [ + "boolean", + "null" + ] + }, + "think": { + "type": [ + "boolean", + "null" + ] + }, + "stream": { + "type": [ + "boolean", + "null" + ] + }, + "min_score": { + "type": [ + "number", + "null" + ], + "format": "float" + } + } + }, + "RoomCategoryCreateRequest": { + "type": "object", + "required": [ + "name" + ], + "properties": { + "name": { + "type": "string" + }, + "position": { + "type": [ + "integer", + "null" + ], + "format": "int32" + } + } + }, + "RoomCategoryResponse": { + "type": "object", + "required": [ + "id", + "project", + "name", + "position", + "created_by", + "created_at" + ], + "properties": { + "id": { + "type": "string", + "format": "uuid" + }, + "project": { + "type": "string", + "format": "uuid" + }, + "name": { + "type": "string" + }, + "position": { + "type": "integer", + "format": "int32" + }, + "created_by": { + "type": "string", + "format": "uuid" + }, + "created_at": { + "type": "string", + "format": "date-time" + } + } + }, + "RoomCategoryUpdateRequest": { + "type": "object", + "properties": { + "name": { + "type": [ + "string", + "null" + ] + }, + "position": { + "type": [ + "integer", + "null" + ], + "format": "int32" + } + } + }, + "RoomCreateRequest": { + "type": "object", + "required": [ + "room_name", + "public" + ], + "properties": { + "room_name": { + "type": "string" + }, + "public": { + 
"type": "boolean" + }, + "category": { + "type": [ + "string", + "null" + ], + "format": "uuid" + } + } + }, + "RoomMemberAddRequest": { + "type": "object", + "required": [ + "user_id" + ], + "properties": { + "user_id": { + "type": "string", + "format": "uuid" + }, + "role": { + "type": [ + "string", + "null" + ] + } + } + }, + "RoomMemberReadSeqRequest": { + "type": "object", + "required": [ + "last_read_seq" + ], + "properties": { + "last_read_seq": { + "type": "integer", + "format": "int64" + } + } + }, + "RoomMemberResponse": { + "type": "object", + "required": [ + "room", + "user", + "role", + "do_not_disturb" + ], + "properties": { + "room": { + "type": "string", + "format": "uuid" + }, + "user": { + "type": "string", + "format": "uuid" + }, + "user_info": { + "oneOf": [ + { + "type": "null" + }, + { + "$ref": "#/components/schemas/UserInfo" + } + ] + }, + "role": { + "type": "string" + }, + "first_msg_in": { + "type": [ + "string", + "null" + ], + "format": "date-time" + }, + "joined_at": { + "type": [ + "string", + "null" + ], + "format": "date-time" + }, + "last_read_seq": { + "type": [ + "integer", + "null" + ], + "format": "int64" + }, + "do_not_disturb": { + "type": "boolean" + }, + "dnd_start_hour": { + "type": [ + "integer", + "null" + ], + "format": "int32" + }, + "dnd_end_hour": { + "type": [ + "integer", + "null" + ], + "format": "int32" + } + } + }, + "RoomMemberRoleUpdateRequest": { + "type": "object", + "required": [ + "user_id", + "role" + ], + "properties": { + "user_id": { + "type": "string", + "format": "uuid" + }, + "role": { + "type": "string" + } + } + }, + "RoomMessageCreateRequest": { + "type": "object", + "required": [ + "content" + ], + "properties": { + "content": { + "type": "string" + }, + "content_type": { + "type": [ + "string", + "null" + ] + }, + "thread_id": { + "type": [ + "string", + "null" + ], + "format": "uuid" + }, + "in_reply_to": { + "type": [ + "string", + "null" + ], + "format": "uuid" + } + } + }, + 
"RoomMessageListResponse": { + "type": "object", + "required": [ + "messages", + "total" + ], + "properties": { + "messages": { + "type": "array", + "items": { + "$ref": "#/components/schemas/RoomMessageResponse" + } + }, + "total": { + "type": "integer", + "format": "int64" + } + } + }, + "RoomMessageResponse": { + "type": "object", + "required": [ + "id", + "seq", + "room", + "sender_type", + "content", + "content_type", + "send_at" + ], + "properties": { + "id": { + "type": "string", + "format": "uuid" + }, + "seq": { + "type": "integer", + "format": "int64" + }, + "room": { + "type": "string", + "format": "uuid" + }, + "sender_type": { + "type": "string" + }, + "sender_id": { + "type": [ + "string", + "null" + ], + "format": "uuid" + }, + "display_name": { + "type": [ + "string", + "null" + ] + }, + "thread": { + "type": [ + "string", + "null" + ], + "format": "uuid" + }, + "in_reply_to": { + "type": [ + "string", + "null" + ], + "format": "uuid" + }, + "content": { + "type": "string" + }, + "content_type": { + "type": "string" + }, + "edited_at": { + "type": [ + "string", + "null" + ], + "format": "date-time" + }, + "send_at": { + "type": "string", + "format": "date-time" + }, + "revoked": { + "type": [ + "string", + "null" + ], + "format": "date-time" + }, + "revoked_by": { + "type": [ + "string", + "null" + ], + "format": "uuid" + } + } + }, + "RoomMessageUpdateRequest": { + "type": "object", + "required": [ + "content" + ], + "properties": { + "content": { + "type": "string" + } + } + }, + "RoomPinResponse": { + "type": "object", + "required": [ + "room", + "message", + "pinned_by", + "pinned_at" + ], + "properties": { + "room": { + "type": "string", + "format": "uuid" + }, + "message": { + "type": "string", + "format": "uuid" + }, + "pinned_by": { + "type": "string", + "format": "uuid" + }, + "pinned_at": { + "type": "string", + "format": "date-time" + } + } + }, + "RoomResponse": { + "type": "object", + "required": [ + "id", + "project", + "room_name", + 
"public", + "created_by", + "created_at", + "last_msg_at" + ], + "properties": { + "id": { + "type": "string", + "format": "uuid" + }, + "project": { + "type": "string", + "format": "uuid" + }, + "room_name": { + "type": "string" + }, + "public": { + "type": "boolean" + }, + "category": { + "type": [ + "string", + "null" + ], + "format": "uuid" + }, + "created_by": { + "type": "string", + "format": "uuid" + }, + "created_at": { + "type": "string", + "format": "date-time" + }, + "last_msg_at": { + "type": "string", + "format": "date-time" + }, + "unread_count": { + "type": "integer", + "format": "int64" + } + } + }, + "RoomThreadCreateRequest": { + "type": "object", + "required": [ + "parent_seq" + ], + "properties": { + "parent_seq": { + "type": "integer", + "format": "int64" + } + } + }, + "RoomThreadResponse": { + "type": "object", + "required": [ + "id", + "room", + "parent", + "created_by", + "participants", + "last_message_at", + "created_at", + "updated_at" + ], + "properties": { + "id": { + "type": "string", + "format": "uuid" + }, + "room": { + "type": "string", + "format": "uuid" + }, + "parent": { + "type": "integer", + "format": "int64" + }, + "created_by": { + "type": "string", + "format": "uuid" + }, + "participants": {}, + "last_message_at": { + "type": "string", + "format": "date-time" + }, + "last_message_preview": { + "type": [ + "string", + "null" + ] + }, + "created_at": { + "type": "string", + "format": "date-time" + }, + "updated_at": { + "type": "string", + "format": "date-time" + } + } + }, + "RoomUpdateRequest": { + "type": "object", + "properties": { + "room_name": { + "type": [ + "string", + "null" + ] + }, + "public": { + "type": [ + "boolean", + "null" + ] + }, + "category": { + "type": [ + "string", + "null" + ], + "format": "uuid" + } + } + }, + "RsaResponse": { + "type": "object", + "required": [ + "public_key" + ], + "properties": { + "public_key": { + "type": "string" + } + } + }, + "ScanResponse": { + "type": "object", + 
"required": [ + "discovered", + "created", + "updated", + "removed" + ], + "properties": { + "discovered": { + "type": "integer", + "format": "int64" + }, + "created": { + "type": "integer", + "format": "int64" + }, + "updated": { + "type": "integer", + "format": "int64" + }, + "removed": { + "type": "integer", + "format": "int64" + } + } + }, + "SearchResponse": { + "type": "object", + "required": [ + "query" + ], + "properties": { + "query": { + "type": "string" + }, + "projects": { + "oneOf": [ + { + "type": "null" + }, + { + "$ref": "#/components/schemas/SearchResultSet_ProjectSearchItem" + } + ] + }, + "repos": { + "oneOf": [ + { + "type": "null" + }, + { + "$ref": "#/components/schemas/SearchResultSet_RepoSearchItem" + } + ] + }, + "issues": { + "oneOf": [ + { + "type": "null" + }, + { + "$ref": "#/components/schemas/SearchResultSet_IssueSearchItem" + } + ] + }, + "users": { + "oneOf": [ + { + "type": "null" + }, + { + "$ref": "#/components/schemas/SearchResultSet_UserSearchItem" + } + ] + } + } + }, + "SearchResultSet_IssueSearchItem": { + "type": "object", + "required": [ + "items", + "total", + "page", + "per_page" + ], + "properties": { + "items": { + "type": "array", + "items": { + "type": "object", + "required": [ + "uid", + "number", + "title", + "state", + "project_uid", + "project_name", + "created_at", + "updated_at" + ], + "properties": { + "uid": { + "type": "string", + "format": "uuid" + }, + "number": { + "type": "integer", + "format": "int64" + }, + "title": { + "type": "string" + }, + "body": { + "type": [ + "string", + "null" + ] + }, + "state": { + "type": "string" + }, + "project_uid": { + "type": "string", + "format": "uuid" + }, + "project_name": { + "type": "string" + }, + "created_at": { + "type": "string", + "format": "date-time" + }, + "updated_at": { + "type": "string", + "format": "date-time" + } + } + } + }, + "total": { + "type": "integer", + "format": "int64" + }, + "page": { + "type": "integer", + "format": "int32", + "minimum": 
0 + }, + "per_page": { + "type": "integer", + "format": "int32", + "minimum": 0 + } + } + }, + "SearchResultSet_ProjectSearchItem": { + "type": "object", + "required": [ + "items", + "total", + "page", + "per_page" + ], + "properties": { + "items": { + "type": "array", + "items": { + "type": "object", + "required": [ + "uid", + "name", + "display_name", + "is_public", + "created_at", + "updated_at" + ], + "properties": { + "uid": { + "type": "string", + "format": "uuid" + }, + "name": { + "type": "string" + }, + "display_name": { + "type": "string" + }, + "description": { + "type": [ + "string", + "null" + ] + }, + "avatar_url": { + "type": [ + "string", + "null" + ] + }, + "is_public": { + "type": "boolean" + }, + "created_at": { + "type": "string", + "format": "date-time" + }, + "updated_at": { + "type": "string", + "format": "date-time" + } + } + } + }, + "total": { + "type": "integer", + "format": "int64" + }, + "page": { + "type": "integer", + "format": "int32", + "minimum": 0 + }, + "per_page": { + "type": "integer", + "format": "int32", + "minimum": 0 + } + } + }, + "SearchResultSet_RepoSearchItem": { + "type": "object", + "required": [ + "items", + "total", + "page", + "per_page" + ], + "properties": { + "items": { + "type": "array", + "items": { + "type": "object", + "required": [ + "uid", + "name", + "project_uid", + "project_name", + "is_private", + "created_at" + ], + "properties": { + "uid": { + "type": "string", + "format": "uuid" + }, + "name": { + "type": "string" + }, + "description": { + "type": [ + "string", + "null" + ] + }, + "project_uid": { + "type": "string", + "format": "uuid" + }, + "project_name": { + "type": "string" + }, + "is_private": { + "type": "boolean" + }, + "created_at": { + "type": "string", + "format": "date-time" + } + } + } + }, + "total": { + "type": "integer", + "format": "int64" + }, + "page": { + "type": "integer", + "format": "int32", + "minimum": 0 + }, + "per_page": { + "type": "integer", + "format": "int32", + 
"minimum": 0 + } + } + }, + "SearchResultSet_UserSearchItem": { + "type": "object", + "required": [ + "items", + "total", + "page", + "per_page" + ], + "properties": { + "items": { + "type": "array", + "items": { + "type": "object", + "required": [ + "uid", + "username", + "created_at" + ], + "properties": { + "uid": { + "type": "string", + "format": "uuid" + }, + "username": { + "type": "string" + }, + "display_name": { + "type": [ + "string", + "null" + ] + }, + "avatar_url": { + "type": [ + "string", + "null" + ] + }, + "organization": { + "type": [ + "string", + "null" + ] + }, + "created_at": { + "type": "string", + "format": "date-time" + } + } + } + }, + "total": { + "type": "integer", + "format": "int64" + }, + "page": { + "type": "integer", + "format": "int32", + "minimum": 0 + }, + "per_page": { + "type": "integer", + "format": "int32", + "minimum": 0 + } + } + }, + "SideBySideChangeTypeResponse": { + "type": "string", + "enum": [ + "unchanged", + "added", + "removed", + "modified", + "empty" + ] + }, + "SideBySideDiffQuery": { + "type": "object", + "description": "Query parameters for side-by-side diff.", + "required": [ + "base", + "head" + ], + "properties": { + "base": { + "type": "string", + "description": "OID (SHA) of the base / old commit or tree." + }, + "head": { + "type": "string", + "description": "OID (SHA) of the head / new commit or tree." + }, + "pathspec": { + "type": [ + "array", + "null" + ], + "items": { + "type": "string" + }, + "description": "Optional path filter — only include files matching this prefix." 
+ }, + "context_lines": { + "type": [ + "integer", + "null" + ], + "format": "int32", + "description": "Number of context lines around changes (default 3).", + "minimum": 0 + } + } + }, + "SideBySideDiffResponse": { + "type": "object", + "required": [ + "files", + "total_additions", + "total_deletions" + ], + "properties": { + "files": { + "type": "array", + "items": { + "$ref": "#/components/schemas/SideBySideFileResponse" + } + }, + "total_additions": { + "type": "integer", + "minimum": 0 + }, + "total_deletions": { + "type": "integer", + "minimum": 0 + } + } + }, + "SideBySideFileResponse": { + "type": "object", + "required": [ + "path", + "additions", + "deletions", + "is_binary", + "is_rename", + "lines" + ], + "properties": { + "path": { + "type": "string" + }, + "additions": { + "type": "integer", + "minimum": 0 + }, + "deletions": { + "type": "integer", + "minimum": 0 + }, + "is_binary": { + "type": "boolean" + }, + "is_rename": { + "type": "boolean" + }, + "lines": { + "type": "array", + "items": { + "$ref": "#/components/schemas/SideBySideLineResponse" + } + } + } + }, + "SideBySideLineResponse": { + "type": "object", + "required": [ + "left_content", + "right_content", + "change_type" + ], + "properties": { + "left_line_no": { + "type": [ + "integer", + "null" + ], + "format": "int32", + "minimum": 0 + }, + "right_line_no": { + "type": [ + "integer", + "null" + ], + "format": "int32", + "minimum": 0 + }, + "left_content": { + "type": "string" + }, + "right_content": { + "type": "string" + }, + "change_type": { + "$ref": "#/components/schemas/SideBySideChangeTypeResponse" + } + } + }, + "SkillResponse": { + "type": "object", + "required": [ + "id", + "project_uuid", + "slug", + "name", + "source", + "content", + "metadata", + "enabled", + "created_at", + "updated_at" + ], + "properties": { + "id": { + "type": "integer", + "format": "int64" + }, + "project_uuid": { + "type": "string" + }, + "slug": { + "type": "string" + }, + "name": { + "type": "string" + 
}, + "description": { + "type": [ + "string", + "null" + ] + }, + "source": { + "type": "string" + }, + "repo_id": { + "type": [ + "string", + "null" + ] + }, + "commit_sha": { + "type": [ + "string", + "null" + ] + }, + "blob_hash": { + "type": [ + "string", + "null" + ] + }, + "content": { + "type": "string" + }, + "metadata": {}, + "enabled": { + "type": "boolean" + }, + "created_by": { + "type": [ + "string", + "null" + ] + }, + "created_at": { + "type": "string", + "format": "date-time" + }, + "updated_at": { + "type": "string", + "format": "date-time" + } + } + }, + "SshKeyListResponse": { + "type": "object", + "required": [ + "keys", + "total" + ], + "properties": { + "keys": { + "type": "array", + "items": { + "$ref": "#/components/schemas/SshKeyResponse" + } + }, + "total": { + "type": "integer", + "minimum": 0 + } + } + }, + "SshKeyResponse": { + "type": "object", + "required": [ + "id", + "user_uid", + "title", + "fingerprint", + "key_type", + "is_verified", + "is_revoked", + "created_at", + "updated_at" + ], + "properties": { + "id": { + "type": "integer", + "format": "int64" + }, + "user_uid": { + "type": "string", + "format": "uuid" + }, + "title": { + "type": "string" + }, + "fingerprint": { + "type": "string" + }, + "key_type": { + "type": "string" + }, + "key_bits": { + "type": [ + "integer", + "null" + ], + "format": "int32" + }, + "is_verified": { + "type": "boolean" + }, + "last_used_at": { + "type": [ + "string", + "null" + ], + "format": "date-time" + }, + "expires_at": { + "type": [ + "string", + "null" + ], + "format": "date-time" + }, + "is_revoked": { + "type": "boolean" + }, + "created_at": { + "type": "string", + "format": "date-time" + }, + "updated_at": { + "type": "string", + "format": "date-time" + } + } + }, + "StarCountResponse": { + "type": "object", + "required": [ + "count" + ], + "properties": { + "count": { + "type": "integer", + "format": "int64" + } + } + }, + "StarUserInfo": { + "type": "object", + "required": [ + "uid", + 
"username", + "avatar_url" + ], + "properties": { + "uid": { + "type": "string" + }, + "username": { + "type": "string" + }, + "avatar_url": { + "type": "string" + } + } + }, + "StarUserListResponse": { + "type": "object", + "required": [ + "users" + ], + "properties": { + "users": { + "type": "array", + "items": { + "$ref": "#/components/schemas/StarUserInfo" + } + } + } + }, + "SubmitJoinRequest": { + "type": "object", + "required": [ + "answers" + ], + "properties": { + "message": { + "type": [ + "string", + "null" + ] + }, + "answers": { + "type": "array", + "items": { + "$ref": "#/components/schemas/AnswerRequest" + } + } + } + }, + "SubscriptionInfo": { + "type": "object", + "required": [ + "id", + "user_uid", + "target_uid", + "subscribed_at", + "is_active" + ], + "properties": { + "id": { + "type": "integer", + "format": "int64" + }, + "user_uid": { + "type": "string", + "format": "uuid" + }, + "target_uid": { + "type": "string", + "format": "uuid" + }, + "subscribed_at": { + "type": "string", + "format": "date-time" + }, + "is_active": { + "type": "boolean" + } + } + }, + "TagCountResponse": { + "type": "object", + "required": [ + "count" + ], + "properties": { + "count": { + "type": "integer", + "minimum": 0 + } + } + }, + "TagCreateLightweightRequest": { + "type": "object", + "required": [ + "name", + "target" + ], + "properties": { + "name": { + "type": "string" + }, + "target": { + "type": "string" + }, + "force": { + "type": "boolean" + } + } + }, + "TagCreateRequest": { + "type": "object", + "required": [ + "name", + "target" + ], + "properties": { + "name": { + "type": "string" + }, + "target": { + "type": "string" + }, + "message": { + "type": [ + "string", + "null" + ] + }, + "tagger_name": { + "type": [ + "string", + "null" + ] + }, + "tagger_email": { + "type": [ + "string", + "null" + ] + }, + "force": { + "type": "boolean" + } + } + }, + "TagExistsResponse": { + "type": "object", + "required": [ + "name", + "exists" + ], + "properties": { + 
"name": { + "type": "string" + }, + "exists": { + "type": "boolean" + } + } + }, + "TagInfoResponse": { + "type": "object", + "required": [ + "name", + "oid", + "target", + "is_annotated" + ], + "properties": { + "name": { + "type": "string" + }, + "oid": { + "type": "string" + }, + "target": { + "type": "string" + }, + "is_annotated": { + "type": "boolean" + }, + "message": { + "type": [ + "string", + "null" + ] + }, + "tagger": { + "type": [ + "string", + "null" + ] + }, + "tagger_email": { + "type": [ + "string", + "null" + ] + } + } + }, + "TagIsAnnotatedResponse": { + "type": "object", + "required": [ + "name", + "is_annotated" + ], + "properties": { + "name": { + "type": "string" + }, + "is_annotated": { + "type": "boolean" + } + } + }, + "TagMessageResponse": { + "type": "object", + "required": [ + "name" + ], + "properties": { + "name": { + "type": "string" + }, + "message": { + "type": [ + "string", + "null" + ] + } + } + }, + "TagRenameQuery": { + "type": "object", + "required": [ + "old_name", + "new_name" + ], + "properties": { + "old_name": { + "type": "string" + }, + "new_name": { + "type": "string" + } + } + }, + "TagSummaryResponse": { + "type": "object", + "required": [ + "total_count" + ], + "properties": { + "total_count": { + "type": "integer", + "minimum": 0 + } + } + }, + "TagTaggerResponse": { + "type": "object", + "required": [ + "name" + ], + "properties": { + "name": { + "type": "string" + }, + "tagger": { + "type": [ + "string", + "null" + ] + }, + "tagger_email": { + "type": [ + "string", + "null" + ] + } + } + }, + "TagTargetResponse": { + "type": "object", + "required": [ + "name" + ], + "properties": { + "name": { + "type": "string" + }, + "target": { + "type": [ + "string", + "null" + ] + } + } + }, + "TagUpdateMessageRequest": { + "type": "object", + "required": [ + "name", + "message", + "tagger_name", + "tagger_email" + ], + "properties": { + "name": { + "type": "string" + }, + "message": { + "type": "string" + }, + "tagger_name": 
{ + "type": "string" + }, + "tagger_email": { + "type": "string" + } + } + }, + "TransferRepoParams": { + "type": "object", + "required": [ + "target_project_name" + ], + "properties": { + "target_project_name": { + "type": "string" + } + } + }, + "TransferRepoResponse": { + "type": "object", + "required": [ + "repo_id", + "old_project_name", + "new_project_name", + "repo_name" + ], + "properties": { + "repo_id": { + "type": "string", + "format": "uuid" + }, + "old_project_name": { + "type": "string" + }, + "new_project_name": { + "type": "string" + }, + "repo_name": { + "type": "string" + } + } + }, + "TreeDiffStatsResponse": { + "type": "object", + "required": [ + "old_tree", + "new_tree", + "files_changed", + "insertions", + "deletions" + ], + "properties": { + "old_tree": { + "type": "string" + }, + "new_tree": { + "type": "string" + }, + "files_changed": { + "type": "integer", + "minimum": 0 + }, + "insertions": { + "type": "integer", + "minimum": 0 + }, + "deletions": { + "type": "integer", + "minimum": 0 + } + } + }, + "TreeEntryCountResponse": { + "type": "object", + "required": [ + "oid", + "count" + ], + "properties": { + "oid": { + "type": "string" + }, + "count": { + "type": "integer", + "minimum": 0 + } + } + }, + "TreeEntryResponse": { + "type": "object", + "required": [ + "name", + "oid", + "kind", + "filemode", + "is_binary" + ], + "properties": { + "name": { + "type": "string" + }, + "oid": { + "type": "string" + }, + "kind": { + "type": "string" + }, + "filemode": { + "type": "integer", + "format": "int32", + "minimum": 0 + }, + "is_binary": { + "type": "boolean" + } + } + }, + "TreeExistsResponse": { + "type": "object", + "required": [ + "oid", + "exists" + ], + "properties": { + "oid": { + "type": "string" + }, + "exists": { + "type": "boolean" + } + } + }, + "TreeInfoResponse": { + "type": "object", + "required": [ + "oid", + "entry_count", + "is_empty" + ], + "properties": { + "oid": { + "type": "string" + }, + "entry_count": { + "type": 
"integer", + "minimum": 0 + }, + "is_empty": { + "type": "boolean" + } + } + }, + "TreeIsEmptyResponse": { + "type": "object", + "required": [ + "oid", + "is_empty" + ], + "properties": { + "oid": { + "type": "string" + }, + "is_empty": { + "type": "boolean" + } + } + }, + "TriggerCodeReviewRequest": { + "type": "object", + "properties": { + "pr_number": { + "type": [ + "integer", + "null" + ], + "format": "int64" + }, + "model_id": { + "type": [ + "string", + "null" + ], + "format": "uuid" + } + } + }, + "TriggerCodeReviewResponse": { + "type": "object", + "required": [ + "comments_posted", + "comments" + ], + "properties": { + "comments_posted": { + "type": "integer", + "minimum": 0 + }, + "comments": { + "type": "array", + "items": { + "$ref": "#/components/schemas/CommentCreated" + } + }, + "billing": { + "oneOf": [ + { + "type": "null" + }, + { + "$ref": "#/components/schemas/BillingRecord" + } + ] + } + } + }, + "UpdateBoardParams": { + "type": "object", + "properties": { + "name": { + "type": [ + "string", + "null" + ] + }, + "description": { + "type": [ + "string", + "null" + ] + } + } + }, + "UpdateCardParams": { + "type": "object", + "properties": { + "title": { + "type": [ + "string", + "null" + ] + }, + "description": { + "type": [ + "string", + "null" + ] + }, + "assignee_id": { + "type": [ + "string", + "null" + ], + "format": "uuid" + }, + "due_date": { + "type": [ + "string", + "null" + ], + "format": "date-time" + }, + "priority": { + "type": [ + "string", + "null" + ] + } + } + }, + "UpdateColumnParams": { + "type": "object", + "properties": { + "name": { + "type": [ + "string", + "null" + ] + }, + "position": { + "type": [ + "integer", + "null" + ], + "format": "int32" + }, + "wip_limit": { + "type": [ + "integer", + "null" + ], + "format": "int32" + }, + "color": { + "type": [ + "string", + "null" + ] + } + } + }, + "UpdateJoinSettingsRequest": { + "type": "object", + "required": [ + "require_approval", + "require_questions", + "questions" + ], 
+ "properties": { + "require_approval": { + "type": "boolean" + }, + "require_questions": { + "type": "boolean" + }, + "questions": { + "type": "array", + "items": { + "$ref": "#/components/schemas/QuestionSchema" + } + } + } + }, + "UpdateLabelParams": { + "type": "object", + "properties": { + "name": { + "type": [ + "string", + "null" + ] + }, + "color": { + "type": [ + "string", + "null" + ] + }, + "description": { + "type": [ + "string", + "null" + ] + } + } + }, + "UpdateMemberRoleRequest": { + "type": "object", + "required": [ + "user_id", + "scope" + ], + "properties": { + "user_id": { + "type": "string", + "format": "uuid" + }, + "scope": { + "$ref": "#/components/schemas/MemberRole" + } + } + }, + "UpdateModelCapabilityRequest": { + "type": "object", + "properties": { + "is_supported": { + "type": [ + "boolean", + "null" + ] + } + } + }, + "UpdateModelParameterProfileRequest": { + "type": "object", + "properties": { + "temperature_min": { + "type": [ + "number", + "null" + ], + "format": "double" + }, + "temperature_max": { + "type": [ + "number", + "null" + ], + "format": "double" + }, + "top_p_min": { + "type": [ + "number", + "null" + ], + "format": "double" + }, + "top_p_max": { + "type": [ + "number", + "null" + ], + "format": "double" + }, + "frequency_penalty_supported": { + "type": [ + "boolean", + "null" + ] + }, + "presence_penalty_supported": { + "type": [ + "boolean", + "null" + ] + } + } + }, + "UpdateModelPricingRequest": { + "type": "object", + "properties": { + "input_price_per_1k_tokens": { + "type": [ + "string", + "null" + ] + }, + "output_price_per_1k_tokens": { + "type": [ + "string", + "null" + ] + }, + "currency": { + "type": [ + "string", + "null" + ] + }, + "effective_from": { + "type": [ + "string", + "null" + ], + "format": "date-time" + } + } + }, + "UpdateModelRequest": { + "type": "object", + "properties": { + "display_name": { + "type": [ + "string", + "null" + ] + }, + "modality": { + "type": [ + "string", + "null" + ] + }, 
+ "capability": { + "type": [ + "string", + "null" + ] + }, + "context_length": { + "type": [ + "integer", + "null" + ], + "format": "int64" + }, + "max_output_tokens": { + "type": [ + "integer", + "null" + ], + "format": "int64" + }, + "training_cutoff": { + "type": [ + "string", + "null" + ], + "format": "date-time" + }, + "is_open_source": { + "type": [ + "boolean", + "null" + ] + }, + "status": { + "type": [ + "string", + "null" + ] + } + } + }, + "UpdateModelVersionRequest": { + "type": "object", + "properties": { + "version": { + "type": [ + "string", + "null" + ] + }, + "release_date": { + "type": [ + "string", + "null" + ], + "format": "date-time" + }, + "change_log": { + "type": [ + "string", + "null" + ] + }, + "is_default": { + "type": [ + "boolean", + "null" + ] + }, + "status": { + "type": [ + "string", + "null" + ] + } + } + }, + "UpdateProfileParams": { + "type": "object", + "properties": { + "avatar_url": { + "type": [ + "string", + "null" + ] + }, + "website_url": { + "type": [ + "string", + "null" + ] + }, + "organization": { + "type": [ + "string", + "null" + ] + } + } + }, + "UpdateProviderRequest": { + "type": "object", + "properties": { + "display_name": { + "type": [ + "string", + "null" + ] + }, + "website": { + "type": [ + "string", + "null" + ] + }, + "status": { + "type": [ + "string", + "null" + ] + } + } + }, + "UpdateRoleParams": { + "type": "object", + "required": [ + "user_id", + "role" + ], + "properties": { + "user_id": { + "type": "string", + "format": "uuid" + }, + "role": { + "type": "string" + } + } + }, + "UpdateSkillRequest": { + "type": "object", + "properties": { + "name": { + "type": [ + "string", + "null" + ] + }, + "description": { + "type": [ + "string", + "null" + ] + }, + "content": { + "type": [ + "string", + "null" + ] + }, + "metadata": {}, + "enabled": { + "type": [ + "boolean", + "null" + ] + } + } + }, + "UpdateSshKeyParams": { + "type": "object", + "properties": { + "title": { + "type": [ + "string", + "null" + 
] + }, + "expires_at": { + "type": [ + "string", + "null" + ], + "format": "date-time" + } + } + }, + "UpdateWebhookParams": { + "type": "object", + "properties": { + "url": { + "type": [ + "string", + "null" + ] + }, + "content_type": { + "type": [ + "string", + "null" + ] + }, + "secret": { + "type": [ + "string", + "null" + ] + }, + "insecure_ssl": { + "type": [ + "boolean", + "null" + ] + }, + "events": { + "oneOf": [ + { + "type": "null" + }, + { + "$ref": "#/components/schemas/WebhookEvent" + } + ] + }, + "active": { + "type": [ + "boolean", + "null" + ] + } + } + }, + "UserInfo": { + "type": "object", + "required": [ + "uid", + "username" + ], + "properties": { + "uid": { + "type": "string", + "format": "uuid" + }, + "username": { + "type": "string" + }, + "avatar_url": { + "type": [ + "string", + "null" + ] + } + } + }, + "UserInfoExternal": { + "type": "object", + "required": [ + "user_uid", + "username", + "display_name", + "timezone", + "language", + "is_owner", + "is_subscribe", + "total_projects", + "total_repos" + ], + "properties": { + "user_uid": { + "type": "string", + "format": "uuid" + }, + "username": { + "type": "string" + }, + "display_name": { + "type": "string" + }, + "avatar_url": { + "type": [ + "string", + "null" + ] + }, + "master_email": { + "type": [ + "string", + "null" + ] + }, + "timezone": { + "type": "string" + }, + "language": { + "type": "string" + }, + "website_url": { + "type": [ + "string", + "null" + ] + }, + "organization": { + "type": [ + "string", + "null" + ] + }, + "last_sign_in_at": { + "type": [ + "string", + "null" + ], + "format": "date-time" + }, + "is_owner": { + "type": "boolean" + }, + "is_subscribe": { + "type": "boolean" + }, + "total_projects": { + "type": "integer", + "format": "int64", + "minimum": 0 + }, + "total_repos": { + "type": "integer", + "format": "int64", + "minimum": 0 + } + } + }, + "UserProjectInfo": { + "type": "object", + "required": [ + "uid", + "name", + "display_name", + "is_public", + 
"created_at", + "updated_at", + "member_count", + "is_member" + ], + "properties": { + "uid": { + "type": "string", + "format": "uuid" + }, + "name": { + "type": "string" + }, + "display_name": { + "type": "string" + }, + "avatar_url": { + "type": [ + "string", + "null" + ] + }, + "description": { + "type": [ + "string", + "null" + ] + }, + "is_public": { + "type": "boolean" + }, + "created_at": { + "type": "string", + "format": "date-time" + }, + "updated_at": { + "type": "string", + "format": "date-time" + }, + "member_count": { + "type": "integer", + "format": "int64" + }, + "is_member": { + "type": "boolean" + } + } + }, + "UserProjectsQuery": { + "type": "object", + "properties": { + "page": { + "type": [ + "integer", + "null" + ], + "format": "int64", + "minimum": 0 + }, + "per_page": { + "type": [ + "integer", + "null" + ], + "format": "int64", + "minimum": 0 + } + } + }, + "UserProjectsResponse": { + "type": "object", + "required": [ + "username", + "projects", + "total_count" + ], + "properties": { + "username": { + "type": "string" + }, + "projects": { + "type": "array", + "items": { + "$ref": "#/components/schemas/UserProjectInfo" + } + }, + "total_count": { + "type": "integer", + "format": "int64", + "minimum": 0 + } + } + }, + "UserRepoInfo": { + "type": "object", + "required": [ + "uid", + "repo_name", + "default_branch", + "is_private", + "storage_path", + "created_at", + "updated_at" + ], + "properties": { + "uid": { + "type": "string", + "format": "uuid" + }, + "repo_name": { + "type": "string" + }, + "description": { + "type": [ + "string", + "null" + ] + }, + "default_branch": { + "type": "string" + }, + "is_private": { + "type": "boolean" + }, + "storage_path": { + "type": "string" + }, + "created_at": { + "type": "string", + "format": "date-time" + }, + "updated_at": { + "type": "string", + "format": "date-time" + } + } + }, + "UserReposQuery": { + "type": "object", + "properties": { + "page": { + "type": [ + "integer", + "null" + ], + 
"format": "int64", + "minimum": 0 + }, + "per_page": { + "type": [ + "integer", + "null" + ], + "format": "int64", + "minimum": 0 + } + } + }, + "UserReposResponse": { + "type": "object", + "required": [ + "username", + "repos", + "total_count" + ], + "properties": { + "username": { + "type": "string" + }, + "repos": { + "type": "array", + "items": { + "$ref": "#/components/schemas/UserRepoInfo" + } + }, + "total_count": { + "type": "integer", + "format": "int64", + "minimum": 0 + } + } + }, + "UserSearchItem": { + "type": "object", + "required": [ + "uid", + "username", + "created_at" + ], + "properties": { + "uid": { + "type": "string", + "format": "uuid" + }, + "username": { + "type": "string" + }, + "display_name": { + "type": [ + "string", + "null" + ] + }, + "avatar_url": { + "type": [ + "string", + "null" + ] + }, + "organization": { + "type": [ + "string", + "null" + ] + }, + "created_at": { + "type": "string", + "format": "date-time" + } + } + }, + "Verify2FAParams": { + "type": "object", + "required": [ + "code" + ], + "properties": { + "code": { + "type": "string" + } + } + }, + "WatchCountResponse": { + "type": "object", + "required": [ + "count" + ], + "properties": { + "count": { + "type": "integer", + "format": "int64" + } + } + }, + "WatchUserInfo": { + "type": "object", + "required": [ + "uid", + "username", + "avatar_url" + ], + "properties": { + "uid": { + "type": "string", + "format": "uuid" + }, + "username": { + "type": "string" + }, + "avatar_url": { + "type": "string" + } + } + }, + "WatchUserListResponse": { + "type": "object", + "required": [ + "users" + ], + "properties": { + "users": { + "type": "array", + "items": { + "$ref": "#/components/schemas/WatchUserInfo" + } + } + } + }, + "WebhookEvent": { + "type": "object", + "properties": { + "push": { + "type": "boolean" + }, + "tag_push": { + "type": "boolean" + }, + "pull_request": { + "type": "boolean" + }, + "issue_comment": { + "type": "boolean" + }, + "release": { + "type": "boolean" 
+ } + } + }, + "WebhookListResponse": { + "type": "object", + "required": [ + "webhooks", + "total" + ], + "properties": { + "webhooks": { + "type": "array", + "items": { + "$ref": "#/components/schemas/WebhookResponse" + } + }, + "total": { + "type": "integer", + "minimum": 0 + } + } + }, + "WebhookResponse": { + "type": "object", + "required": [ + "id", + "repo_uuid", + "url", + "content_type", + "events", + "active", + "created_at", + "touch_count" + ], + "properties": { + "id": { + "type": "integer", + "format": "int64" + }, + "repo_uuid": { + "type": "string" + }, + "url": { + "type": "string" + }, + "content_type": { + "type": "string" + }, + "secret": { + "type": [ + "string", + "null" + ] + }, + "events": { + "$ref": "#/components/schemas/WebhookEvent" + }, + "active": { + "type": "boolean" + }, + "created_at": { + "type": "string", + "format": "date-time" + }, + "last_delivered_at": { + "type": [ + "string", + "null" + ], + "format": "date-time" + }, + "touch_count": { + "type": "integer", + "format": "int64" + } + } + }, + "WorkspaceActivityItem": { + "type": "object", + "required": [ + "id", + "project_name", + "event_type", + "title", + "actor_name", + "created_at" + ], + "properties": { + "id": { + "type": "integer", + "format": "int64" + }, + "project_name": { + "type": "string" + }, + "event_type": { + "type": "string" + }, + "title": { + "type": "string" + }, + "content": { + "type": [ + "string", + "null" + ] + }, + "actor_name": { + "type": "string" + }, + "actor_avatar": { + "type": [ + "string", + "null" + ] + }, + "created_at": { + "type": "string", + "format": "date-time" + } + } + }, + "WorkspaceBillingAddCreditParams": { + "type": "object", + "required": [ + "amount" + ], + "properties": { + "amount": { + "type": "number", + "format": "double" + }, + "reason": { + "type": [ + "string", + "null" + ] + } + } + }, + "WorkspaceBillingCurrentResponse": { + "type": "object", + "required": [ + "workspace_id", + "currency", + "monthly_quota", + 
"balance", + "total_spent", + "month_used", + "cycle_start_utc", + "cycle_end_utc", + "updated_at", + "created_at" + ], + "properties": { + "workspace_id": { + "type": "string", + "format": "uuid" + }, + "currency": { + "type": "string" + }, + "monthly_quota": { + "type": "number", + "format": "double" + }, + "balance": { + "type": "number", + "format": "double" + }, + "total_spent": { + "type": "number", + "format": "double" + }, + "month_used": { + "type": "number", + "format": "double" + }, + "cycle_start_utc": { + "type": "string", + "format": "date-time" + }, + "cycle_end_utc": { + "type": "string", + "format": "date-time" + }, + "updated_at": { + "type": "string", + "format": "date-time" + }, + "created_at": { + "type": "string", + "format": "date-time" + } + } + }, + "WorkspaceBillingHistoryItem": { + "type": "object", + "required": [ + "uid", + "workspace_id", + "amount", + "currency", + "reason", + "created_at" + ], + "properties": { + "uid": { + "type": "string", + "format": "uuid" + }, + "workspace_id": { + "type": "string", + "format": "uuid" + }, + "user_id": { + "type": [ + "string", + "null" + ], + "format": "uuid" + }, + "amount": { + "type": "number", + "format": "double" + }, + "currency": { + "type": "string" + }, + "reason": { + "type": "string" + }, + "extra": {}, + "created_at": { + "type": "string", + "format": "date-time" + } + } + }, + "WorkspaceBillingHistoryQuery": { + "type": "object", + "properties": { + "page": { + "type": [ + "integer", + "null" + ], + "format": "int64", + "minimum": 0 + }, + "per_page": { + "type": [ + "integer", + "null" + ], + "format": "int64", + "minimum": 0 + } + } + }, + "WorkspaceBillingHistoryResponse": { + "type": "object", + "required": [ + "page", + "per_page", + "total", + "list" + ], + "properties": { + "page": { + "type": "integer", + "format": "int64", + "minimum": 0 + }, + "per_page": { + "type": "integer", + "format": "int64", + "minimum": 0 + }, + "total": { + "type": "integer", + "format": "int64", 
+ "minimum": 0 + }, + "list": { + "type": "array", + "items": { + "$ref": "#/components/schemas/WorkspaceBillingHistoryItem" + } + } + } + }, + "WorkspaceInfoResponse": { + "type": "object", + "required": [ + "id", + "slug", + "name", + "plan", + "member_count", + "created_at", + "updated_at" + ], + "properties": { + "id": { + "type": "string", + "format": "uuid" + }, + "slug": { + "type": "string" + }, + "name": { + "type": "string" + }, + "description": { + "type": [ + "string", + "null" + ] + }, + "avatar_url": { + "type": [ + "string", + "null" + ] + }, + "plan": { + "type": "string" + }, + "billing_email": { + "type": [ + "string", + "null" + ] + }, + "member_count": { + "type": "integer", + "format": "int64" + }, + "my_role": { + "type": [ + "string", + "null" + ] + }, + "created_at": { + "type": "string", + "format": "date-time" + }, + "updated_at": { + "type": "string", + "format": "date-time" + } + } + }, + "WorkspaceInitParams": { + "type": "object", + "required": [ + "slug", + "name" + ], + "properties": { + "slug": { + "type": "string" + }, + "name": { + "type": "string" + }, + "description": { + "type": [ + "string", + "null" + ] + } + } + }, + "WorkspaceInviteAcceptParams": { + "type": "object", + "required": [ + "token" + ], + "properties": { + "token": { + "type": "string" + } + } + }, + "WorkspaceInviteParams": { + "type": "object", + "required": [ + "email" + ], + "properties": { + "email": { + "type": "string" + }, + "role": { + "type": [ + "string", + "null" + ] + } + } + }, + "WorkspaceListItem": { + "type": "object", + "required": [ + "id", + "slug", + "name", + "plan", + "my_role", + "created_at" + ], + "properties": { + "id": { + "type": "string", + "format": "uuid" + }, + "slug": { + "type": "string" + }, + "name": { + "type": "string" + }, + "description": { + "type": [ + "string", + "null" + ] + }, + "avatar_url": { + "type": [ + "string", + "null" + ] + }, + "plan": { + "type": "string" + }, + "my_role": { + "type": "string" + }, + 
"created_at": { + "type": "string", + "format": "date-time" + } + } + }, + "WorkspaceListResponse": { + "type": "object", + "required": [ + "workspaces", + "total" + ], + "properties": { + "workspaces": { + "type": "array", + "items": { + "$ref": "#/components/schemas/WorkspaceListItem" + } + }, + "total": { + "type": "integer", + "format": "int64", + "minimum": 0 + } + } + }, + "WorkspaceMemberInfo": { + "type": "object", + "required": [ + "user_id", + "username", + "role", + "joined_at" + ], + "properties": { + "user_id": { + "type": "string", + "format": "uuid" + }, + "username": { + "type": "string" + }, + "display_name": { + "type": [ + "string", + "null" + ] + }, + "avatar_url": { + "type": [ + "string", + "null" + ] + }, + "role": { + "type": "string" + }, + "joined_at": { + "type": "string", + "format": "date-time" + }, + "invited_by_username": { + "type": [ + "string", + "null" + ], + "description": "Username of the person who invited this member." + } + } + }, + "WorkspaceMembersResponse": { + "type": "object", + "required": [ + "members", + "total", + "page", + "per_page" + ], + "properties": { + "members": { + "type": "array", + "items": { + "$ref": "#/components/schemas/WorkspaceMemberInfo" + } + }, + "total": { + "type": "integer", + "format": "int64", + "minimum": 0 + }, + "page": { + "type": "integer", + "format": "int64", + "minimum": 0 + }, + "per_page": { + "type": "integer", + "format": "int64", + "minimum": 0 + } + } + }, + "WorkspaceProjectItem": { + "type": "object", + "required": [ + "uid", + "name", + "display_name", + "is_public", + "created_at", + "updated_at" + ], + "properties": { + "uid": { + "type": "string", + "format": "uuid" + }, + "name": { + "type": "string" + }, + "display_name": { + "type": "string" + }, + "avatar_url": { + "type": [ + "string", + "null" + ] + }, + "description": { + "type": [ + "string", + "null" + ] + }, + "is_public": { + "type": "boolean" + }, + "created_at": { + "type": "string", + "format": "date-time" + 
}, + "updated_at": { + "type": "string", + "format": "date-time" + } + } + }, + "WorkspaceProjectsQuery": { + "type": "object", + "properties": { + "page": { + "type": [ + "integer", + "null" + ], + "format": "int64", + "minimum": 0 + }, + "per_page": { + "type": [ + "integer", + "null" + ], + "format": "int64", + "minimum": 0 + } + } + }, + "WorkspaceProjectsResponse": { + "type": "object", + "required": [ + "projects", + "total", + "page", + "per_page" + ], + "properties": { + "projects": { + "type": "array", + "items": { + "$ref": "#/components/schemas/WorkspaceProjectItem" + } + }, + "total": { + "type": "integer", + "format": "int64", + "minimum": 0 + }, + "page": { + "type": "integer", + "format": "int64", + "minimum": 0 + }, + "per_page": { + "type": "integer", + "format": "int64", + "minimum": 0 + } + } + }, + "WorkspaceStatsResponse": { + "type": "object", + "required": [ + "project_count", + "member_count", + "recent_activities" + ], + "properties": { + "project_count": { + "type": "integer", + "format": "int64" + }, + "member_count": { + "type": "integer", + "format": "int64" + }, + "my_role": { + "type": [ + "string", + "null" + ] + }, + "recent_activities": { + "type": "array", + "items": { + "$ref": "#/components/schemas/WorkspaceActivityItem" + } + } + } + }, + "WorkspaceUpdateParams": { + "type": "object", + "properties": { + "name": { + "type": [ + "string", + "null" + ] + }, + "description": { + "type": [ + "string", + "null" + ] + }, + "avatar_url": { + "type": [ + "string", + "null" + ] + }, + "billing_email": { + "type": [ + "string", + "null" + ] + } + } + } + } + }, + "tags": [ + { + "name": "Auth", + "description": "Authentication and user identity" + }, + { + "name": "Agent", + "description": "AI agent model management" + }, + { + "name": "Git", + "description": "Git repository operations" + }, + { + "name": "Issues", + "description": "Issue tracking" + }, + { + "name": "Project", + "description": "Project management" + }, + { + "name": 
"PullRequest", + "description": "Pull request management" + }, + { + "name": "Room", + "description": "Real-time chat rooms" + }, + { + "name": "Search", + "description": "Global and room message search" + }, + { + "name": "User", + "description": "User profiles and settings" + }, + { + "name": "Workspace", + "description": "Workspace management and collaboration" + } + ] +} \ No newline at end of file diff --git a/openspec/config.yaml b/openspec/config.yaml new file mode 100644 index 0000000..392946c --- /dev/null +++ b/openspec/config.yaml @@ -0,0 +1,20 @@ +schema: spec-driven + +# Project context (optional) +# This is shown to AI when creating artifacts. +# Add your tech stack, conventions, style guides, domain knowledge, etc. +# Example: +# context: | +# Tech stack: TypeScript, React, Node.js +# We use conventional commits +# Domain: e-commerce platform + +# Per-artifact rules (optional) +# Add custom rules for specific artifacts. +# Example: +# rules: +# proposal: +# - Keep proposals under 500 words +# - Always include a "Non-goals" section +# tasks: +# - Break tasks into chunks of max 2 hours diff --git a/package.json b/package.json new file mode 100644 index 0000000..55d61ee --- /dev/null +++ b/package.json @@ -0,0 +1,73 @@ +{ + "name": "code", + "version": "1.0.0", + "description": "Code API monorepo", + "private": true, + "scripts": { + "openapi:gen-json": "cargo run -p api --bin gen_api", + "openapi:gen-client": "node scripts/gen-client.js", + "openapi:gen": "pnpm openapi:gen-json && pnpm openapi:gen-client", + "dev": "vite", + "build": "tsc -b && vite build", + "lint": "eslint .", + "preview": "vite preview" + }, + "dependencies": { + "@base-ui/react": "^1.3.0", + "@dnd-kit/core": "^6.3.1", + "@dnd-kit/sortable": "^10.0.0", + "@dnd-kit/utilities": "^3.2.2", + "@fontsource-variable/geist": "^5.2.8", + "@gitgraph/react": "^1.6.0", + "@tailwindcss/vite": "^4.2.2", + "@tanstack/react-query": "^5.96.0", + "@tanstack/react-virtual": "^3.13.23", + "axios": 
"^1.7.0", + "class-variance-authority": "^0.7.1", + "clsx": "^2.1.1", + "cmdk": "^1.1.1", + "date-fns": "^4.1.0", + "embla-carousel-react": "^8.6.0", + "form-data": "^4.0.5", + "framer-motion": "11", + "input-otp": "^1.4.2", + "jsencrypt": "^3.5.4", + "lucide-react": "^1.7.0", + "next-themes": "^0.4.6", + "react": "^19.2.4", + "react-day-picker": "^9.14.0", + "react-dom": "^19.2.4", + "react-markdown": "10", + "react-resizable-panels": "^4.8.0", + "react-router-dom": "^7.13.2", + "recharts": "3.8.0", + "remark-gfm": "4", + "remeda": "2", + "shadcn": "^4.1.1", + "shiki": "1", + "sonner": "^2.0.7", + "tailwind-merge": "^3.5.0", + "tailwindcss": "^4.2.2", + "tw-animate-css": "^1.4.0", + "uuid": "^13.0.0", + "zustand": "^5.0.0", + "vaul": "^1.1.2" + }, + "devDependencies": { + "@eslint/js": "^9.39.4", + "@hey-api/openapi-ts": "0.94.5", + "@types/node": "^24.12.0", + "@types/react": "^19.2.14", + "@types/react-dom": "^19.2.3", + "@vitejs/plugin-react": "^6.0.1", + "@vitejs/plugin-react-swc": "^4.3.0", + "eslint": "^9.39.4", + "eslint-plugin-react-hooks": "^7.0.1", + "eslint-plugin-react-refresh": "^0.5.2", + "globals": "^17.4.0", + "typescript": "~5.9.3", + "typescript-eslint": "^8.57.0", + "vite": "^8.0.1" + }, + "packageManager": "pnpm@10.32.1" +} diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml new file mode 100644 index 0000000..3b8c7c4 --- /dev/null +++ b/pnpm-lock.yaml @@ -0,0 +1,7114 @@ +lockfileVersion: '9.0' + +settings: + autoInstallPeers: true + excludeLinksFromLockfile: false + +importers: + + .: + dependencies: + '@base-ui/react': + specifier: ^1.3.0 + version: 1.3.0(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@dnd-kit/core': + specifier: ^6.3.1 + version: 6.3.1(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@dnd-kit/sortable': + specifier: ^10.0.0 + version: 10.0.0(@dnd-kit/core@6.3.1(react-dom@19.2.4(react@19.2.4))(react@19.2.4))(react@19.2.4) + '@dnd-kit/utilities': + specifier: ^3.2.2 + version: 3.2.2(react@19.2.4) + 
'@fontsource-variable/geist': + specifier: ^5.2.8 + version: 5.2.8 + '@gitgraph/react': + specifier: ^1.6.0 + version: 1.6.0(react@19.2.4) + '@radix-ui/react-slot': + specifier: ^1.2.0 + version: 1.2.4(@types/react@19.2.14)(react@19.2.4) + '@tailwindcss/vite': + specifier: ^4.2.2 + version: 4.2.2(vite@8.0.3(@emnapi/core@1.9.1)(@emnapi/runtime@1.9.1)(@types/node@24.12.0)(jiti@2.6.1)) + '@tanstack/react-query': + specifier: ^5.96.0 + version: 5.96.0(react@19.2.4) + '@tanstack/react-virtual': + specifier: ^3.13.23 + version: 3.13.23(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + axios: + specifier: ^1.7.0 + version: 1.14.0 + class-variance-authority: + specifier: ^0.7.1 + version: 0.7.1 + clsx: + specifier: ^2.1.1 + version: 2.1.1 + cmdk: + specifier: ^1.1.1 + version: 1.1.1(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + date-fns: + specifier: ^4.1.0 + version: 4.1.0 + embla-carousel-react: + specifier: ^8.6.0 + version: 8.6.0(react@19.2.4) + form-data: + specifier: ^4.0.5 + version: 4.0.5 + framer-motion: + specifier: '11' + version: 11.18.2(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + input-otp: + specifier: ^1.4.2 + version: 1.4.2(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + jsencrypt: + specifier: ^3.5.4 + version: 3.5.4 + lucide-react: + specifier: ^1.7.0 + version: 1.7.0(react@19.2.4) + next-themes: + specifier: ^0.4.6 + version: 0.4.6(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + react: + specifier: ^19.2.4 + version: 19.2.4 + react-day-picker: + specifier: ^9.14.0 + version: 9.14.0(react@19.2.4) + react-dom: + specifier: ^19.2.4 + version: 19.2.4(react@19.2.4) + react-markdown: + specifier: '10' + version: 10.1.0(@types/react@19.2.14)(react@19.2.4) + react-resizable-panels: + specifier: ^4.8.0 + version: 4.8.0(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + react-router-dom: + specifier: ^7.13.2 + version: 7.13.2(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + recharts: + specifier: 
3.8.0 + version: 3.8.0(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react-is@19.2.4)(react@19.2.4)(redux@5.0.1) + remark-gfm: + specifier: '4' + version: 4.0.1 + remeda: + specifier: '2' + version: 2.33.7 + shadcn: + specifier: ^4.1.1 + version: 4.1.1(@types/node@24.12.0)(typescript@5.9.3) + shiki: + specifier: '1' + version: 1.29.2 + sonner: + specifier: ^2.0.7 + version: 2.0.7(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + tailwind-merge: + specifier: ^3.5.0 + version: 3.5.0 + tailwindcss: + specifier: ^4.2.2 + version: 4.2.2 + tw-animate-css: + specifier: ^1.4.0 + version: 1.4.0 + uuid: + specifier: ^13.0.0 + version: 13.0.0 + vaul: + specifier: ^1.1.2 + version: 1.1.2(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + zustand: + specifier: ^5.0.0 + version: 5.0.12(@types/react@19.2.14)(immer@11.1.4)(react@19.2.4)(use-sync-external-store@1.6.0(react@19.2.4)) + devDependencies: + '@eslint/js': + specifier: ^9.39.4 + version: 9.39.4 + '@hey-api/openapi-ts': + specifier: 0.94.5 + version: 0.94.5(typescript@5.9.3) + '@types/node': + specifier: ^24.12.0 + version: 24.12.0 + '@types/react': + specifier: ^19.2.14 + version: 19.2.14 + '@types/react-dom': + specifier: ^19.2.3 + version: 19.2.3(@types/react@19.2.14) + '@vitejs/plugin-react': + specifier: ^6.0.1 + version: 6.0.1(vite@8.0.3(@emnapi/core@1.9.1)(@emnapi/runtime@1.9.1)(@types/node@24.12.0)(jiti@2.6.1)) + '@vitejs/plugin-react-swc': + specifier: ^4.3.0 + version: 4.3.0(vite@8.0.3(@emnapi/core@1.9.1)(@emnapi/runtime@1.9.1)(@types/node@24.12.0)(jiti@2.6.1)) + eslint: + specifier: ^9.39.4 + version: 9.39.4(jiti@2.6.1) + eslint-plugin-react-hooks: + specifier: ^7.0.1 + version: 7.0.1(eslint@9.39.4(jiti@2.6.1)) + eslint-plugin-react-refresh: + specifier: ^0.5.2 + version: 0.5.2(eslint@9.39.4(jiti@2.6.1)) + globals: + specifier: ^17.4.0 + version: 17.4.0 + typescript: + specifier: ~5.9.3 + version: 5.9.3 + typescript-eslint: + specifier: 
^8.57.0 + version: 8.58.0(eslint@9.39.4(jiti@2.6.1))(typescript@5.9.3) + vite: + specifier: ^8.0.1 + version: 8.0.3(@emnapi/core@1.9.1)(@emnapi/runtime@1.9.1)(@types/node@24.12.0)(jiti@2.6.1) + + packages/client: {} + + packages/web: {} + +packages: + + '@babel/code-frame@7.29.0': + resolution: {integrity: sha512-9NhCeYjq9+3uxgdtp20LSiJXJvN0FeCtNGpJxuMFZ1Kv3cWUNb6DOhJwUvcVCzKGR66cw4njwM6hrJLqgOwbcw==} + engines: {node: '>=6.9.0'} + + '@babel/compat-data@7.29.0': + resolution: {integrity: sha512-T1NCJqT/j9+cn8fvkt7jtwbLBfLC/1y1c7NtCeXFRgzGTsafi68MRv8yzkYSapBnFA6L3U2VSc02ciDzoAJhJg==} + engines: {node: '>=6.9.0'} + + '@babel/core@7.29.0': + resolution: {integrity: sha512-CGOfOJqWjg2qW/Mb6zNsDm+u5vFQ8DxXfbM09z69p5Z6+mE1ikP2jUXw+j42Pf1XTYED2Rni5f95npYeuwMDQA==} + engines: {node: '>=6.9.0'} + + '@babel/generator@7.29.1': + resolution: {integrity: sha512-qsaF+9Qcm2Qv8SRIMMscAvG4O3lJ0F1GuMo5HR/Bp02LopNgnZBC/EkbevHFeGs4ls/oPz9v+Bsmzbkbe+0dUw==} + engines: {node: '>=6.9.0'} + + '@babel/helper-annotate-as-pure@7.27.3': + resolution: {integrity: sha512-fXSwMQqitTGeHLBC08Eq5yXz2m37E4pJX1qAU1+2cNedz/ifv/bVXft90VeSav5nFO61EcNgwr0aJxbyPaWBPg==} + engines: {node: '>=6.9.0'} + + '@babel/helper-compilation-targets@7.28.6': + resolution: {integrity: sha512-JYtls3hqi15fcx5GaSNL7SCTJ2MNmjrkHXg4FSpOA/grxK8KwyZ5bubHsCq8FXCkua6xhuaaBit+3b7+VZRfcA==} + engines: {node: '>=6.9.0'} + + '@babel/helper-create-class-features-plugin@7.28.6': + resolution: {integrity: sha512-dTOdvsjnG3xNT9Y0AUg1wAl38y+4Rl4sf9caSQZOXdNqVn+H+HbbJ4IyyHaIqNR6SW9oJpA/RuRjsjCw2IdIow==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0 + + '@babel/helper-globals@7.28.0': + resolution: {integrity: sha512-+W6cISkXFa1jXsDEdYA8HeevQT/FULhxzR99pxphltZcVaugps53THCeiWA8SguxxpSp3gKPiuYfSWopkLQ4hw==} + engines: {node: '>=6.9.0'} + + '@babel/helper-member-expression-to-functions@7.28.5': + resolution: {integrity: sha512-cwM7SBRZcPCLgl8a7cY0soT1SptSzAlMH39vwiRpOQkJlh53r5hdHwLSCZpQdVLT39sZt+CRpNwYG4Y2v77atg==} 
+ engines: {node: '>=6.9.0'} + + '@babel/helper-module-imports@7.28.6': + resolution: {integrity: sha512-l5XkZK7r7wa9LucGw9LwZyyCUscb4x37JWTPz7swwFE/0FMQAGpiWUZn8u9DzkSBWEcK25jmvubfpw2dnAMdbw==} + engines: {node: '>=6.9.0'} + + '@babel/helper-module-transforms@7.28.6': + resolution: {integrity: sha512-67oXFAYr2cDLDVGLXTEABjdBJZ6drElUSI7WKp70NrpyISso3plG9SAGEF6y7zbha/wOzUByWWTJvEDVNIUGcA==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0 + + '@babel/helper-optimise-call-expression@7.27.1': + resolution: {integrity: sha512-URMGH08NzYFhubNSGJrpUEphGKQwMQYBySzat5cAByY1/YgIRkULnIy3tAMeszlL/so2HbeilYloUmSpd7GdVw==} + engines: {node: '>=6.9.0'} + + '@babel/helper-plugin-utils@7.28.6': + resolution: {integrity: sha512-S9gzZ/bz83GRysI7gAD4wPT/AI3uCnY+9xn+Mx/KPs2JwHJIz1W8PZkg2cqyt3RNOBM8ejcXhV6y8Og7ly/Dug==} + engines: {node: '>=6.9.0'} + + '@babel/helper-replace-supers@7.28.6': + resolution: {integrity: sha512-mq8e+laIk94/yFec3DxSjCRD2Z0TAjhVbEJY3UQrlwVo15Lmt7C2wAUbK4bjnTs4APkwsYLTahXRraQXhb1WCg==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0 + + '@babel/helper-skip-transparent-expression-wrappers@7.27.1': + resolution: {integrity: sha512-Tub4ZKEXqbPjXgWLl2+3JpQAYBJ8+ikpQ2Ocj/q/r0LwE3UhENh7EUabyHjz2kCEsrRY83ew2DQdHluuiDQFzg==} + engines: {node: '>=6.9.0'} + + '@babel/helper-string-parser@7.27.1': + resolution: {integrity: sha512-qMlSxKbpRlAridDExk92nSobyDdpPijUq2DW6oDnUqd0iOGxmQjyqhMIihI9+zv4LPyZdRje2cavWPbCbWm3eA==} + engines: {node: '>=6.9.0'} + + '@babel/helper-validator-identifier@7.28.5': + resolution: {integrity: sha512-qSs4ifwzKJSV39ucNjsvc6WVHs6b7S03sOh2OcHF9UHfVPqWWALUsNUVzhSBiItjRZoLHx7nIarVjqKVusUZ1Q==} + engines: {node: '>=6.9.0'} + + '@babel/helper-validator-option@7.27.1': + resolution: {integrity: sha512-YvjJow9FxbhFFKDSuFnVCe2WxXk1zWc22fFePVNEaWJEu8IrZVlda6N0uHwzZrUM1il7NC9Mlp4MaJYbYd9JSg==} + engines: {node: '>=6.9.0'} + + '@babel/helpers@7.29.2': + resolution: {integrity: 
sha512-HoGuUs4sCZNezVEKdVcwqmZN8GoHirLUcLaYVNBK2J0DadGtdcqgr3BCbvH8+XUo4NGjNl3VOtSjEKNzqfFgKw==} + engines: {node: '>=6.9.0'} + + '@babel/parser@7.29.2': + resolution: {integrity: sha512-4GgRzy/+fsBa72/RZVJmGKPmZu9Byn8o4MoLpmNe1m8ZfYnz5emHLQz3U4gLud6Zwl0RZIcgiLD7Uq7ySFuDLA==} + engines: {node: '>=6.0.0'} + hasBin: true + + '@babel/plugin-syntax-jsx@7.28.6': + resolution: {integrity: sha512-wgEmr06G6sIpqr8YDwA2dSRTE3bJ+V0IfpzfSY3Lfgd7YWOaAdlykvJi13ZKBt8cZHfgH1IXN+CL656W3uUa4w==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-syntax-typescript@7.28.6': + resolution: {integrity: sha512-+nDNmQye7nlnuuHDboPbGm00Vqg3oO8niRRL27/4LYHUsHYh0zJ1xWOz0uRwNFmM1Avzk8wZbc6rdiYhomzv/A==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-transform-modules-commonjs@7.28.6': + resolution: {integrity: sha512-jppVbf8IV9iWWwWTQIxJMAJCWBuuKx71475wHwYytrRGQ2CWiDvYlADQno3tcYpS/T2UUWFQp3nVtYfK/YBQrA==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-transform-typescript@7.28.6': + resolution: {integrity: sha512-0YWL2RFxOqEm9Efk5PvreamxPME8OyY0wM5wh5lHjF+VtVhdneCWGzZeSqzOfiobVqQaNCd2z0tQvnI9DaPWPw==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/preset-typescript@7.28.5': + resolution: {integrity: sha512-+bQy5WOI2V6LJZpPVxY+yp66XdZ2yifu0Mc1aP5CQKgjn4QM5IN2i5fAZ4xKop47pr8rpVhiAeu+nDQa12C8+g==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/runtime@7.29.2': + resolution: {integrity: sha512-JiDShH45zKHWyGe4ZNVRrCjBz8Nh9TMmZG1kh4QTK8hCBTWBi8Da+i7s1fJw7/lYpM4ccepSNfqzZ/QvABBi5g==} + engines: {node: '>=6.9.0'} + + '@babel/template@7.28.6': + resolution: {integrity: sha512-YA6Ma2KsCdGb+WC6UpBVFJGXL58MDA6oyONbjyF/+5sBgxY/dwkhLogbMT2GXXyU84/IhRw/2D1Os1B/giz+BQ==} + engines: {node: '>=6.9.0'} + + '@babel/traverse@7.29.0': + resolution: {integrity: 
sha512-4HPiQr0X7+waHfyXPZpWPfWL/J7dcN1mx9gL6WdQVMbPnF3+ZhSMs8tCxN7oHddJE9fhNE7+lxdnlyemKfJRuA==} + engines: {node: '>=6.9.0'} + + '@babel/types@7.29.0': + resolution: {integrity: sha512-LwdZHpScM4Qz8Xw2iKSzS+cfglZzJGvofQICy7W7v4caru4EaAmyUuO6BGrbyQ2mYV11W0U8j5mBhd14dd3B0A==} + engines: {node: '>=6.9.0'} + + '@base-ui/react@1.3.0': + resolution: {integrity: sha512-FwpKqZbPz14AITp1CVgf4AjhKPe1OeeVKSBMdgD10zbFlj3QSWelmtCMLi2+/PFZZcIm3l87G7rwtCZJwHyXWA==} + engines: {node: '>=14.0.0'} + peerDependencies: + '@types/react': ^17 || ^18 || ^19 + react: ^17 || ^18 || ^19 + react-dom: ^17 || ^18 || ^19 + peerDependenciesMeta: + '@types/react': + optional: true + + '@base-ui/utils@0.2.6': + resolution: {integrity: sha512-yQ+qeuqohwhsNpoYDqqXaLllYAkPCP4vYdDrVo8FQXaAPfHWm1pG/Vm+jmGTA5JFS0BAIjookyapuJFY8F9PIw==} + peerDependencies: + '@types/react': ^17 || ^18 || ^19 + react: ^17 || ^18 || ^19 + react-dom: ^17 || ^18 || ^19 + peerDependenciesMeta: + '@types/react': + optional: true + + '@date-fns/tz@1.4.1': + resolution: {integrity: sha512-P5LUNhtbj6YfI3iJjw5EL9eUAG6OitD0W3fWQcpQjDRc/QIsL0tRNuO1PcDvPccWL1fSTXXdE1ds+l95DV/OFA==} + + '@dnd-kit/accessibility@3.1.1': + resolution: {integrity: sha512-2P+YgaXF+gRsIihwwY1gCsQSYnu9Zyj2py8kY5fFvUM1qm2WA2u639R6YNVfU4GWr+ZM5mqEsfHZZLoRONbemw==} + peerDependencies: + react: '>=16.8.0' + + '@dnd-kit/core@6.3.1': + resolution: {integrity: sha512-xkGBRQQab4RLwgXxoqETICr6S5JlogafbhNsidmrkVv2YRs5MLwpjoF2qpiGjQt8S9AoxtIV603s0GIUpY5eYQ==} + peerDependencies: + react: '>=16.8.0' + react-dom: '>=16.8.0' + + '@dnd-kit/sortable@10.0.0': + resolution: {integrity: sha512-+xqhmIIzvAYMGfBYYnbKuNicfSsk4RksY2XdmJhT+HAC01nix6fHCztU68jooFiMUB01Ky3F0FyOvhG/BZrWkg==} + peerDependencies: + '@dnd-kit/core': ^6.3.0 + react: '>=16.8.0' + + '@dnd-kit/utilities@3.2.2': + resolution: {integrity: sha512-+MKAJEOfaBe5SmV6t34p80MMKhjvUz0vRrvVJbPT0WElzaOJ/1xs+D+KDv+tD/NE5ujfrChEcshd4fLn0wpiqg==} + peerDependencies: + react: '>=16.8.0' + + '@dotenvx/dotenvx@1.59.1': + 
resolution: {integrity: sha512-Qg+meC+XFxliuVSDlEPkKnaUjdaJKK6FNx/Wwl2UxhQR8pyPIuLhMavsF7ePdB9qFZUWV1jEK3ckbJir/WmF4w==} + hasBin: true + + '@ecies/ciphers@0.2.5': + resolution: {integrity: sha512-GalEZH4JgOMHYYcYmVqnFirFsjZHeoGMDt9IxEnM9F7GRUUyUksJ7Ou53L83WHJq3RWKD3AcBpo0iQh0oMpf8A==} + engines: {bun: '>=1', deno: '>=2', node: '>=16'} + peerDependencies: + '@noble/ciphers': ^1.0.0 + + '@emnapi/core@1.9.1': + resolution: {integrity: sha512-mukuNALVsoix/w1BJwFzwXBN/dHeejQtuVzcDsfOEsdpCumXb/E9j8w11h5S54tT1xhifGfbbSm/ICrObRb3KA==} + + '@emnapi/runtime@1.9.1': + resolution: {integrity: sha512-VYi5+ZVLhpgK4hQ0TAjiQiZ6ol0oe4mBx7mVv7IflsiEp0OWoVsp/+f9Vc1hOhE0TtkORVrI1GvzyreqpgWtkA==} + + '@emnapi/wasi-threads@1.2.0': + resolution: {integrity: sha512-N10dEJNSsUx41Z6pZsXU8FjPjpBEplgH24sfkmITrBED1/U2Esum9F3lfLrMjKHHjmi557zQn7kR9R+XWXu5Rg==} + + '@eslint-community/eslint-utils@4.9.1': + resolution: {integrity: sha512-phrYmNiYppR7znFEdqgfWHXR6NCkZEK7hwWDHZUjit/2/U0r6XvkDl0SYnoM51Hq7FhCGdLDT6zxCCOY1hexsQ==} + engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} + peerDependencies: + eslint: ^6.0.0 || ^7.0.0 || >=8.0.0 + + '@eslint-community/regexpp@4.12.2': + resolution: {integrity: sha512-EriSTlt5OC9/7SXkRSCAhfSxxoSUgBm33OH+IkwbdpgoqsSsUg7y3uh+IICI/Qg4BBWr3U2i39RpmycbxMq4ew==} + engines: {node: ^12.0.0 || ^14.0.0 || >=16.0.0} + + '@eslint/config-array@0.21.2': + resolution: {integrity: sha512-nJl2KGTlrf9GjLimgIru+V/mzgSK0ABCDQRvxw5BjURL7WfH5uoWmizbH7QB6MmnMBd8cIC9uceWnezL1VZWWw==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + + '@eslint/config-helpers@0.4.2': + resolution: {integrity: sha512-gBrxN88gOIf3R7ja5K9slwNayVcZgK6SOUORm2uBzTeIEfeVaIhOpCtTox3P6R7o2jLFwLFTLnC7kU/RGcYEgw==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + + '@eslint/core@0.17.0': + resolution: {integrity: sha512-yL/sLrpmtDaFEiUj1osRP4TI2MDz1AddJL+jZ7KSqvBuliN4xqYY54IfdN8qD8Toa6g1iloph1fxQNkjOxrrpQ==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + + '@eslint/eslintrc@3.3.5': + resolution: 
{integrity: sha512-4IlJx0X0qftVsN5E+/vGujTRIFtwuLbNsVUe7TO6zYPDR1O6nFwvwhIKEKSrl6dZchmYBITazxKoUYOjdtjlRg==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + + '@eslint/js@9.39.4': + resolution: {integrity: sha512-nE7DEIchvtiFTwBw4Lfbu59PG+kCofhjsKaCWzxTpt4lfRjRMqG6uMBzKXuEcyXhOHoUp9riAm7/aWYGhXZ9cw==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + + '@eslint/object-schema@2.1.7': + resolution: {integrity: sha512-VtAOaymWVfZcmZbp6E2mympDIHvyjXs/12LqWYjVw6qjrfF+VK+fyG33kChz3nnK+SU5/NeHOqrTEHS8sXO3OA==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + + '@eslint/plugin-kit@0.4.1': + resolution: {integrity: sha512-43/qtrDUokr7LJqoF2c3+RInu/t4zfrpYdoSDfYyhg52rwLV6TnOvdG4fXm7IkSB3wErkcmJS9iEhjVtOSEjjA==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + + '@floating-ui/core@1.7.5': + resolution: {integrity: sha512-1Ih4WTWyw0+lKyFMcBHGbb5U5FtuHJuujoyyr5zTaWS5EYMeT6Jb2AuDeftsCsEuchO+mM2ij5+q9crhydzLhQ==} + + '@floating-ui/dom@1.7.6': + resolution: {integrity: sha512-9gZSAI5XM36880PPMm//9dfiEngYoC6Am2izES1FF406YFsjvyBMmeJ2g4SAju3xWwtuynNRFL2s9hgxpLI5SQ==} + + '@floating-ui/react-dom@2.1.8': + resolution: {integrity: sha512-cC52bHwM/n/CxS87FH0yWdngEZrjdtLW/qVruo68qg+prK7ZQ4YGdut2GyDVpoGeAYe/h899rVeOVm6Oi40k2A==} + peerDependencies: + react: '>=16.8.0' + react-dom: '>=16.8.0' + + '@floating-ui/utils@0.2.11': + resolution: {integrity: sha512-RiB/yIh78pcIxl6lLMG0CgBXAZ2Y0eVHqMPYugu+9U0AeT6YBeiJpf7lbdJNIugFP5SIjwNRgo4DhR1Qxi26Gg==} + + '@fontsource-variable/geist@5.2.8': + resolution: {integrity: sha512-cJ6m9e+8MQ5dCYJsLylfZrgBh6KkG4bOLckB35Tr9J/EqdkEM6QllH5PxqP1dhTvFup+HtMRPuz9xOjxXJggxw==} + + '@gitgraph/core@1.5.0': + resolution: {integrity: sha512-8CeeHbkKoFHM1y9vfjYiHyEpzl1mEhVrg5c/eFgDBsntOYswoDKU2yOf6DjtVINcE60wmcuynBSJqjMkQo07Ww==} + + '@gitgraph/react@1.6.0': + resolution: {integrity: sha512-cLFNZDoEiNbsnMfdT82zeZti5saYghQamfCbTpVvCRr+BrrQ/k94glkZqYPXKKNTEqbQV6L9JeOfAk1hNiFYXA==} + peerDependencies: + react: '>= 16.8.0' + + 
'@hey-api/codegen-core@0.7.4': + resolution: {integrity: sha512-DGd9yeSQzflOWO3Y5mt1GRXkXH9O/yIMgbxPjwLI3jwu/3nAjoXXD26lEeFb6tclYlg0JAqTIs5d930G/qxHeA==} + engines: {node: '>=20.19.0'} + + '@hey-api/json-schema-ref-parser@1.3.1': + resolution: {integrity: sha512-7atnpUkT8TyUPHYPLk91j/GyaqMuwTEHanLOe50Dlx0EEvNuQqFD52Yjg8x4KU0UFL1mWlyhE+sUE/wAtQ1N2A==} + engines: {node: '>=20.19.0'} + + '@hey-api/openapi-ts@0.94.5': + resolution: {integrity: sha512-fCR/kIexbDarnt/WGKvjJb4K30JaFzO2F/528kHpyWT7vopPS0JeqtRQMjJg+Gk09N/05nbv1OaFOQXcy0BiVQ==} + engines: {node: '>=20.19.0'} + hasBin: true + peerDependencies: + typescript: '>=5.5.3 || >=6.0.0 || 6.0.1-rc' + + '@hey-api/shared@0.2.6': + resolution: {integrity: sha512-ZZrsWbazJcJO688tJVEBeei03B4miPI7OauW+qLMYP/9KL6NadmA5MjqsIIwgfvb0HKMAR7lt4AINKzv0Zwdgw==} + engines: {node: '>=20.19.0'} + + '@hey-api/spec-types@0.1.0': + resolution: {integrity: sha512-StS4RrAO5pyJCBwe6uF9MAuPflkztriW+FPnVb7oEjzDYv1sxPwP+f7fL6u6D+UVrKpZ/9bPNx/xXVdkeWPU6A==} + + '@hey-api/types@0.1.4': + resolution: {integrity: sha512-thWfawrDIP7wSI9ioT13I5soaaqB5vAPIiZmgD8PbeEVKNrkonc0N/Sjj97ezl7oQgusZmaNphGdMKipPO6IBg==} + + '@hono/node-server@1.19.12': + resolution: {integrity: sha512-txsUW4SQ1iilgE0l9/e9VQWmELXifEFvmdA1j6WFh/aFPj99hIntrSsq/if0UWyGVkmrRPKA1wCeP+UCr1B9Uw==} + engines: {node: '>=18.14.1'} + peerDependencies: + hono: ^4 + + '@humanfs/core@0.19.1': + resolution: {integrity: sha512-5DyQ4+1JEUzejeK1JGICcideyfUbGixgS9jNgex5nqkW+cY7WZhxBigmieN5Qnw9ZosSNVC9KQKyb+GUaGyKUA==} + engines: {node: '>=18.18.0'} + + '@humanfs/node@0.16.7': + resolution: {integrity: sha512-/zUx+yOsIrG4Y43Eh2peDeKCxlRt/gET6aHfaKpuq267qXdYDFViVHfMaLyygZOnl0kGWxFIgsBy8QFuTLUXEQ==} + engines: {node: '>=18.18.0'} + + '@humanwhocodes/module-importer@1.0.1': + resolution: {integrity: sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==} + engines: {node: '>=12.22'} + + '@humanwhocodes/retry@0.4.3': + resolution: {integrity: 
sha512-bV0Tgo9K4hfPCek+aMAn81RppFKv2ySDQeMoSZuvTASywNTnVJCArCZE2FWqpvIatKu7VMRLWlR1EazvVhDyhQ==} + engines: {node: '>=18.18'} + + '@inquirer/ansi@1.0.2': + resolution: {integrity: sha512-S8qNSZiYzFd0wAcyG5AXCvUHC5Sr7xpZ9wZ2py9XR88jUz8wooStVx5M6dRzczbBWjic9NP7+rY0Xi7qqK/aMQ==} + engines: {node: '>=18'} + + '@inquirer/confirm@5.1.21': + resolution: {integrity: sha512-KR8edRkIsUayMXV+o3Gv+q4jlhENF9nMYUZs9PA2HzrXeHI8M5uDag70U7RJn9yyiMZSbtF5/UexBtAVtZGSbQ==} + engines: {node: '>=18'} + peerDependencies: + '@types/node': '>=18' + peerDependenciesMeta: + '@types/node': + optional: true + + '@inquirer/core@10.3.2': + resolution: {integrity: sha512-43RTuEbfP8MbKzedNqBrlhhNKVwoK//vUFNW3Q3vZ88BLcrs4kYpGg+B2mm5p2K/HfygoCxuKwJJiv8PbGmE0A==} + engines: {node: '>=18'} + peerDependencies: + '@types/node': '>=18' + peerDependenciesMeta: + '@types/node': + optional: true + + '@inquirer/figures@1.0.15': + resolution: {integrity: sha512-t2IEY+unGHOzAaVM5Xx6DEWKeXlDDcNPeDyUpsRc6CUhBfU3VQOEl+Vssh7VNp1dR8MdUJBWhuObjXCsVpjN5g==} + engines: {node: '>=18'} + + '@inquirer/type@3.0.10': + resolution: {integrity: sha512-BvziSRxfz5Ov8ch0z/n3oijRSEcEsHnhggm4xFZe93DHcUCTlutlq9Ox4SVENAfcRD22UQq7T/atg9Wr3k09eA==} + engines: {node: '>=18'} + peerDependencies: + '@types/node': '>=18' + peerDependenciesMeta: + '@types/node': + optional: true + + '@jridgewell/gen-mapping@0.3.13': + resolution: {integrity: sha512-2kkt/7niJ6MgEPxF0bYdQ6etZaA+fQvDcLKckhy1yIQOzaoKjBBjSj63/aLVjYE3qhRt5dvM+uUyfCg6UKCBbA==} + + '@jridgewell/remapping@2.3.5': + resolution: {integrity: sha512-LI9u/+laYG4Ds1TDKSJW2YPrIlcVYOwi2fUC6xB43lueCjgxV4lffOCZCtYFiH6TNOX+tQKXx97T4IKHbhyHEQ==} + + '@jridgewell/resolve-uri@3.1.2': + resolution: {integrity: sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==} + engines: {node: '>=6.0.0'} + + '@jridgewell/sourcemap-codec@1.5.5': + resolution: {integrity: sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og==} 
+ + '@jridgewell/trace-mapping@0.3.31': + resolution: {integrity: sha512-zzNR+SdQSDJzc8joaeP8QQoCQr8NuYx2dIIytl1QeBEZHJ9uW6hebsrYgbz8hJwUQao3TWCMtmfV8Nu1twOLAw==} + + '@jsdevtools/ono@7.1.3': + resolution: {integrity: sha512-4JQNk+3mVzK3xh2rqd6RB4J46qUR19azEHBneZyTZM+c456qOrbbM/5xcR8huNCCcbVt7+UmizG6GuUvPvKUYg==} + + '@modelcontextprotocol/sdk@1.29.0': + resolution: {integrity: sha512-zo37mZA9hJWpULgkRpowewez1y6ML5GsXJPY8FI0tBBCd77HEvza4jDqRKOXgHNn867PVGCyTdzqpz0izu5ZjQ==} + engines: {node: '>=18'} + peerDependencies: + '@cfworker/json-schema': ^4.1.1 + zod: ^3.25 || ^4.0 + peerDependenciesMeta: + '@cfworker/json-schema': + optional: true + + '@mswjs/interceptors@0.41.3': + resolution: {integrity: sha512-cXu86tF4VQVfwz8W1SPbhoRyHJkti6mjH/XJIxp40jhO4j2k1m4KYrEykxqWPkFF3vrK4rgQppBh//AwyGSXPA==} + engines: {node: '>=18'} + + '@napi-rs/wasm-runtime@1.1.2': + resolution: {integrity: sha512-sNXv5oLJ7ob93xkZ1XnxisYhGYXfaG9f65/ZgYuAu3qt7b3NadcOEhLvx28hv31PgX8SZJRYrAIPQilQmFpLVw==} + peerDependencies: + '@emnapi/core': ^1.7.1 + '@emnapi/runtime': ^1.7.1 + + '@noble/ciphers@1.3.0': + resolution: {integrity: sha512-2I0gnIVPtfnMw9ee9h1dJG7tp81+8Ob3OJb3Mv37rx5L40/b0i7djjCVvGOVqc9AEIQyvyu1i6ypKdFw8R8gQw==} + engines: {node: ^14.21.3 || >=16} + + '@noble/curves@1.9.7': + resolution: {integrity: sha512-gbKGcRUYIjA3/zCCNaWDciTMFI0dCkvou3TL8Zmy5Nc7sJ47a0jtOeZoTaMxkuqRo9cRhjOdZJXegxYE5FN/xw==} + engines: {node: ^14.21.3 || >=16} + + '@noble/hashes@1.8.0': + resolution: {integrity: sha512-jCs9ldd7NwzpgXDIf6P3+NrHh9/sD6CQdxHyjQI+h/6rDNo88ypBxxz45UDuZHz9r3tNz7N/VInSVoVdtXEI4A==} + engines: {node: ^14.21.3 || >=16} + + '@nodelib/fs.scandir@2.1.5': + resolution: {integrity: sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==} + engines: {node: '>= 8'} + + '@nodelib/fs.stat@2.0.5': + resolution: {integrity: sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==} + engines: {node: '>= 8'} + + 
'@nodelib/fs.walk@1.2.8': + resolution: {integrity: sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==} + engines: {node: '>= 8'} + + '@open-draft/deferred-promise@2.2.0': + resolution: {integrity: sha512-CecwLWx3rhxVQF6V4bAgPS5t+So2sTbPgAzafKkVizyi7tlwpcFpdFqq+wqF2OwNBmqFuu6tOyouTuxgpMfzmA==} + + '@open-draft/logger@0.3.0': + resolution: {integrity: sha512-X2g45fzhxH238HKO4xbSr7+wBS8Fvw6ixhTDuvLd5mqh6bJJCFAPwU9mPDxbcrRtfxv4u5IHCEH77BmxvXmmxQ==} + + '@open-draft/until@2.1.0': + resolution: {integrity: sha512-U69T3ItWHvLwGg5eJ0n3I62nWuE6ilHlmz7zM0npLBRvPRd7e6NYmg54vvRtP5mZG7kZqZCFVdsTWo7BPtBujg==} + + '@oxc-project/types@0.122.0': + resolution: {integrity: sha512-oLAl5kBpV4w69UtFZ9xqcmTi+GENWOcPF7FCrczTiBbmC0ibXxCwyvZGbO39rCVEuLGAZM84DH0pUIyyv/YJzA==} + + '@radix-ui/primitive@1.1.3': + resolution: {integrity: sha512-JTF99U/6XIjCBo0wqkU5sK10glYe27MRRsfwoiq5zzOEZLHU3A3KCMa5X/azekYRCJ0HlwI0crAXS/5dEHTzDg==} + + '@radix-ui/react-compose-refs@1.1.2': + resolution: {integrity: sha512-z4eqJvfiNnFMHIIvXP3CY57y2WJs5g2v3X0zm9mEJkrkNv4rDxu+sg9Jh8EkXyeqBkB7SOcboo9dMVqhyrACIg==} + peerDependencies: + '@types/react': '*' + react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + + '@radix-ui/react-context@1.1.2': + resolution: {integrity: sha512-jCi/QKUM2r1Ju5a3J64TH2A5SpKAgh0LpknyqdQ4m6DCV0xJ2HG1xARRwNGPQfi1SLdLWZ1OJz6F4OMBBNiGJA==} + peerDependencies: + '@types/react': '*' + react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + + '@radix-ui/react-dialog@1.1.15': + resolution: {integrity: sha512-TCglVRtzlffRNxRMEyR36DGBLJpeusFcgMVD9PZEzAKnUs1lKCgX5u9BmC2Yg+LL9MgZDugFFs1Vl+Jp4t/PGw==} + peerDependencies: + '@types/react': '*' + '@types/react-dom': '*' + react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + react-dom: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: 
true + '@types/react-dom': + optional: true + + '@radix-ui/react-dismissable-layer@1.1.11': + resolution: {integrity: sha512-Nqcp+t5cTB8BinFkZgXiMJniQH0PsUt2k51FUhbdfeKvc4ACcG2uQniY/8+h1Yv6Kza4Q7lD7PQV0z0oicE0Mg==} + peerDependencies: + '@types/react': '*' + '@types/react-dom': '*' + react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + react-dom: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + '@types/react-dom': + optional: true + + '@radix-ui/react-focus-guards@1.1.3': + resolution: {integrity: sha512-0rFg/Rj2Q62NCm62jZw0QX7a3sz6QCQU0LpZdNrJX8byRGaGVTqbrW9jAoIAHyMQqsNpeZ81YgSizOt5WXq0Pw==} + peerDependencies: + '@types/react': '*' + react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + + '@radix-ui/react-focus-scope@1.1.7': + resolution: {integrity: sha512-t2ODlkXBQyn7jkl6TNaw/MtVEVvIGelJDCG41Okq/KwUsJBwQ4XVZsHAVUkK4mBv3ewiAS3PGuUWuY2BoK4ZUw==} + peerDependencies: + '@types/react': '*' + '@types/react-dom': '*' + react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + react-dom: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + '@types/react-dom': + optional: true + + '@radix-ui/react-id@1.1.1': + resolution: {integrity: sha512-kGkGegYIdQsOb4XjsfM97rXsiHaBwco+hFI66oO4s9LU+PLAC5oJ7khdOVFxkhsmlbpUqDAvXw11CluXP+jkHg==} + peerDependencies: + '@types/react': '*' + react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + + '@radix-ui/react-portal@1.1.9': + resolution: {integrity: sha512-bpIxvq03if6UNwXZ+HTK71JLh4APvnXntDc6XOX8UVq4XQOVl7lwok0AvIl+b8zgCw3fSaVTZMpAPPagXbKmHQ==} + peerDependencies: + '@types/react': '*' + '@types/react-dom': '*' + react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + react-dom: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + 
'@types/react-dom': + optional: true + + '@radix-ui/react-presence@1.1.5': + resolution: {integrity: sha512-/jfEwNDdQVBCNvjkGit4h6pMOzq8bHkopq458dPt2lMjx+eBQUohZNG9A7DtO/O5ukSbxuaNGXMjHicgwy6rQQ==} + peerDependencies: + '@types/react': '*' + '@types/react-dom': '*' + react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + react-dom: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + '@types/react-dom': + optional: true + + '@radix-ui/react-primitive@2.1.3': + resolution: {integrity: sha512-m9gTwRkhy2lvCPe6QJp4d3G1TYEUHn/FzJUtq9MjH46an1wJU+GdoGC5VLof8RX8Ft/DlpshApkhswDLZzHIcQ==} + peerDependencies: + '@types/react': '*' + '@types/react-dom': '*' + react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + react-dom: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + '@types/react-dom': + optional: true + + '@radix-ui/react-primitive@2.1.4': + resolution: {integrity: sha512-9hQc4+GNVtJAIEPEqlYqW5RiYdrr8ea5XQ0ZOnD6fgru+83kqT15mq2OCcbe8KnjRZl5vF3ks69AKz3kh1jrhg==} + peerDependencies: + '@types/react': '*' + '@types/react-dom': '*' + react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + react-dom: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + '@types/react-dom': + optional: true + + '@radix-ui/react-slot@1.2.3': + resolution: {integrity: sha512-aeNmHnBxbi2St0au6VBVC7JXFlhLlOnvIIlePNniyUNAClzmtAUEY8/pBiK3iHjufOlwA+c20/8jngo7xcrg8A==} + peerDependencies: + '@types/react': '*' + react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + + '@radix-ui/react-slot@1.2.4': + resolution: {integrity: sha512-Jl+bCv8HxKnlTLVrcDE8zTMJ09R9/ukw4qBs/oZClOfoQk/cOTbDn+NceXfV7j09YPVQUryJPHurafcSg6EVKA==} + peerDependencies: + '@types/react': '*' + react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + + 
'@radix-ui/react-use-callback-ref@1.1.1': + resolution: {integrity: sha512-FkBMwD+qbGQeMu1cOHnuGB6x4yzPjho8ap5WtbEJ26umhgqVXbhekKUQO+hZEL1vU92a3wHwdp0HAcqAUF5iDg==} + peerDependencies: + '@types/react': '*' + react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + + '@radix-ui/react-use-controllable-state@1.2.2': + resolution: {integrity: sha512-BjasUjixPFdS+NKkypcyyN5Pmg83Olst0+c6vGov0diwTEo6mgdqVR6hxcEgFuh4QrAs7Rc+9KuGJ9TVCj0Zzg==} + peerDependencies: + '@types/react': '*' + react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + + '@radix-ui/react-use-effect-event@0.0.2': + resolution: {integrity: sha512-Qp8WbZOBe+blgpuUT+lw2xheLP8q0oatc9UpmiemEICxGvFLYmHm9QowVZGHtJlGbS6A6yJ3iViad/2cVjnOiA==} + peerDependencies: + '@types/react': '*' + react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + + '@radix-ui/react-use-escape-keydown@1.1.1': + resolution: {integrity: sha512-Il0+boE7w/XebUHyBjroE+DbByORGR9KKmITzbR7MyQ4akpORYP/ZmbhAr0DG7RmmBqoOnZdy2QlvajJ2QA59g==} + peerDependencies: + '@types/react': '*' + react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + + '@radix-ui/react-use-layout-effect@1.1.1': + resolution: {integrity: sha512-RbJRS4UWQFkzHTTwVymMTUv8EqYhOp8dOOviLj2ugtTiXRaRQS7GLGxZTLL1jWhMeoSCf5zmcZkqTl9IiYfXcQ==} + peerDependencies: + '@types/react': '*' + react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + + '@reduxjs/toolkit@2.11.2': + resolution: {integrity: sha512-Kd6kAHTA6/nUpp8mySPqj3en3dm0tdMIgbttnQ1xFMVpufoj+ADi8pXLBsd4xzTRHQa7t/Jv8W5UnCuW4kuWMQ==} + peerDependencies: + react: ^16.9.0 || ^17.0.0 || ^18 || ^19 + react-redux: ^7.2.1 || ^8.1.3 || ^9.0.0 + peerDependenciesMeta: + react: + optional: true + react-redux: + optional: true + + 
'@rolldown/binding-android-arm64@1.0.0-rc.12': + resolution: {integrity: sha512-pv1y2Fv0JybcykuiiD3qBOBdz6RteYojRFY1d+b95WVuzx211CRh+ytI/+9iVyWQ6koTh5dawe4S/yRfOFjgaA==} + engines: {node: ^20.19.0 || >=22.12.0} + cpu: [arm64] + os: [android] + + '@rolldown/binding-darwin-arm64@1.0.0-rc.12': + resolution: {integrity: sha512-cFYr6zTG/3PXXF3pUO+umXxt1wkRK/0AYT8lDwuqvRC+LuKYWSAQAQZjCWDQpAH172ZV6ieYrNnFzVVcnSflAg==} + engines: {node: ^20.19.0 || >=22.12.0} + cpu: [arm64] + os: [darwin] + + '@rolldown/binding-darwin-x64@1.0.0-rc.12': + resolution: {integrity: sha512-ZCsYknnHzeXYps0lGBz8JrF37GpE9bFVefrlmDrAQhOEi4IOIlcoU1+FwHEtyXGx2VkYAvhu7dyBf75EJQffBw==} + engines: {node: ^20.19.0 || >=22.12.0} + cpu: [x64] + os: [darwin] + + '@rolldown/binding-freebsd-x64@1.0.0-rc.12': + resolution: {integrity: sha512-dMLeprcVsyJsKolRXyoTH3NL6qtsT0Y2xeuEA8WQJquWFXkEC4bcu1rLZZSnZRMtAqwtrF/Ib9Ddtpa/Gkge9Q==} + engines: {node: ^20.19.0 || >=22.12.0} + cpu: [x64] + os: [freebsd] + + '@rolldown/binding-linux-arm-gnueabihf@1.0.0-rc.12': + resolution: {integrity: sha512-YqWjAgGC/9M1lz3GR1r1rP79nMgo3mQiiA+Hfo+pvKFK1fAJ1bCi0ZQVh8noOqNacuY1qIcfyVfP6HoyBRZ85Q==} + engines: {node: ^20.19.0 || >=22.12.0} + cpu: [arm] + os: [linux] + + '@rolldown/binding-linux-arm64-gnu@1.0.0-rc.12': + resolution: {integrity: sha512-/I5AS4cIroLpslsmzXfwbe5OmWvSsrFuEw3mwvbQ1kDxJ822hFHIx+vsN/TAzNVyepI/j/GSzrtCIwQPeKCLIg==} + engines: {node: ^20.19.0 || >=22.12.0} + cpu: [arm64] + os: [linux] + libc: [glibc] + + '@rolldown/binding-linux-arm64-musl@1.0.0-rc.12': + resolution: {integrity: sha512-V6/wZztnBqlx5hJQqNWwFdxIKN0m38p8Jas+VoSfgH54HSj9tKTt1dZvG6JRHcjh6D7TvrJPWFGaY9UBVOaWPw==} + engines: {node: ^20.19.0 || >=22.12.0} + cpu: [arm64] + os: [linux] + libc: [musl] + + '@rolldown/binding-linux-ppc64-gnu@1.0.0-rc.12': + resolution: {integrity: sha512-AP3E9BpcUYliZCxa3w5Kwj9OtEVDYK6sVoUzy4vTOJsjPOgdaJZKFmN4oOlX0Wp0RPV2ETfmIra9x1xuayFB7g==} + engines: {node: ^20.19.0 || >=22.12.0} + cpu: [ppc64] + os: [linux] + libc: 
[glibc] + + '@rolldown/binding-linux-s390x-gnu@1.0.0-rc.12': + resolution: {integrity: sha512-nWwpvUSPkoFmZo0kQazZYOrT7J5DGOJ/+QHHzjvNlooDZED8oH82Yg67HvehPPLAg5fUff7TfWFHQS8IV1n3og==} + engines: {node: ^20.19.0 || >=22.12.0} + cpu: [s390x] + os: [linux] + libc: [glibc] + + '@rolldown/binding-linux-x64-gnu@1.0.0-rc.12': + resolution: {integrity: sha512-RNrafz5bcwRy+O9e6P8Z/OCAJW/A+qtBczIqVYwTs14pf4iV1/+eKEjdOUta93q2TsT/FI0XYDP3TCky38LMAg==} + engines: {node: ^20.19.0 || >=22.12.0} + cpu: [x64] + os: [linux] + libc: [glibc] + + '@rolldown/binding-linux-x64-musl@1.0.0-rc.12': + resolution: {integrity: sha512-Jpw/0iwoKWx3LJ2rc1yjFrj+T7iHZn2JDg1Yny1ma0luviFS4mhAIcd1LFNxK3EYu3DHWCps0ydXQ5i/rrJ2ig==} + engines: {node: ^20.19.0 || >=22.12.0} + cpu: [x64] + os: [linux] + libc: [musl] + + '@rolldown/binding-openharmony-arm64@1.0.0-rc.12': + resolution: {integrity: sha512-vRugONE4yMfVn0+7lUKdKvN4D5YusEiPilaoO2sgUWpCvrncvWgPMzK00ZFFJuiPgLwgFNP5eSiUlv2tfc+lpA==} + engines: {node: ^20.19.0 || >=22.12.0} + cpu: [arm64] + os: [openharmony] + + '@rolldown/binding-wasm32-wasi@1.0.0-rc.12': + resolution: {integrity: sha512-ykGiLr/6kkiHc0XnBfmFJuCjr5ZYKKofkx+chJWDjitX+KsJuAmrzWhwyOMSHzPhzOHOy7u9HlFoa5MoAOJ/Zg==} + engines: {node: '>=14.0.0'} + cpu: [wasm32] + + '@rolldown/binding-win32-arm64-msvc@1.0.0-rc.12': + resolution: {integrity: sha512-5eOND4duWkwx1AzCxadcOrNeighiLwMInEADT0YM7xeEOOFcovWZCq8dadXgcRHSf3Ulh1kFo/qvzoFiCLOL1Q==} + engines: {node: ^20.19.0 || >=22.12.0} + cpu: [arm64] + os: [win32] + + '@rolldown/binding-win32-x64-msvc@1.0.0-rc.12': + resolution: {integrity: sha512-PyqoipaswDLAZtot351MLhrlrh6lcZPo2LSYE+VDxbVk24LVKAGOuE4hb8xZQmrPAuEtTZW8E6D2zc5EUZX4Lw==} + engines: {node: ^20.19.0 || >=22.12.0} + cpu: [x64] + os: [win32] + + '@rolldown/pluginutils@1.0.0-rc.12': + resolution: {integrity: sha512-HHMwmarRKvoFsJorqYlFeFRzXZqCt2ETQlEDOb9aqssrnVBB1/+xgTGtuTrIk5vzLNX1MjMtTf7W9z3tsSbrxw==} + + '@rolldown/pluginutils@1.0.0-rc.7': + resolution: {integrity: 
sha512-qujRfC8sFVInYSPPMLQByRh7zhwkGFS4+tyMQ83srV1qrxL4g8E2tyxVVyxd0+8QeBM1mIk9KbWxkegRr76XzA==} + + '@sec-ant/readable-stream@0.4.1': + resolution: {integrity: sha512-831qok9r2t8AlxLko40y2ebgSDhenenCatLVeW/uBtnHPyhHOvG0C7TvfgecV+wHzIm5KUICgzmVpWS+IMEAeg==} + + '@shikijs/core@1.29.2': + resolution: {integrity: sha512-vju0lY9r27jJfOY4Z7+Rt/nIOjzJpZ3y+nYpqtUZInVoXQ/TJZcfGnNOGnKjFdVZb8qexiCuSlZRKcGfhhTTZQ==} + + '@shikijs/engine-javascript@1.29.2': + resolution: {integrity: sha512-iNEZv4IrLYPv64Q6k7EPpOCE/nuvGiKl7zxdq0WFuRPF5PAE9PRo2JGq/d8crLusM59BRemJ4eOqrFrC4wiQ+A==} + + '@shikijs/engine-oniguruma@1.29.2': + resolution: {integrity: sha512-7iiOx3SG8+g1MnlzZVDYiaeHe7Ez2Kf2HrJzdmGwkRisT7r4rak0e655AcM/tF9JG/kg5fMNYlLLKglbN7gBqA==} + + '@shikijs/langs@1.29.2': + resolution: {integrity: sha512-FIBA7N3LZ+223U7cJDUYd5shmciFQlYkFXlkKVaHsCPgfVLiO+e12FmQE6Tf9vuyEsFe3dIl8qGWKXgEHL9wmQ==} + + '@shikijs/themes@1.29.2': + resolution: {integrity: sha512-i9TNZlsq4uoyqSbluIcZkmPL9Bfi3djVxRnofUHwvx/h6SRW3cwgBC5SML7vsDcWyukY0eCzVN980rqP6qNl9g==} + + '@shikijs/types@1.29.2': + resolution: {integrity: sha512-VJjK0eIijTZf0QSTODEXCqinjBn0joAHQ+aPSBzrv4O2d/QSbsMw+ZeSRx03kV34Hy7NzUvV/7NqfYGRLrASmw==} + + '@shikijs/vscode-textmate@10.0.2': + resolution: {integrity: sha512-83yeghZ2xxin3Nj8z1NMd/NCuca+gsYXswywDy5bHvwlWL8tpTQmzGeUuHd9FC3E/SBEMvzJRwWEOz5gGes9Qg==} + + '@sindresorhus/merge-streams@4.0.0': + resolution: {integrity: sha512-tlqY9xq5ukxTUZBmoOp+m61cqwQD5pHJtFY3Mn8CA8ps6yghLH/Hw8UPdqg4OLmFW3IFlcXnQNmo/dh8HzXYIQ==} + engines: {node: '>=18'} + + '@standard-schema/spec@1.1.0': + resolution: {integrity: sha512-l2aFy5jALhniG5HgqrD6jXLi/rUWrKvqN/qJx6yoJsgKhblVd+iqqU4RCXavm/jPityDo5TCvKMnpjKnOriy0w==} + + '@standard-schema/utils@0.3.0': + resolution: {integrity: sha512-e7Mew686owMaPJVNNLs55PUvgz371nKgwsc4vxE49zsODpJEnxgxRo2y/OKrqueavXgZNMDVj3DdHFlaSAeU8g==} + + '@swc/core-darwin-arm64@1.15.21': + resolution: {integrity: 
sha512-SA8SFg9dp0qKRH8goWsax6bptFE2EdmPf2YRAQW9WoHGf3XKM1bX0nd5UdwxmC5hXsBUZAYf7xSciCler6/oyA==} + engines: {node: '>=10'} + cpu: [arm64] + os: [darwin] + + '@swc/core-darwin-x64@1.15.21': + resolution: {integrity: sha512-//fOVntgowz9+V90lVsNCtyyrtbHp3jWH6Rch7MXHXbcvbLmbCTmssl5DeedUWLLGiAAW1wksBdqdGYOTjaNLw==} + engines: {node: '>=10'} + cpu: [x64] + os: [darwin] + + '@swc/core-linux-arm-gnueabihf@1.15.21': + resolution: {integrity: sha512-meNI4Sh6h9h8DvIfEc0l5URabYMSuNvyisLmG6vnoYAS43s8ON3NJR8sDHvdP7NJTrLe0q/x2XCn6yL/BeHcZg==} + engines: {node: '>=10'} + cpu: [arm] + os: [linux] + + '@swc/core-linux-arm64-gnu@1.15.21': + resolution: {integrity: sha512-QrXlNQnHeXqU2EzLlnsPoWEh8/GtNJLvfMiPsDhk+ht6Xv8+vhvZ5YZ/BokNWSIZiWPKLAqR0M7T92YF5tmD3g==} + engines: {node: '>=10'} + cpu: [arm64] + os: [linux] + libc: [glibc] + + '@swc/core-linux-arm64-musl@1.15.21': + resolution: {integrity: sha512-8/yGCMO333ultDaMQivE5CjO6oXDPeeg1IV4sphojPkb0Pv0i6zvcRIkgp60xDB+UxLr6VgHgt+BBgqS959E9g==} + engines: {node: '>=10'} + cpu: [arm64] + os: [linux] + libc: [musl] + + '@swc/core-linux-ppc64-gnu@1.15.21': + resolution: {integrity: sha512-ucW0HzPx0s1dgRvcvuLSPSA/2Kk/VYTv9st8qe1Kc22Gu0Q0rH9+6TcBTmMuNIp0Xs4BPr1uBttmbO1wEGI49Q==} + engines: {node: '>=10'} + cpu: [ppc64] + os: [linux] + libc: [glibc] + + '@swc/core-linux-s390x-gnu@1.15.21': + resolution: {integrity: sha512-ulTnOGc5I7YRObE/9NreAhQg94QkiR5qNhhcUZ1iFAYjzg/JGAi1ch+s/Ixe61pMIr8bfVrF0NOaB0f8wjaAfA==} + engines: {node: '>=10'} + cpu: [s390x] + os: [linux] + libc: [glibc] + + '@swc/core-linux-x64-gnu@1.15.21': + resolution: {integrity: sha512-D0RokxtM+cPvSqJIKR6uja4hbD+scI9ezo95mBhfSyLUs9wnPPl26sLp1ZPR/EXRdYm3F3S6RUtVi+8QXhT24Q==} + engines: {node: '>=10'} + cpu: [x64] + os: [linux] + libc: [glibc] + + '@swc/core-linux-x64-musl@1.15.21': + resolution: {integrity: sha512-nER8u7VeRfmU6fMDzl1NQAbbB/G7O2avmvCOwIul1uGkZ2/acbPH+DCL9h5+0yd/coNcxMBTL6NGepIew+7C2w==} + engines: {node: '>=10'} + cpu: [x64] + os: [linux] + libc: [musl] + + 
'@swc/core-win32-arm64-msvc@1.15.21': + resolution: {integrity: sha512-+/AgNBnjYugUA8C0Do4YzymgvnGbztv7j8HKSQLvR/DQgZPoXQ2B3PqB2mTtGh/X5DhlJWiqnunN35JUgWcAeQ==} + engines: {node: '>=10'} + cpu: [arm64] + os: [win32] + + '@swc/core-win32-ia32-msvc@1.15.21': + resolution: {integrity: sha512-IkSZj8PX/N4HcaFhMQtzmkV8YSnuNoJ0E6OvMwFiOfejPhiKXvl7CdDsn1f4/emYEIDO3fpgZW9DTaCRMDxaDA==} + engines: {node: '>=10'} + cpu: [ia32] + os: [win32] + + '@swc/core-win32-x64-msvc@1.15.21': + resolution: {integrity: sha512-zUyWso7OOENB6e1N1hNuNn8vbvLsTdKQ5WKLgt/JcBNfJhKy/6jmBmqI3GXk/MyvQKd5SLvP7A0F36p7TeDqvw==} + engines: {node: '>=10'} + cpu: [x64] + os: [win32] + + '@swc/core@1.15.21': + resolution: {integrity: sha512-fkk7NJcBscrR3/F8jiqlMptRHP650NxqDnspBMrRe5d8xOoCy9MLL5kOBLFXjFLfMo3KQQHhk+/jUULOMlR1uQ==} + engines: {node: '>=10'} + peerDependencies: + '@swc/helpers': '>=0.5.17' + peerDependenciesMeta: + '@swc/helpers': + optional: true + + '@swc/counter@0.1.3': + resolution: {integrity: sha512-e2BR4lsJkkRlKZ/qCHPw9ZaSxc0MVUd7gtbtaB7aMvHeJVYe8sOB8DBZkP2DtISHGSku9sCK6T6cnY0CtXrOCQ==} + + '@swc/types@0.1.26': + resolution: {integrity: sha512-lyMwd7WGgG79RS7EERZV3T8wMdmPq3xwyg+1nmAM64kIhx5yl+juO2PYIHb7vTiPgPCj8LYjsNV2T5wiQHUEaw==} + + '@tabby_ai/hijri-converter@1.0.5': + resolution: {integrity: sha512-r5bClKrcIusDoo049dSL8CawnHR6mRdDwhlQuIgZRNty68q0x8k3Lf1BtPAMxRf/GgnHBnIO4ujd3+GQdLWzxQ==} + engines: {node: '>=16.0.0'} + + '@tailwindcss/node@4.2.2': + resolution: {integrity: sha512-pXS+wJ2gZpVXqFaUEjojq7jzMpTGf8rU6ipJz5ovJV6PUGmlJ+jvIwGrzdHdQ80Sg+wmQxUFuoW1UAAwHNEdFA==} + + '@tailwindcss/oxide-android-arm64@4.2.2': + resolution: {integrity: sha512-dXGR1n+P3B6748jZO/SvHZq7qBOqqzQ+yFrXpoOWWALWndF9MoSKAT3Q0fYgAzYzGhxNYOoysRvYlpixRBBoDg==} + engines: {node: '>= 20'} + cpu: [arm64] + os: [android] + + '@tailwindcss/oxide-darwin-arm64@4.2.2': + resolution: {integrity: sha512-iq9Qjr6knfMpZHj55/37ouZeykwbDqF21gPFtfnhCCKGDcPI/21FKC9XdMO/XyBM7qKORx6UIhGgg6jLl7BZlg==} + engines: {node: '>= 20'} + 
cpu: [arm64] + os: [darwin] + + '@tailwindcss/oxide-darwin-x64@4.2.2': + resolution: {integrity: sha512-BlR+2c3nzc8f2G639LpL89YY4bdcIdUmiOOkv2GQv4/4M0vJlpXEa0JXNHhCHU7VWOKWT/CjqHdTP8aUuDJkuw==} + engines: {node: '>= 20'} + cpu: [x64] + os: [darwin] + + '@tailwindcss/oxide-freebsd-x64@4.2.2': + resolution: {integrity: sha512-YUqUgrGMSu2CDO82hzlQ5qSb5xmx3RUrke/QgnoEx7KvmRJHQuZHZmZTLSuuHwFf0DJPybFMXMYf+WJdxHy/nQ==} + engines: {node: '>= 20'} + cpu: [x64] + os: [freebsd] + + '@tailwindcss/oxide-linux-arm-gnueabihf@4.2.2': + resolution: {integrity: sha512-FPdhvsW6g06T9BWT0qTwiVZYE2WIFo2dY5aCSpjG/S/u1tby+wXoslXS0kl3/KXnULlLr1E3NPRRw0g7t2kgaQ==} + engines: {node: '>= 20'} + cpu: [arm] + os: [linux] + + '@tailwindcss/oxide-linux-arm64-gnu@4.2.2': + resolution: {integrity: sha512-4og1V+ftEPXGttOO7eCmW7VICmzzJWgMx+QXAJRAhjrSjumCwWqMfkDrNu1LXEQzNAwz28NCUpucgQPrR4S2yw==} + engines: {node: '>= 20'} + cpu: [arm64] + os: [linux] + libc: [glibc] + + '@tailwindcss/oxide-linux-arm64-musl@4.2.2': + resolution: {integrity: sha512-oCfG/mS+/+XRlwNjnsNLVwnMWYH7tn/kYPsNPh+JSOMlnt93mYNCKHYzylRhI51X+TbR+ufNhhKKzm6QkqX8ag==} + engines: {node: '>= 20'} + cpu: [arm64] + os: [linux] + libc: [musl] + + '@tailwindcss/oxide-linux-x64-gnu@4.2.2': + resolution: {integrity: sha512-rTAGAkDgqbXHNp/xW0iugLVmX62wOp2PoE39BTCGKjv3Iocf6AFbRP/wZT/kuCxC9QBh9Pu8XPkv/zCZB2mcMg==} + engines: {node: '>= 20'} + cpu: [x64] + os: [linux] + libc: [glibc] + + '@tailwindcss/oxide-linux-x64-musl@4.2.2': + resolution: {integrity: sha512-XW3t3qwbIwiSyRCggeO2zxe3KWaEbM0/kW9e8+0XpBgyKU4ATYzcVSMKteZJ1iukJ3HgHBjbg9P5YPRCVUxlnQ==} + engines: {node: '>= 20'} + cpu: [x64] + os: [linux] + libc: [musl] + + '@tailwindcss/oxide-wasm32-wasi@4.2.2': + resolution: {integrity: sha512-eKSztKsmEsn1O5lJ4ZAfyn41NfG7vzCg496YiGtMDV86jz1q/irhms5O0VrY6ZwTUkFy/EKG3RfWgxSI3VbZ8Q==} + engines: {node: '>=14.0.0'} + cpu: [wasm32] + bundledDependencies: + - '@napi-rs/wasm-runtime' + - '@emnapi/core' + - '@emnapi/runtime' + - '@tybys/wasm-util' + - 
'@emnapi/wasi-threads' + - tslib + + '@tailwindcss/oxide-win32-arm64-msvc@4.2.2': + resolution: {integrity: sha512-qPmaQM4iKu5mxpsrWZMOZRgZv1tOZpUm+zdhhQP0VhJfyGGO3aUKdbh3gDZc/dPLQwW4eSqWGrrcWNBZWUWaXQ==} + engines: {node: '>= 20'} + cpu: [arm64] + os: [win32] + + '@tailwindcss/oxide-win32-x64-msvc@4.2.2': + resolution: {integrity: sha512-1T/37VvI7WyH66b+vqHj/cLwnCxt7Qt3WFu5Q8hk65aOvlwAhs7rAp1VkulBJw/N4tMirXjVnylTR72uI0HGcA==} + engines: {node: '>= 20'} + cpu: [x64] + os: [win32] + + '@tailwindcss/oxide@4.2.2': + resolution: {integrity: sha512-qEUA07+E5kehxYp9BVMpq9E8vnJuBHfJEC0vPC5e7iL/hw7HR61aDKoVoKzrG+QKp56vhNZe4qwkRmMC0zDLvg==} + engines: {node: '>= 20'} + + '@tailwindcss/vite@4.2.2': + resolution: {integrity: sha512-mEiF5HO1QqCLXoNEfXVA1Tzo+cYsrqV7w9Juj2wdUFyW07JRenqMG225MvPwr3ZD9N1bFQj46X7r33iHxLUW0w==} + peerDependencies: + vite: ^5.2.0 || ^6 || ^7 || ^8 + + '@tanstack/query-core@5.96.0': + resolution: {integrity: sha512-sfO3uQeol1BU7cRP6NYY7nAiX3GiNY20lI/dtSbKLwcIkYw/X+w/tEsQAkc544AfIhBX/IvH/QYtPHrPhyAKGw==} + + '@tanstack/react-query@5.96.0': + resolution: {integrity: sha512-6qbjdm1K5kizVKv9TNqhIN3doq2anRhdF2XaFMFSn4m8L22S69RV+FilvlyVT4RoJyMxtPU5rs4RpdFa/PEC7A==} + peerDependencies: + react: ^18 || ^19 + + '@tanstack/react-virtual@3.13.23': + resolution: {integrity: sha512-XnMRnHQ23piOVj2bzJqHrRrLg4r+F86fuBcwteKfbIjJrtGxb4z7tIvPVAe4B+4UVwo9G4Giuz5fmapcrnZ0OQ==} + peerDependencies: + react: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 + react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 + + '@tanstack/virtual-core@3.13.23': + resolution: {integrity: sha512-zSz2Z2HNyLjCplANTDyl3BcdQJc2k1+yyFoKhNRmCr7V7dY8o8q5m8uFTI1/Pg1kL+Hgrz6u3Xo6eFUB7l66cg==} + + '@ts-morph/common@0.27.0': + resolution: {integrity: sha512-Wf29UqxWDpc+i61k3oIOzcUfQt79PIT9y/MWfAGlrkjg6lBC1hwDECLXPVJAhWjiGbfBCxZd65F/LIZF3+jeJQ==} + + '@tybys/wasm-util@0.10.1': + resolution: {integrity: sha512-9tTaPJLSiejZKx+Bmog4uSubteqTvFrVrURwkmHixBo0G4seD0zUxp98E1DzUBJxLQ3NPwXrGKDiVjwx/DpPsg==} + + 
'@types/d3-array@3.2.2': + resolution: {integrity: sha512-hOLWVbm7uRza0BYXpIIW5pxfrKe0W+D5lrFiAEYR+pb6w3N2SwSMaJbXdUfSEv+dT4MfHBLtn5js0LAWaO6otw==} + + '@types/d3-color@3.1.3': + resolution: {integrity: sha512-iO90scth9WAbmgv7ogoq57O9YpKmFBbmoEoCHDB2xMBY0+/KVrqAaCDyCE16dUspeOvIxFFRI+0sEtqDqy2b4A==} + + '@types/d3-ease@3.0.2': + resolution: {integrity: sha512-NcV1JjO5oDzoK26oMzbILE6HW7uVXOHLQvHshBUW4UMdZGfiY6v5BeQwh9a9tCzv+CeefZQHJt5SRgK154RtiA==} + + '@types/d3-interpolate@3.0.4': + resolution: {integrity: sha512-mgLPETlrpVV1YRJIglr4Ez47g7Yxjl1lj7YKsiMCb27VJH9W8NVM6Bb9d8kkpG/uAQS5AmbA48q2IAolKKo1MA==} + + '@types/d3-path@3.1.1': + resolution: {integrity: sha512-VMZBYyQvbGmWyWVea0EHs/BwLgxc+MKi1zLDCONksozI4YJMcTt8ZEuIR4Sb1MMTE8MMW49v0IwI5+b7RmfWlg==} + + '@types/d3-scale@4.0.9': + resolution: {integrity: sha512-dLmtwB8zkAeO/juAMfnV+sItKjlsw2lKdZVVy6LRr0cBmegxSABiLEpGVmSJJ8O08i4+sGR6qQtb6WtuwJdvVw==} + + '@types/d3-shape@3.1.8': + resolution: {integrity: sha512-lae0iWfcDeR7qt7rA88BNiqdvPS5pFVPpo5OfjElwNaT2yyekbM0C9vK+yqBqEmHr6lDkRnYNoTBYlAgJa7a4w==} + + '@types/d3-time@3.0.4': + resolution: {integrity: sha512-yuzZug1nkAAaBlBBikKZTgzCeA+k1uy4ZFwWANOfKw5z5LRhV0gNA7gNkKm7HoK+HRN0wX3EkxGk0fpbWhmB7g==} + + '@types/d3-timer@3.0.2': + resolution: {integrity: sha512-Ps3T8E8dZDam6fUyNiMkekK3XUsaUEik+idO9/YjPtfj2qruF8tFBXS7XhtE4iIXBLxhmLjP3SXpLhVf21I9Lw==} + + '@types/debug@4.1.13': + resolution: {integrity: sha512-KSVgmQmzMwPlmtljOomayoR89W4FynCAi3E8PPs7vmDVPe84hT+vGPKkJfThkmXs0x0jAaa9U8uW8bbfyS2fWw==} + + '@types/estree-jsx@1.0.5': + resolution: {integrity: sha512-52CcUVNFyfb1A2ALocQw/Dd1BQFNmSdkuC3BkZ6iqhdMfQz7JWOFRuJFloOzjk+6WijU56m9oKXFAXc7o3Towg==} + + '@types/estree@1.0.8': + resolution: {integrity: sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==} + + '@types/hast@3.0.4': + resolution: {integrity: sha512-WPs+bbQw5aCj+x6laNGWLH3wviHtoCv/P3+otBhbOhJgG8qtpdAMlTCxLtsTWA7LH1Oh/bFCHsBn0TPS5m30EQ==} + + 
'@types/json-schema@7.0.15': + resolution: {integrity: sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA==} + + '@types/mdast@4.0.4': + resolution: {integrity: sha512-kGaNbPh1k7AFzgpud/gMdvIm5xuECykRR+JnWKQno9TAXVa6WIVCGTPvYGekIDL4uwCZQSYbUxNBSb1aUo79oA==} + + '@types/ms@2.1.0': + resolution: {integrity: sha512-GsCCIZDE/p3i96vtEqx+7dBUGXrc7zeSK3wwPHIaRThS+9OhWIXRqzs4d6k1SVU8g91DrNRWxWUGhp5KXQb2VA==} + + '@types/node@24.12.0': + resolution: {integrity: sha512-GYDxsZi3ChgmckRT9HPU0WEhKLP08ev/Yfcq2AstjrDASOYCSXeyjDsHg4v5t4jOj7cyDX3vmprafKlWIG9MXQ==} + + '@types/react-dom@19.2.3': + resolution: {integrity: sha512-jp2L/eY6fn+KgVVQAOqYItbF0VY/YApe5Mz2F0aykSO8gx31bYCZyvSeYxCHKvzHG5eZjc+zyaS5BrBWya2+kQ==} + peerDependencies: + '@types/react': ^19.2.0 + + '@types/react@19.2.14': + resolution: {integrity: sha512-ilcTH/UniCkMdtexkoCN0bI7pMcJDvmQFPvuPvmEaYA/NSfFTAgdUSLAoVjaRJm7+6PvcM+q1zYOwS4wTYMF9w==} + + '@types/statuses@2.0.6': + resolution: {integrity: sha512-xMAgYwceFhRA2zY+XbEA7mxYbA093wdiW8Vu6gZPGWy9cmOyU9XesH1tNcEWsKFd5Vzrqx5T3D38PWx1FIIXkA==} + + '@types/unist@2.0.11': + resolution: {integrity: sha512-CmBKiL6NNo/OqgmMn95Fk9Whlp2mtvIv+KNpQKN2F4SjvrEesubTRWGYSg+BnWZOnlCaSTU1sMpsBOzgbYhnsA==} + + '@types/unist@3.0.3': + resolution: {integrity: sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q==} + + '@types/use-sync-external-store@0.0.6': + resolution: {integrity: sha512-zFDAD+tlpf2r4asuHEj0XH6pY6i0g5NeAHPn+15wk3BV6JA69eERFXC1gyGThDkVa1zCyKr5jox1+2LbV/AMLg==} + + '@types/validate-npm-package-name@4.0.2': + resolution: {integrity: sha512-lrpDziQipxCEeK5kWxvljWYhUvOiB2A9izZd9B2AFarYAkqZshb4lPbRs7zKEic6eGtH8V/2qJW+dPp9OtF6bw==} + + '@typescript-eslint/eslint-plugin@8.58.0': + resolution: {integrity: sha512-RLkVSiNuUP1C2ROIWfqX+YcUfLaSnxGE/8M+Y57lopVwg9VTYYfhuz15Yf1IzCKgZj6/rIbYTmJCUSqr76r0Wg==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + peerDependencies: + 
'@typescript-eslint/parser': ^8.58.0 + eslint: ^8.57.0 || ^9.0.0 || ^10.0.0 + typescript: '>=4.8.4 <6.1.0' + + '@typescript-eslint/parser@8.58.0': + resolution: {integrity: sha512-rLoGZIf9afaRBYsPUMtvkDWykwXwUPL60HebR4JgTI8mxfFe2cQTu3AGitANp4b9B2QlVru6WzjgB2IzJKiCSA==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + peerDependencies: + eslint: ^8.57.0 || ^9.0.0 || ^10.0.0 + typescript: '>=4.8.4 <6.1.0' + + '@typescript-eslint/project-service@8.58.0': + resolution: {integrity: sha512-8Q/wBPWLQP1j16NxoPNIKpDZFMaxl7yWIoqXWYeWO+Bbd2mjgvoF0dxP2jKZg5+x49rgKdf7Ck473M8PC3V9lg==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + peerDependencies: + typescript: '>=4.8.4 <6.1.0' + + '@typescript-eslint/scope-manager@8.58.0': + resolution: {integrity: sha512-W1Lur1oF50FxSnNdGp3Vs6P+yBRSmZiw4IIjEeYxd8UQJwhUF0gDgDD/W/Tgmh73mxgEU3qX0Bzdl/NGuSPEpQ==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + + '@typescript-eslint/tsconfig-utils@8.58.0': + resolution: {integrity: sha512-doNSZEVJsWEu4htiVC+PR6NpM+pa+a4ClH9INRWOWCUzMst/VA9c4gXq92F8GUD1rwhNvRLkgjfYtFXegXQF7A==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + peerDependencies: + typescript: '>=4.8.4 <6.1.0' + + '@typescript-eslint/type-utils@8.58.0': + resolution: {integrity: sha512-aGsCQImkDIqMyx1u4PrVlbi/krmDsQUs4zAcCV6M7yPcPev+RqVlndsJy9kJ8TLihW9TZ0kbDAzctpLn5o+lOg==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + peerDependencies: + eslint: ^8.57.0 || ^9.0.0 || ^10.0.0 + typescript: '>=4.8.4 <6.1.0' + + '@typescript-eslint/types@8.58.0': + resolution: {integrity: sha512-O9CjxypDT89fbHxRfETNoAnHj/i6IpRK0CvbVN3qibxlLdo5p5hcLmUuCCrHMpxiWSwKyI8mCP7qRNYuOJ0Uww==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + + '@typescript-eslint/typescript-estree@8.58.0': + resolution: {integrity: sha512-7vv5UWbHqew/dvs+D3e1RvLv1v2eeZ9txRHPnEEBUgSNLx5ghdzjHa0sgLWYVKssH+lYmV0JaWdoubo0ncGYLA==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + peerDependencies: + typescript: '>=4.8.4 <6.1.0' + + 
'@typescript-eslint/utils@8.58.0': + resolution: {integrity: sha512-RfeSqcFeHMHlAWzt4TBjWOAtoW9lnsAGiP3GbaX9uVgTYYrMbVnGONEfUCiSss+xMHFl+eHZiipmA8WkQ7FuNA==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + peerDependencies: + eslint: ^8.57.0 || ^9.0.0 || ^10.0.0 + typescript: '>=4.8.4 <6.1.0' + + '@typescript-eslint/visitor-keys@8.58.0': + resolution: {integrity: sha512-XJ9UD9+bbDo4a4epraTwG3TsNPeiB9aShrUneAVXy8q4LuwowN+qu89/6ByLMINqvIMeI9H9hOHQtg/ijrYXzQ==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + + '@ungap/structured-clone@1.3.0': + resolution: {integrity: sha512-WmoN8qaIAo7WTYWbAZuG8PYEhn5fkz7dZrqTBZ7dtt//lL2Gwms1IcnQ5yHqjDfX8Ft5j4YzDM23f87zBfDe9g==} + + '@vitejs/plugin-react-swc@4.3.0': + resolution: {integrity: sha512-mOkXCII839dHyAt/gpoSlm28JIVDwhZ6tnG6wJxUy2bmOx7UaPjvOyIDf3SFv5s7Eo7HVaq6kRcu6YMEzt5Z7w==} + engines: {node: ^20.19.0 || >=22.12.0} + peerDependencies: + vite: ^4 || ^5 || ^6 || ^7 || ^8 + + '@vitejs/plugin-react@6.0.1': + resolution: {integrity: sha512-l9X/E3cDb+xY3SWzlG1MOGt2usfEHGMNIaegaUGFsLkb3RCn/k8/TOXBcab+OndDI4TBtktT8/9BwwW8Vi9KUQ==} + engines: {node: ^20.19.0 || >=22.12.0} + peerDependencies: + '@rolldown/plugin-babel': ^0.1.7 || ^0.2.0 + babel-plugin-react-compiler: ^1.0.0 + vite: ^8.0.0 + peerDependenciesMeta: + '@rolldown/plugin-babel': + optional: true + babel-plugin-react-compiler: + optional: true + + accepts@2.0.0: + resolution: {integrity: sha512-5cvg6CtKwfgdmVqY1WIiXKc3Q1bkRqGLi+2W/6ao+6Y7gu/RCwRuAhGEzh5B4KlszSuTLgZYuqFqo5bImjNKng==} + engines: {node: '>= 0.6'} + + acorn-jsx@5.3.2: + resolution: {integrity: sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==} + peerDependencies: + acorn: ^6.0.0 || ^7.0.0 || ^8.0.0 + + acorn@8.16.0: + resolution: {integrity: sha512-UVJyE9MttOsBQIDKw1skb9nAwQuR5wuGD3+82K6JgJlm/Y+KI92oNsMNGZCYdDsVtRHSak0pcV5Dno5+4jh9sw==} + engines: {node: '>=0.4.0'} + hasBin: true + + agent-base@7.1.4: + resolution: {integrity: 
sha512-MnA+YT8fwfJPgBx3m60MNqakm30XOkyIoH1y6huTQvC0PwZG7ki8NacLBcrPbNoo8vEZy7Jpuk7+jMO+CUovTQ==} + engines: {node: '>= 14'} + + ajv-formats@3.0.1: + resolution: {integrity: sha512-8iUql50EUR+uUcdRQ3HDqa6EVyo3docL8g5WJ3FNcWmu62IbkGUue/pEyLBW8VGKKucTPgqeks4fIU1DA4yowQ==} + peerDependencies: + ajv: ^8.0.0 + peerDependenciesMeta: + ajv: + optional: true + + ajv@6.14.0: + resolution: {integrity: sha512-IWrosm/yrn43eiKqkfkHis7QioDleaXQHdDVPKg0FSwwd/DuvyX79TZnFOnYpB7dcsFAMmtFztZuXPDvSePkFw==} + + ajv@8.18.0: + resolution: {integrity: sha512-PlXPeEWMXMZ7sPYOHqmDyCJzcfNrUr3fGNKtezX14ykXOEIvyK81d+qydx89KY5O71FKMPaQ2vBfBFI5NHR63A==} + + ansi-colors@4.1.3: + resolution: {integrity: sha512-/6w/C21Pm1A7aZitlI5Ni/2J6FFQN8i1Cvz3kHABAAbw93v/NlvKdVOqz7CCWz/3iv/JplRSEEZ83XION15ovw==} + engines: {node: '>=6'} + + ansi-regex@5.0.1: + resolution: {integrity: sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==} + engines: {node: '>=8'} + + ansi-regex@6.2.2: + resolution: {integrity: sha512-Bq3SmSpyFHaWjPk8If9yc6svM8c56dB5BAtW4Qbw5jHTwwXXcTLoRMkpDJp6VL0XzlWaCHTXrkFURMYmD0sLqg==} + engines: {node: '>=12'} + + ansi-styles@4.3.0: + resolution: {integrity: sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==} + engines: {node: '>=8'} + + argparse@2.0.1: + resolution: {integrity: sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==} + + aria-hidden@1.2.6: + resolution: {integrity: sha512-ik3ZgC9dY/lYVVM++OISsaYDeg1tb0VtP5uL3ouh1koGOaUMDPpbFIei4JkFimWUFPn90sbMNMXQAIVOlnYKJA==} + engines: {node: '>=10'} + + ast-types@0.16.1: + resolution: {integrity: sha512-6t10qk83GOG8p0vKmaCr8eiilZwO171AvbROMtvvNiwrTly62t+7XkA8RdIIVbpMhCASAsxgAzdRSwh6nw/5Dg==} + engines: {node: '>=4'} + + asynckit@0.4.0: + resolution: {integrity: sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==} + + axios@1.14.0: + resolution: {integrity: 
sha512-3Y8yrqLSwjuzpXuZ0oIYZ/XGgLwUIBU3uLvbcpb0pidD9ctpShJd43KSlEEkVQg6DS0G9NKyzOvBfUtDKEyHvQ==} + + bail@2.0.2: + resolution: {integrity: sha512-0xO6mYd7JB2YesxDKplafRpsiOzPt9V02ddPCLbY1xYGPOX24NTyN50qnUxgCPcSoYMhKpAuBTjQoRZCAkUDRw==} + + balanced-match@1.0.2: + resolution: {integrity: sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==} + + balanced-match@4.0.4: + resolution: {integrity: sha512-BLrgEcRTwX2o6gGxGOCNyMvGSp35YofuYzw9h1IMTRmKqttAZZVU67bdb9Pr2vUHA8+j3i2tJfjO6C6+4myGTA==} + engines: {node: 18 || 20 || >=22} + + baseline-browser-mapping@2.10.12: + resolution: {integrity: sha512-qyq26DxfY4awP2gIRXhhLWfwzwI+N5Nxk6iQi8EFizIaWIjqicQTE4sLnZZVdeKPRcVNoJOkkpfzoIYuvCKaIQ==} + engines: {node: '>=6.0.0'} + hasBin: true + + body-parser@2.2.2: + resolution: {integrity: sha512-oP5VkATKlNwcgvxi0vM0p/D3n2C3EReYVX+DNYs5TjZFn/oQt2j+4sVJtSMr18pdRr8wjTcBl6LoV+FUwzPmNA==} + engines: {node: '>=18'} + + brace-expansion@1.1.13: + resolution: {integrity: sha512-9ZLprWS6EENmhEOpjCYW2c8VkmOvckIJZfkr7rBW6dObmfgJ/L1GpSYW5Hpo9lDz4D1+n0Ckz8rU7FwHDQiG/w==} + + brace-expansion@5.0.5: + resolution: {integrity: sha512-VZznLgtwhn+Mact9tfiwx64fA9erHH/MCXEUfB/0bX/6Fz6ny5EGTXYltMocqg4xFAQZtnO3DHWWXi8RiuN7cQ==} + engines: {node: 18 || 20 || >=22} + + braces@3.0.3: + resolution: {integrity: sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==} + engines: {node: '>=8'} + + browserslist@4.28.1: + resolution: {integrity: sha512-ZC5Bd0LgJXgwGqUknZY/vkUQ04r8NXnJZ3yYi4vDmSiZmC/pdSN0NbNRPxZpbtO4uAfDUAFffO8IZoM3Gj8IkA==} + engines: {node: ^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7} + hasBin: true + + bundle-name@4.1.0: + resolution: {integrity: sha512-tjwM5exMg6BGRI+kNmTntNsvdZS1X8BFYS6tnJ2hdH0kVxM6/eVZ2xy+FqStSWvYmtfFMDLIxurorHwDKfDz5Q==} + engines: {node: '>=18'} + + bytes@3.1.2: + resolution: {integrity: sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==} + 
engines: {node: '>= 0.8'} + + c12@3.3.3: + resolution: {integrity: sha512-750hTRvgBy5kcMNPdh95Qo+XUBeGo8C7nsKSmedDmaQI+E0r82DwHeM6vBewDe4rGFbnxoa4V9pw+sPh5+Iz8Q==} + peerDependencies: + magicast: '*' + peerDependenciesMeta: + magicast: + optional: true + + call-bind-apply-helpers@1.0.2: + resolution: {integrity: sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==} + engines: {node: '>= 0.4'} + + call-bound@1.0.4: + resolution: {integrity: sha512-+ys997U96po4Kx/ABpBCqhA9EuxJaQWDQg7295H4hBphv3IZg0boBKuwYpt4YXp6MZ5AmZQnU/tyMTlRpaSejg==} + engines: {node: '>= 0.4'} + + callsites@3.1.0: + resolution: {integrity: sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==} + engines: {node: '>=6'} + + caniuse-lite@1.0.30001782: + resolution: {integrity: sha512-dZcaJLJeDMh4rELYFw1tvSn1bhZWYFOt468FcbHHxx/Z/dFidd1I6ciyFdi3iwfQCyOjqo9upF6lGQYtMiJWxw==} + + ccount@2.0.1: + resolution: {integrity: sha512-eyrF0jiFpY+3drT6383f1qhkbGsLSifNAjA61IUjZjmLCWjItY6LB9ft9YhoDgwfmclB2zhu51Lc7+95b8NRAg==} + + chalk@4.1.2: + resolution: {integrity: sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==} + engines: {node: '>=10'} + + chalk@5.6.2: + resolution: {integrity: sha512-7NzBL0rN6fMUW+f7A6Io4h40qQlG+xGmtMxfbnH/K7TAtt8JQWVQK+6g0UXKMeVJoyV5EkkNsErQ8pVD3bLHbA==} + engines: {node: ^12.17.0 || ^14.13 || >=16.0.0} + + character-entities-html4@2.1.0: + resolution: {integrity: sha512-1v7fgQRj6hnSwFpq1Eu0ynr/CDEw0rXo2B61qXrLNdHZmPKgb7fqS1a2JwF0rISo9q77jDI8VMEHoApn8qDoZA==} + + character-entities-legacy@3.0.0: + resolution: {integrity: sha512-RpPp0asT/6ufRm//AJVwpViZbGM/MkjQFxJccQRHmISF/22NBtsHqAWmL+/pmkPWoIUJdWyeVleTl1wydHATVQ==} + + character-entities@2.0.2: + resolution: {integrity: sha512-shx7oQ0Awen/BRIdkjkvz54PnEEI/EjwXDSIZp86/KKdbafHh1Df/RYGBhn4hbe2+uKC9FnT5UCEdyPz3ai9hQ==} + + character-reference-invalid@2.0.1: + resolution: {integrity: 
sha512-iBZ4F4wRbyORVsu0jPV7gXkOsGYjGHPmAyv+HiHG8gi5PtC9KI2j1+v8/tlibRvjoWX027ypmG/n0HtO5t7unw==} + + chokidar@5.0.0: + resolution: {integrity: sha512-TQMmc3w+5AxjpL8iIiwebF73dRDF4fBIieAqGn9RGCWaEVwQ6Fb2cGe31Yns0RRIzii5goJ1Y7xbMwo1TxMplw==} + engines: {node: '>= 20.19.0'} + + citty@0.1.6: + resolution: {integrity: sha512-tskPPKEs8D2KPafUypv2gxwJP8h/OaJmC82QQGGDQcHvXX43xF2VDACcJVmZ0EuSxkpO9Kc4MlrA3q0+FG58AQ==} + + citty@0.2.1: + resolution: {integrity: sha512-kEV95lFBhQgtogAPlQfJJ0WGVSokvLr/UEoFPiKKOXF7pl98HfUVUD0ejsuTCld/9xH9vogSywZ5KqHzXrZpqg==} + + class-variance-authority@0.7.1: + resolution: {integrity: sha512-Ka+9Trutv7G8M6WT6SeiRWz792K5qEqIGEGzXKhAE6xOWAY6pPH8U+9IY3oCMv6kqTmLsv7Xh/2w2RigkePMsg==} + + cli-cursor@5.0.0: + resolution: {integrity: sha512-aCj4O5wKyszjMmDT4tZj93kxyydN/K5zPWSCe6/0AV/AA1pqe5ZBIw0a2ZfPQV7lL5/yb5HsUreJ6UFAF1tEQw==} + engines: {node: '>=18'} + + cli-spinners@2.9.2: + resolution: {integrity: sha512-ywqV+5MmyL4E7ybXgKys4DugZbX0FC6LnwrhjuykIjnK9k8OQacQ7axGKnjDXWNhns0xot3bZI5h55H8yo9cJg==} + engines: {node: '>=6'} + + cli-width@4.1.0: + resolution: {integrity: sha512-ouuZd4/dm2Sw5Gmqy6bGyNNNe1qt9RpmxveLSO7KcgsTnU7RXfsw+/bukWGo1abgBiMAic068rclZsO4IWmmxQ==} + engines: {node: '>= 12'} + + cliui@8.0.1: + resolution: {integrity: sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==} + engines: {node: '>=12'} + + clsx@2.1.1: + resolution: {integrity: sha512-eYm0QWBtUrBWZWG0d386OGAw16Z995PiOVo2B7bjWSbHedGl5e0ZWaq65kOGgUSNesEIDkB9ISbTg/JK9dhCZA==} + engines: {node: '>=6'} + + cmdk@1.1.1: + resolution: {integrity: sha512-Vsv7kFaXm+ptHDMZ7izaRsP70GgrW9NBNGswt9OZaVBLlE0SNpDq8eu/VGXyF9r7M0azK3Wy7OlYXsuyYLFzHg==} + peerDependencies: + react: ^18 || ^19 || ^19.0.0-rc + react-dom: ^18 || ^19 || ^19.0.0-rc + + code-block-writer@13.0.3: + resolution: {integrity: sha512-Oofo0pq3IKnsFtuHqSF7TqBfr71aeyZDVJ0HpmqB7FBM2qEigL0iPONSCZSO9pE9dZTAxANe5XHG9Uy0YMv8cg==} + + color-convert@2.0.1: + resolution: {integrity: 
sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==} + engines: {node: '>=7.0.0'} + + color-name@1.1.4: + resolution: {integrity: sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==} + + color-support@1.1.3: + resolution: {integrity: sha512-qiBjkpbMLO/HL68y+lh4q0/O1MZFj2RX6X/KmMa3+gJD3z+WwI1ZzDHysvqHGS3mP6mznPckpXmw1nI9cJjyRg==} + hasBin: true + + combined-stream@1.0.8: + resolution: {integrity: sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==} + engines: {node: '>= 0.8'} + + comma-separated-tokens@2.0.3: + resolution: {integrity: sha512-Fu4hJdvzeylCfQPp9SGWidpzrMs7tTrlu6Vb8XGaRGck8QSNZJJp538Wrb60Lax4fPwR64ViY468OIUTbRlGZg==} + + commander@11.1.0: + resolution: {integrity: sha512-yPVavfyCcRhmorC7rWlkHn15b4wDVgVmBA7kV4QVBsF7kv/9TKJAbAXVTxvTnwP8HHKjRCJDClKbciiYS7p0DQ==} + engines: {node: '>=16'} + + commander@14.0.3: + resolution: {integrity: sha512-H+y0Jo/T1RZ9qPP4Eh1pkcQcLRglraJaSLoyOtHxu6AapkjWVCy2Sit1QQ4x3Dng8qDlSsZEet7g5Pq06MvTgw==} + engines: {node: '>=20'} + + concat-map@0.0.1: + resolution: {integrity: sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==} + + confbox@0.2.4: + resolution: {integrity: sha512-ysOGlgTFbN2/Y6Cg3Iye8YKulHw+R2fNXHrgSmXISQdMnomY6eNDprVdW9R5xBguEqI954+S6709UyiO7B+6OQ==} + + consola@3.4.2: + resolution: {integrity: sha512-5IKcdX0nnYavi6G7TtOhwkYzyjfJlatbjMjuLSfE2kYT5pMDOilZ4OvMhi637CcDICTmz3wARPoyhqyX1Y+XvA==} + engines: {node: ^14.18.0 || >=16.10.0} + + content-disposition@1.0.1: + resolution: {integrity: sha512-oIXISMynqSqm241k6kcQ5UwttDILMK4BiurCfGEREw6+X9jkkpEe5T9FZaApyLGGOnFuyMWZpdolTXMtvEJ08Q==} + engines: {node: '>=18'} + + content-type@1.0.5: + resolution: {integrity: sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA==} + engines: {node: '>= 0.6'} + + convert-source-map@2.0.0: + resolution: {integrity: 
sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==} + + cookie-signature@1.2.2: + resolution: {integrity: sha512-D76uU73ulSXrD1UXF4KE2TMxVVwhsnCgfAyTg9k8P6KGZjlXKrOLe4dJQKI3Bxi5wjesZoFXJWElNWBjPZMbhg==} + engines: {node: '>=6.6.0'} + + cookie@0.7.2: + resolution: {integrity: sha512-yki5XnKuf750l50uGTllt6kKILY4nQ1eNIQatoXEByZ5dWgnKqbnqmTrBE5B4N7lrMJKQ2ytWMiTO2o0v6Ew/w==} + engines: {node: '>= 0.6'} + + cookie@1.1.1: + resolution: {integrity: sha512-ei8Aos7ja0weRpFzJnEA9UHJ/7XQmqglbRwnf2ATjcB9Wq874VKH9kfjjirM6UhU2/E5fFYadylyhFldcqSidQ==} + engines: {node: '>=18'} + + cors@2.8.6: + resolution: {integrity: sha512-tJtZBBHA6vjIAaF6EnIaq6laBBP9aq/Y3ouVJjEfoHbRBcHBAHYcMh/w8LDrk2PvIMMq8gmopa5D4V8RmbrxGw==} + engines: {node: '>= 0.10'} + + cosmiconfig@9.0.1: + resolution: {integrity: sha512-hr4ihw+DBqcvrsEDioRO31Z17x71pUYoNe/4h6Z0wB72p7MU7/9gH8Q3s12NFhHPfYBBOV3qyfUxmr/Yn3shnQ==} + engines: {node: '>=14'} + peerDependencies: + typescript: '>=4.9.5' + peerDependenciesMeta: + typescript: + optional: true + + cross-spawn@7.0.6: + resolution: {integrity: sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==} + engines: {node: '>= 8'} + + cssesc@3.0.0: + resolution: {integrity: sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg==} + engines: {node: '>=4'} + hasBin: true + + csstype@3.2.3: + resolution: {integrity: sha512-z1HGKcYy2xA8AGQfwrn0PAy+PB7X/GSj3UVJW9qKyn43xWa+gl5nXmU4qqLMRzWVLFC8KusUX8T/0kCiOYpAIQ==} + + d3-array@3.2.4: + resolution: {integrity: sha512-tdQAmyA18i4J7wprpYq8ClcxZy3SC31QMeByyCFyRt7BVHdREQZ5lpzoe5mFEYZUWe+oq8HBvk9JjpibyEV4Jg==} + engines: {node: '>=12'} + + d3-color@3.1.0: + resolution: {integrity: sha512-zg/chbXyeBtMQ1LbD/WSoW2DpC3I0mpmPdW+ynRTj/x2DAWYrIY7qeZIHidozwV24m4iavr15lNwIwLxRmOxhA==} + engines: {node: '>=12'} + + d3-ease@3.0.1: + resolution: {integrity: 
sha512-wR/XK3D3XcLIZwpbvQwQ5fK+8Ykds1ip7A2Txe0yxncXSdq1L9skcG7blcedkOX+ZcgxGAmLX1FrRGbADwzi0w==} + engines: {node: '>=12'} + + d3-format@3.1.2: + resolution: {integrity: sha512-AJDdYOdnyRDV5b6ArilzCPPwc1ejkHcoyFarqlPqT7zRYjhavcT3uSrqcMvsgh2CgoPbK3RCwyHaVyxYcP2Arg==} + engines: {node: '>=12'} + + d3-interpolate@3.0.1: + resolution: {integrity: sha512-3bYs1rOD33uo8aqJfKP3JWPAibgw8Zm2+L9vBKEHJ2Rg+viTR7o5Mmv5mZcieN+FRYaAOWX5SJATX6k1PWz72g==} + engines: {node: '>=12'} + + d3-path@3.1.0: + resolution: {integrity: sha512-p3KP5HCf/bvjBSSKuXid6Zqijx7wIfNW+J/maPs+iwR35at5JCbLUT0LzF1cnjbCHWhqzQTIN2Jpe8pRebIEFQ==} + engines: {node: '>=12'} + + d3-scale@4.0.2: + resolution: {integrity: sha512-GZW464g1SH7ag3Y7hXjf8RoUuAFIqklOAq3MRl4OaWabTFJY9PN/E1YklhXLh+OQ3fM9yS2nOkCoS+WLZ6kvxQ==} + engines: {node: '>=12'} + + d3-shape@3.2.0: + resolution: {integrity: sha512-SaLBuwGm3MOViRq2ABk3eLoxwZELpH6zhl3FbAoJ7Vm1gofKx6El1Ib5z23NUEhF9AsGl7y+dzLe5Cw2AArGTA==} + engines: {node: '>=12'} + + d3-time-format@4.1.0: + resolution: {integrity: sha512-dJxPBlzC7NugB2PDLwo9Q8JiTR3M3e4/XANkreKSUxF8vvXKqm1Yfq4Q5dl8budlunRVlUUaDUgFt7eA8D6NLg==} + engines: {node: '>=12'} + + d3-time@3.1.0: + resolution: {integrity: sha512-VqKjzBLejbSMT4IgbmVgDjpkYrNWUYJnbCGo874u7MMKIWsILRX+OpX/gTk8MqjpT1A/c6HY2dCA77ZN0lkQ2Q==} + engines: {node: '>=12'} + + d3-timer@3.0.1: + resolution: {integrity: sha512-ndfJ/JxxMd3nw31uyKoY2naivF+r29V+Lc0svZxe1JvvIRmi8hUsrMvdOwgS1o6uBHmiz91geQ0ylPP0aj1VUA==} + engines: {node: '>=12'} + + data-uri-to-buffer@4.0.1: + resolution: {integrity: sha512-0R9ikRb668HB7QDxT1vkpuUBtqc53YyAwMwGeUFKRojY/NWKvdZ+9UYtRfGmhqNbRkTSVpMbmyhXipFFv2cb/A==} + engines: {node: '>= 12'} + + date-fns-jalali@4.1.0-0: + resolution: {integrity: sha512-hTIP/z+t+qKwBDcmmsnmjWTduxCg+5KfdqWQvb2X/8C9+knYY6epN/pfxdDuyVlSVeFz0sM5eEfwIUQ70U4ckg==} + + date-fns@4.1.0: + resolution: {integrity: sha512-Ukq0owbQXxa/U3EGtsdVBkR1w7KOQ5gIBqdH2hkvknzZPYvBxb/aa6E8L7tmjFtkwZBu3UXBbjIgPo/Ez4xaNg==} + + debug@4.4.3: + resolution: 
{integrity: sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==} + engines: {node: '>=6.0'} + peerDependencies: + supports-color: '*' + peerDependenciesMeta: + supports-color: + optional: true + + decimal.js-light@2.5.1: + resolution: {integrity: sha512-qIMFpTMZmny+MMIitAB6D7iVPEorVw6YQRWkvarTkT4tBeSLLiHzcwj6q0MmYSFCiVpiqPJTJEYIrpcPzVEIvg==} + + decode-named-character-reference@1.3.0: + resolution: {integrity: sha512-GtpQYB283KrPp6nRw50q3U9/VfOutZOe103qlN7BPP6Ad27xYnOIWv4lPzo8HCAL+mMZofJ9KEy30fq6MfaK6Q==} + + dedent@1.7.2: + resolution: {integrity: sha512-WzMx3mW98SN+zn3hgemf4OzdmyNhhhKz5Ay0pUfQiMQ3e1g+xmTJWp/pKdwKVXhdSkAEGIIzqeuWrL3mV/AXbA==} + peerDependencies: + babel-plugin-macros: ^3.1.0 + peerDependenciesMeta: + babel-plugin-macros: + optional: true + + deep-is@0.1.4: + resolution: {integrity: sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==} + + deepmerge@4.3.1: + resolution: {integrity: sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A==} + engines: {node: '>=0.10.0'} + + default-browser-id@5.0.1: + resolution: {integrity: sha512-x1VCxdX4t+8wVfd1so/9w+vQ4vx7lKd2Qp5tDRutErwmR85OgmfX7RlLRMWafRMY7hbEiXIbudNrjOAPa/hL8Q==} + engines: {node: '>=18'} + + default-browser@5.5.0: + resolution: {integrity: sha512-H9LMLr5zwIbSxrmvikGuI/5KGhZ8E2zH3stkMgM5LpOWDutGM2JZaj460Udnf1a+946zc7YBgrqEWwbk7zHvGw==} + engines: {node: '>=18'} + + define-lazy-prop@3.0.0: + resolution: {integrity: sha512-N+MeXYoqr3pOgn8xfyRPREN7gHakLYjhsHhWGT3fWAiL4IkAt0iDw14QiiEm2bE30c5XX5q0FtAA3CK5f9/BUg==} + engines: {node: '>=12'} + + defu@6.1.4: + resolution: {integrity: sha512-mEQCMmwJu317oSz8CwdIOdwf3xMif1ttiM8LTufzc3g6kR+9Pe236twL8j3IYT1F7GfRgGcW6MWxzZjLIkuHIg==} + + delayed-stream@1.0.0: + resolution: {integrity: sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==} + engines: {node: '>=0.4.0'} + + depd@2.0.0: + 
resolution: {integrity: sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==} + engines: {node: '>= 0.8'} + + dequal@2.0.3: + resolution: {integrity: sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA==} + engines: {node: '>=6'} + + destr@2.0.5: + resolution: {integrity: sha512-ugFTXCtDZunbzasqBxrK93Ik/DRYsO6S/fedkWEMKqt04xZ4csmnmwGDBAb07QWNaGMAmnTIemsYZCksjATwsA==} + + detect-libc@2.1.2: + resolution: {integrity: sha512-Btj2BOOO83o3WyH59e8MgXsxEQVcarkUOpEYrubB0urwnN10yQ364rsiByU11nZlqWYZm05i/of7io4mzihBtQ==} + engines: {node: '>=8'} + + detect-node-es@1.1.0: + resolution: {integrity: sha512-ypdmJU/TbBby2Dxibuv7ZLW3Bs1QEmM7nHjEANfohJLvE0XVujisn1qPJcZxg+qDucsr+bP6fLD1rPS3AhJ7EQ==} + + devlop@1.1.0: + resolution: {integrity: sha512-RWmIqhcFf1lRYBvNmr7qTNuyCt/7/ns2jbpp1+PalgE/rDQcBT0fioSMUpJ93irlUhC5hrg4cYqe6U+0ImW0rA==} + + diff@8.0.4: + resolution: {integrity: sha512-DPi0FmjiSU5EvQV0++GFDOJ9ASQUVFh5kD+OzOnYdi7n3Wpm9hWWGfB/O2blfHcMVTL5WkQXSnRiK9makhrcnw==} + engines: {node: '>=0.3.1'} + + dotenv@17.3.1: + resolution: {integrity: sha512-IO8C/dzEb6O3F9/twg6ZLXz164a2fhTnEWb95H23Dm4OuN+92NmEAlTrupP9VW6Jm3sO26tQlqyvyi4CsnY9GA==} + engines: {node: '>=12'} + + dunder-proto@1.0.1: + resolution: {integrity: sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==} + engines: {node: '>= 0.4'} + + eciesjs@0.4.18: + resolution: {integrity: sha512-wG99Zcfcys9fZux7Cft8BAX/YrOJLJSZ3jyYPfhZHqN2E+Ffx+QXBDsv3gubEgPtV6dTzJMSQUwk1H98/t/0wQ==} + engines: {bun: '>=1', deno: '>=2', node: '>=16'} + + ee-first@1.1.1: + resolution: {integrity: sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==} + + electron-to-chromium@1.5.329: + resolution: {integrity: sha512-/4t+AS1l4S3ZC0Ja7PHFIWeBIxGA3QGqV8/yKsP36v7NcyUCl+bIcmw6s5zVuMIECWwBrAK/6QLzTmbJChBboQ==} + + embla-carousel-react@8.6.0: + resolution: {integrity: 
sha512-0/PjqU7geVmo6F734pmPqpyHqiM99olvyecY7zdweCw+6tKEXnrE90pBiBbMMU8s5tICemzpQ3hi5EpxzGW+JA==} + peerDependencies: + react: ^16.8.0 || ^17.0.1 || ^18.0.0 || ^19.0.0 || ^19.0.0-rc + + embla-carousel-reactive-utils@8.6.0: + resolution: {integrity: sha512-fMVUDUEx0/uIEDM0Mz3dHznDhfX+znCCDCeIophYb1QGVM7YThSWX+wz11zlYwWFOr74b4QLGg0hrGPJeG2s4A==} + peerDependencies: + embla-carousel: 8.6.0 + + embla-carousel@8.6.0: + resolution: {integrity: sha512-SjWyZBHJPbqxHOzckOfo8lHisEaJWmwd23XppYFYVh10bU66/Pn5tkVkbkCMZVdbUE5eTCI2nD8OyIP4Z+uwkA==} + + emoji-regex-xs@1.0.0: + resolution: {integrity: sha512-LRlerrMYoIDrT6jgpeZ2YYl/L8EulRTt5hQcYjy5AInh7HWXKimpqx68aknBFpGL2+/IcogTcaydJEgaTmOpDg==} + + emoji-regex@10.6.0: + resolution: {integrity: sha512-toUI84YS5YmxW219erniWD0CIVOo46xGKColeNQRgOzDorgBi1v4D71/OFzgD9GO2UGKIv1C3Sp8DAn0+j5w7A==} + + emoji-regex@8.0.0: + resolution: {integrity: sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==} + + encodeurl@2.0.0: + resolution: {integrity: sha512-Q0n9HRi4m6JuGIV1eFlmvJB7ZEVxu93IrMyiMsGC0lrMJMWzRgx6WGquyfQgZVb31vhGgXnfmPNNXmxnOkRBrg==} + engines: {node: '>= 0.8'} + + enhanced-resolve@5.20.1: + resolution: {integrity: sha512-Qohcme7V1inbAfvjItgw0EaxVX5q2rdVEZHRBrEQdRZTssLDGsL8Lwrznl8oQ/6kuTJONLaDcGjkNP247XEhcA==} + engines: {node: '>=10.13.0'} + + env-paths@2.2.1: + resolution: {integrity: sha512-+h1lkLKhZMTYjog1VEpJNG7NZJWcuc2DDk/qsqSTRRCOXiLjeQ1d1/udrUGhqMxUgAlwKNZ0cf2uqan5GLuS2A==} + engines: {node: '>=6'} + + error-ex@1.3.4: + resolution: {integrity: sha512-sqQamAnR14VgCr1A618A3sGrygcpK+HEbenA/HiEAkkUwcZIIB/tgWqHFxWgOyDh4nB4JCRimh79dR5Ywc9MDQ==} + + es-define-property@1.0.1: + resolution: {integrity: sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==} + engines: {node: '>= 0.4'} + + es-errors@1.3.0: + resolution: {integrity: sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==} + engines: {node: '>= 0.4'} 
+ + es-object-atoms@1.1.1: + resolution: {integrity: sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==} + engines: {node: '>= 0.4'} + + es-set-tostringtag@2.1.0: + resolution: {integrity: sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA==} + engines: {node: '>= 0.4'} + + es-toolkit@1.45.1: + resolution: {integrity: sha512-/jhoOj/Fx+A+IIyDNOvO3TItGmlMKhtX8ISAHKE90c4b/k1tqaqEZ+uUqfpU8DMnW5cgNJv606zS55jGvza0Xw==} + + escalade@3.2.0: + resolution: {integrity: sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==} + engines: {node: '>=6'} + + escape-html@1.0.3: + resolution: {integrity: sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow==} + + escape-string-regexp@4.0.0: + resolution: {integrity: sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==} + engines: {node: '>=10'} + + escape-string-regexp@5.0.0: + resolution: {integrity: sha512-/veY75JbMK4j1yjvuUxuVsiS/hr/4iHs9FTT6cgTexxdE0Ly/glccBAkloH/DofkjRbZU3bnoj38mOmhkZ0lHw==} + engines: {node: '>=12'} + + eslint-plugin-react-hooks@7.0.1: + resolution: {integrity: sha512-O0d0m04evaNzEPoSW+59Mezf8Qt0InfgGIBJnpC0h3NH/WjUAR7BIKUfysC6todmtiZ/A0oUVS8Gce0WhBrHsA==} + engines: {node: '>=18'} + peerDependencies: + eslint: ^3.0.0 || ^4.0.0 || ^5.0.0 || ^6.0.0 || ^7.0.0 || ^8.0.0-0 || ^9.0.0 + + eslint-plugin-react-refresh@0.5.2: + resolution: {integrity: sha512-hmgTH57GfzoTFjVN0yBwTggnsVUF2tcqi7RJZHqi9lIezSs4eFyAMktA68YD4r5kNw1mxyY4dmkyoFDb3FIqrA==} + peerDependencies: + eslint: ^9 || ^10 + + eslint-scope@8.4.0: + resolution: {integrity: sha512-sNXOfKCn74rt8RICKMvJS7XKV/Xk9kA7DyJr8mJik3S7Cwgy3qlkkmyS2uQB3jiJg6VNdZd/pDBJu0nvG2NlTg==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + + eslint-visitor-keys@3.4.3: + resolution: {integrity: 
sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag==} + engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} + + eslint-visitor-keys@4.2.1: + resolution: {integrity: sha512-Uhdk5sfqcee/9H/rCOJikYz67o0a2Tw2hGRPOG2Y1R2dg7brRe1uG0yaNQDHu+TO/uQPF/5eCapvYSmHUjt7JQ==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + + eslint-visitor-keys@5.0.1: + resolution: {integrity: sha512-tD40eHxA35h0PEIZNeIjkHoDR4YjjJp34biM0mDvplBe//mB+IHCqHDGV7pxF+7MklTvighcCPPZC7ynWyjdTA==} + engines: {node: ^20.19.0 || ^22.13.0 || >=24} + + eslint@9.39.4: + resolution: {integrity: sha512-XoMjdBOwe/esVgEvLmNsD3IRHkm7fbKIUGvrleloJXUZgDHig2IPWNniv+GwjyJXzuNqVjlr5+4yVUZjycJwfQ==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + hasBin: true + peerDependencies: + jiti: '*' + peerDependenciesMeta: + jiti: + optional: true + + espree@10.4.0: + resolution: {integrity: sha512-j6PAQ2uUr79PZhBjP5C5fhl8e39FmRnOjsD5lGnWrFU8i2G776tBK7+nP8KuQUTTyAZUwfQqXAgrVH5MbH9CYQ==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + + esprima@4.0.1: + resolution: {integrity: sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==} + engines: {node: '>=4'} + hasBin: true + + esquery@1.7.0: + resolution: {integrity: sha512-Ap6G0WQwcU/LHsvLwON1fAQX9Zp0A2Y6Y/cJBl9r/JbW90Zyg4/zbG6zzKa2OTALELarYHmKu0GhpM5EO+7T0g==} + engines: {node: '>=0.10'} + + esrecurse@4.3.0: + resolution: {integrity: sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==} + engines: {node: '>=4.0'} + + estraverse@5.3.0: + resolution: {integrity: sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==} + engines: {node: '>=4.0'} + + estree-util-is-identifier-name@3.0.0: + resolution: {integrity: sha512-hFtqIDZTIUZ9BXLb8y4pYGyk6+wekIivNVTcmvk8NoOh+VeRn5y6cEHzbURrWbfp1fIqdVipilzj+lfaadNZmg==} + + esutils@2.0.3: + resolution: {integrity: 
sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==} + engines: {node: '>=0.10.0'} + + etag@1.8.1: + resolution: {integrity: sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg==} + engines: {node: '>= 0.6'} + + eventemitter3@5.0.4: + resolution: {integrity: sha512-mlsTRyGaPBjPedk6Bvw+aqbsXDtoAyAzm5MO7JgU+yVRyMQ5O8bD4Kcci7BS85f93veegeCPkL8R4GLClnjLFw==} + + eventsource-parser@3.0.6: + resolution: {integrity: sha512-Vo1ab+QXPzZ4tCa8SwIHJFaSzy4R6SHf7BY79rFBDf0idraZWAkYrDjDj8uWaSm3S2TK+hJ7/t1CEmZ7jXw+pg==} + engines: {node: '>=18.0.0'} + + eventsource@3.0.7: + resolution: {integrity: sha512-CRT1WTyuQoD771GW56XEZFQ/ZoSfWid1alKGDYMmkt2yl8UXrVR4pspqWNEcqKvVIzg6PAltWjxcSSPrboA4iA==} + engines: {node: '>=18.0.0'} + + execa@5.1.1: + resolution: {integrity: sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==} + engines: {node: '>=10'} + + execa@9.6.1: + resolution: {integrity: sha512-9Be3ZoN4LmYR90tUoVu2te2BsbzHfhJyfEiAVfz7N5/zv+jduIfLrV2xdQXOHbaD6KgpGdO9PRPM1Y4Q9QkPkA==} + engines: {node: ^18.19.0 || >=20.5.0} + + express-rate-limit@8.3.2: + resolution: {integrity: sha512-77VmFeJkO0/rvimEDuUC5H30oqUC4EyOhyGccfqoLebB0oiEYfM7nwPrsDsBL1gsTpwfzX8SFy2MT3TDyRq+bg==} + engines: {node: '>= 16'} + peerDependencies: + express: '>= 4.11' + + express@5.2.1: + resolution: {integrity: sha512-hIS4idWWai69NezIdRt2xFVofaF4j+6INOpJlVOLDO8zXGpUVEVzIYk12UUi2JzjEzWL3IOAxcTubgz9Po0yXw==} + engines: {node: '>= 18'} + + exsolve@1.0.8: + resolution: {integrity: sha512-LmDxfWXwcTArk8fUEnOfSZpHOJ6zOMUJKOtFLFqJLoKJetuQG874Uc7/Kki7zFLzYybmZhp1M7+98pfMqeX8yA==} + + extend@3.0.2: + resolution: {integrity: sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==} + + fast-deep-equal@3.1.3: + resolution: {integrity: sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==} + + fast-glob@3.3.3: + 
resolution: {integrity: sha512-7MptL8U0cqcFdzIzwOTHoilX9x5BrNqye7Z/LuC7kCMRio1EMSyqRK3BEAUD7sXRq4iT4AzTVuZdhgQ2TCvYLg==} + engines: {node: '>=8.6.0'} + + fast-json-stable-stringify@2.1.0: + resolution: {integrity: sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==} + + fast-levenshtein@2.0.6: + resolution: {integrity: sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==} + + fast-uri@3.1.0: + resolution: {integrity: sha512-iPeeDKJSWf4IEOasVVrknXpaBV0IApz/gp7S2bb7Z4Lljbl2MGJRqInZiUrQwV16cpzw/D3S5j5Julj/gT52AA==} + + fastq@1.20.1: + resolution: {integrity: sha512-GGToxJ/w1x32s/D2EKND7kTil4n8OVk/9mycTc4VDza13lOvpUZTGX3mFSCtV9ksdGBVzvsyAVLM6mHFThxXxw==} + + fdir@6.5.0: + resolution: {integrity: sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg==} + engines: {node: '>=12.0.0'} + peerDependencies: + picomatch: ^3 || ^4 + peerDependenciesMeta: + picomatch: + optional: true + + fetch-blob@3.2.0: + resolution: {integrity: sha512-7yAQpD2UMJzLi1Dqv7qFYnPbaPx7ZfFK6PiIxQ4PfkGPyNyl2Ugx+a/umUonmKqjhM4DnfbMvdX6otXq83soQQ==} + engines: {node: ^12.20 || >= 14.13} + + figures@6.1.0: + resolution: {integrity: sha512-d+l3qxjSesT4V7v2fh+QnmFnUWv9lSpjarhShNTgBOfA0ttejbQUAlHLitbjkoRiDulW0OPoQPYIGhIC8ohejg==} + engines: {node: '>=18'} + + file-entry-cache@8.0.0: + resolution: {integrity: sha512-XXTUwCvisa5oacNGRP9SfNtYBNAMi+RPwBFmblZEF7N7swHYQS6/Zfk7SRwx4D5j3CH211YNRco1DEMNVfZCnQ==} + engines: {node: '>=16.0.0'} + + fill-range@7.1.1: + resolution: {integrity: sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==} + engines: {node: '>=8'} + + finalhandler@2.1.1: + resolution: {integrity: sha512-S8KoZgRZN+a5rNwqTxlZZePjT/4cnm0ROV70LedRHZ0p8u9fRID0hJUZQpkKLzro8LfmC8sx23bY6tVNxv8pQA==} + engines: {node: '>= 18.0.0'} + + find-up@5.0.0: + resolution: {integrity: 
sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==} + engines: {node: '>=10'} + + flat-cache@4.0.1: + resolution: {integrity: sha512-f7ccFPK3SXFHpx15UIGyRJ/FJQctuKZ0zVuN3frBo4HnK3cay9VEW0R6yPYFHC0AgqhukPzKjq22t5DmAyqGyw==} + engines: {node: '>=16'} + + flatted@3.4.2: + resolution: {integrity: sha512-PjDse7RzhcPkIJwy5t7KPWQSZ9cAbzQXcafsetQoD7sOJRQlGikNbx7yZp2OotDnJyrDcbyRq3Ttb18iYOqkxA==} + + follow-redirects@1.15.11: + resolution: {integrity: sha512-deG2P0JfjrTxl50XGCDyfI97ZGVCxIpfKYmfyrQ54n5FO/0gfIES8C/Psl6kWVDolizcaaxZJnTS0QSMxvnsBQ==} + engines: {node: '>=4.0'} + peerDependencies: + debug: '*' + peerDependenciesMeta: + debug: + optional: true + + form-data@4.0.5: + resolution: {integrity: sha512-8RipRLol37bNs2bhoV67fiTEvdTrbMUYcFTiy3+wuuOnUog2QBHCZWXDRijWQfAkhBj2Uf5UnVaiWwA5vdd82w==} + engines: {node: '>= 6'} + + formdata-polyfill@4.0.10: + resolution: {integrity: sha512-buewHzMvYL29jdeQTVILecSaZKnt/RJWjoZCF5OW60Z67/GmSLBkOFM7qh1PI3zFNtJbaZL5eQu1vLfazOwj4g==} + engines: {node: '>=12.20.0'} + + forwarded@0.2.0: + resolution: {integrity: sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow==} + engines: {node: '>= 0.6'} + + framer-motion@11.18.2: + resolution: {integrity: sha512-5F5Och7wrvtLVElIpclDT0CBzMVg3dL22B64aZwHtsIY8RB4mXICLrkajK4G9R+ieSAGcgrLeae2SeUTg2pr6w==} + peerDependencies: + '@emotion/is-prop-valid': '*' + react: ^18.0.0 || ^19.0.0 + react-dom: ^18.0.0 || ^19.0.0 + peerDependenciesMeta: + '@emotion/is-prop-valid': + optional: true + react: + optional: true + react-dom: + optional: true + + fresh@2.0.0: + resolution: {integrity: sha512-Rx/WycZ60HOaqLKAi6cHRKKI7zxWbJ31MhntmtwMoaTeF7XFH9hhBp8vITaMidfljRQ6eYWCKkaTK+ykVJHP2A==} + engines: {node: '>= 0.8'} + + fs-extra@11.3.4: + resolution: {integrity: sha512-CTXd6rk/M3/ULNQj8FBqBWHYBVYybQ3VPBw0xGKFe3tuH7ytT6ACnvzpIQ3UZtB8yvUKC2cXn1a+x+5EVQLovA==} + engines: {node: '>=14.14'} + + fsevents@2.3.3: + resolution: {integrity: 
sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==} + engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0} + os: [darwin] + + function-bind@1.1.2: + resolution: {integrity: sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==} + + fuzzysort@3.1.0: + resolution: {integrity: sha512-sR9BNCjBg6LNgwvxlBd0sBABvQitkLzoVY9MYYROQVX/FvfJ4Mai9LsGhDgd8qYdds0bY77VzYd5iuB+v5rwQQ==} + + gensync@1.0.0-beta.2: + resolution: {integrity: sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==} + engines: {node: '>=6.9.0'} + + get-caller-file@2.0.5: + resolution: {integrity: sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==} + engines: {node: 6.* || 8.* || >= 10.*} + + get-east-asian-width@1.5.0: + resolution: {integrity: sha512-CQ+bEO+Tva/qlmw24dCejulK5pMzVnUOFOijVogd3KQs07HnRIgp8TGipvCCRT06xeYEbpbgwaCxglFyiuIcmA==} + engines: {node: '>=18'} + + get-intrinsic@1.3.0: + resolution: {integrity: sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==} + engines: {node: '>= 0.4'} + + get-nonce@1.0.1: + resolution: {integrity: sha512-FJhYRoDaiatfEkUK8HKlicmu/3SGFD51q3itKDGoSTysQJBnfOcxU5GxnhE1E6soB76MbT0MBtnKJuXyAx+96Q==} + engines: {node: '>=6'} + + get-own-enumerable-keys@1.0.0: + resolution: {integrity: sha512-PKsK2FSrQCyxcGHsGrLDcK0lx+0Ke+6e8KFFozA9/fIQLhQzPaRvJFdcz7+Axg3jUH/Mq+NI4xa5u/UT2tQskA==} + engines: {node: '>=14.16'} + + get-proto@1.0.1: + resolution: {integrity: sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==} + engines: {node: '>= 0.4'} + + get-stream@6.0.1: + resolution: {integrity: sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==} + engines: {node: '>=10'} + + get-stream@9.0.1: + resolution: {integrity: 
sha512-kVCxPF3vQM/N0B1PmoqVUqgHP+EeVjmZSQn+1oCRPxd2P21P2F19lIgbR3HBosbB1PUhOAoctJnfEn2GbN2eZA==} + engines: {node: '>=18'} + + get-tsconfig@4.13.6: + resolution: {integrity: sha512-shZT/QMiSHc/YBLxxOkMtgSid5HFoauqCE3/exfsEcwg1WkeqjG+V40yBbBrsD+jW2HDXcs28xOfcbm2jI8Ddw==} + + giget@2.0.0: + resolution: {integrity: sha512-L5bGsVkxJbJgdnwyuheIunkGatUF/zssUoxxjACCseZYAVbaqdh9Tsmmlkl8vYan09H7sbvKt4pS8GqKLBrEzA==} + hasBin: true + + glob-parent@5.1.2: + resolution: {integrity: sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==} + engines: {node: '>= 6'} + + glob-parent@6.0.2: + resolution: {integrity: sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==} + engines: {node: '>=10.13.0'} + + globals@14.0.0: + resolution: {integrity: sha512-oahGvuMGQlPw/ivIYBjVSrWAfWLBeku5tpPE2fOPLi+WHffIWbuh2tCjhyQhTBPMf5E9jDEH4FOmTYgYwbKwtQ==} + engines: {node: '>=18'} + + globals@17.4.0: + resolution: {integrity: sha512-hjrNztw/VajQwOLsMNT1cbJiH2muO3OROCHnbehc8eY5JyD2gqz4AcMHPqgaOR59DjgUjYAYLeH699g/eWi2jw==} + engines: {node: '>=18'} + + gopd@1.2.0: + resolution: {integrity: sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==} + engines: {node: '>= 0.4'} + + graceful-fs@4.2.11: + resolution: {integrity: sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==} + + graphql@16.13.2: + resolution: {integrity: sha512-5bJ+nf/UCpAjHM8i06fl7eLyVC9iuNAjm9qzkiu2ZGhM0VscSvS6WDPfAwkdkBuoXGM9FJSbKl6wylMwP9Ktig==} + engines: {node: ^12.22.0 || ^14.16.0 || ^16.0.0 || >=17.0.0} + + has-flag@4.0.0: + resolution: {integrity: sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==} + engines: {node: '>=8'} + + has-symbols@1.1.0: + resolution: {integrity: sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==} + engines: {node: '>= 0.4'} + + 
has-tostringtag@1.0.2: + resolution: {integrity: sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==} + engines: {node: '>= 0.4'} + + hasown@2.0.2: + resolution: {integrity: sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==} + engines: {node: '>= 0.4'} + + hast-util-to-html@9.0.5: + resolution: {integrity: sha512-OguPdidb+fbHQSU4Q4ZiLKnzWo8Wwsf5bZfbvu7//a9oTYoqD/fWpe96NuHkoS9h0ccGOTe0C4NGXdtS0iObOw==} + + hast-util-to-jsx-runtime@2.3.6: + resolution: {integrity: sha512-zl6s8LwNyo1P9uw+XJGvZtdFF1GdAkOg8ujOw+4Pyb76874fLps4ueHXDhXWdk6YHQ6OgUtinliG7RsYvCbbBg==} + + hast-util-whitespace@3.0.0: + resolution: {integrity: sha512-88JUN06ipLwsnv+dVn+OIYOvAuvBMy/Qoi6O7mQHxdPXpjy+Cd6xRkWwux7DKO+4sYILtLBRIKgsdpS2gQc7qw==} + + headers-polyfill@4.0.3: + resolution: {integrity: sha512-IScLbePpkvO846sIwOtOTDjutRMWdXdJmXdMvk6gCBHxFO8d+QKOQedyZSxFTTFYRSmlgSTDtXqqq4pcenBXLQ==} + + hermes-estree@0.25.1: + resolution: {integrity: sha512-0wUoCcLp+5Ev5pDW2OriHC2MJCbwLwuRx+gAqMTOkGKJJiBCLjtrvy4PWUGn6MIVefecRpzoOZ/UV6iGdOr+Cw==} + + hermes-parser@0.25.1: + resolution: {integrity: sha512-6pEjquH3rqaI6cYAXYPcz9MS4rY6R4ngRgrgfDshRptUZIc3lw0MCIJIGDj9++mfySOuPTHB4nrSW99BCvOPIA==} + + hono@4.12.9: + resolution: {integrity: sha512-wy3T8Zm2bsEvxKZM5w21VdHDDcwVS1yUFFY6i8UobSsKfFceT7TOwhbhfKsDyx7tYQlmRM5FLpIuYvNFyjctiA==} + engines: {node: '>=16.9.0'} + + html-url-attributes@3.0.1: + resolution: {integrity: sha512-ol6UPyBWqsrO6EJySPz2O7ZSr856WDrEzM5zMqp+FJJLGMW35cLYmmZnl0vztAZxRUoNZJFTCohfjuIJ8I4QBQ==} + + html-void-elements@3.0.0: + resolution: {integrity: sha512-bEqo66MRXsUGxWHV5IP0PUiAWwoEjba4VCzg0LjFJBpchPaTfyfCKTG6bc5F8ucKec3q5y6qOdGyYTSBEvhCrg==} + + http-errors@2.0.1: + resolution: {integrity: sha512-4FbRdAX+bSdmo4AUFuS0WNiPz8NgFt+r8ThgNWmlrjQjt1Q7ZR9+zTlce2859x4KSXrwIsaeTqDoKQmtP8pLmQ==} + engines: {node: '>= 0.8'} + + https-proxy-agent@7.0.6: + resolution: {integrity: 
sha512-vK9P5/iUfdl95AI+JVyUuIcVtd4ofvtrOr3HNtM2yxC9bnMbEdp3x01OhQNnjb8IJYi38VlTE3mBXwcfvywuSw==} + engines: {node: '>= 14'} + + human-signals@2.1.0: + resolution: {integrity: sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==} + engines: {node: '>=10.17.0'} + + human-signals@8.0.1: + resolution: {integrity: sha512-eKCa6bwnJhvxj14kZk5NCPc6Hb6BdsU9DZcOnmQKSnO1VKrfV0zCvtttPZUsBvjmNDn8rpcJfpwSYnHBjc95MQ==} + engines: {node: '>=18.18.0'} + + iconv-lite@0.7.2: + resolution: {integrity: sha512-im9DjEDQ55s9fL4EYzOAv0yMqmMBSZp6G0VvFyTMPKWxiSBHUj9NW/qqLmXUwXrrM7AvqSlTCfvqRb0cM8yYqw==} + engines: {node: '>=0.10.0'} + + ignore@5.3.2: + resolution: {integrity: sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g==} + engines: {node: '>= 4'} + + ignore@7.0.5: + resolution: {integrity: sha512-Hs59xBNfUIunMFgWAbGX5cq6893IbWg4KnrjbYwX3tx0ztorVgTDA6B2sxf8ejHJ4wz8BqGUMYlnzNBer5NvGg==} + engines: {node: '>= 4'} + + immer@10.2.0: + resolution: {integrity: sha512-d/+XTN3zfODyjr89gM3mPq1WNX2B8pYsu7eORitdwyA2sBubnTl3laYlBk4sXY5FUa5qTZGBDPJICVbvqzjlbw==} + + immer@11.1.4: + resolution: {integrity: sha512-XREFCPo6ksxVzP4E0ekD5aMdf8WMwmdNaz6vuvxgI40UaEiu6q3p8X52aU6GdyvLY3XXX/8R7JOTXStz/nBbRw==} + + import-fresh@3.3.1: + resolution: {integrity: sha512-TR3KfrTZTYLPB6jUjfx6MF9WcWrHL9su5TObK4ZkYgBdWKPOFoSoQIdEuTuR82pmtxH2spWG9h6etwfr1pLBqQ==} + engines: {node: '>=6'} + + imurmurhash@0.1.4: + resolution: {integrity: sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==} + engines: {node: '>=0.8.19'} + + inherits@2.0.4: + resolution: {integrity: sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==} + + inline-style-parser@0.2.7: + resolution: {integrity: sha512-Nb2ctOyNR8DqQoR0OwRG95uNWIC0C1lCgf5Naz5H6Ji72KZ8OcFZLz2P5sNgwlyoJ8Yif11oMuYs5pBQa86csA==} + + input-otp@1.4.2: + resolution: {integrity: 
sha512-l3jWwYNvrEa6NTCt7BECfCm48GvwuZzkoeG3gBL2w4CHeOXW3eKFmf9UNYkNfYc3mxMrthMnxjIE07MT0zLBQA==} + peerDependencies: + react: ^16.8 || ^17.0 || ^18.0 || ^19.0.0 || ^19.0.0-rc + react-dom: ^16.8 || ^17.0 || ^18.0 || ^19.0.0 || ^19.0.0-rc + + internmap@2.0.3: + resolution: {integrity: sha512-5Hh7Y1wQbvY5ooGgPbDaL5iYLAPzMTUrjMulskHLH6wnv/A+1q5rgEaiuqEjB+oxGXIVZs1FF+R/KPN3ZSQYYg==} + engines: {node: '>=12'} + + ip-address@10.1.0: + resolution: {integrity: sha512-XXADHxXmvT9+CRxhXg56LJovE+bmWnEWB78LB83VZTprKTmaC5QfruXocxzTZ2Kl0DNwKuBdlIhjL8LeY8Sf8Q==} + engines: {node: '>= 12'} + + ipaddr.js@1.9.1: + resolution: {integrity: sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==} + engines: {node: '>= 0.10'} + + is-alphabetical@2.0.1: + resolution: {integrity: sha512-FWyyY60MeTNyeSRpkM2Iry0G9hpr7/9kD40mD/cGQEuilcZYS4okz8SN2Q6rLCJ8gbCt6fN+rC+6tMGS99LaxQ==} + + is-alphanumerical@2.0.1: + resolution: {integrity: sha512-hmbYhX/9MUMF5uh7tOXyK/n0ZvWpad5caBA17GsC6vyuCqaWliRG5K1qS9inmUhEMaOBIW7/whAnSwveW/LtZw==} + + is-arrayish@0.2.1: + resolution: {integrity: sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==} + + is-decimal@2.0.1: + resolution: {integrity: sha512-AAB9hiomQs5DXWcRB1rqsxGUstbRroFOPPVAomNk/3XHR5JyEZChOyTWe2oayKnsSsr/kcGqF+z6yuH6HHpN0A==} + + is-docker@3.0.0: + resolution: {integrity: sha512-eljcgEDlEns/7AXFosB5K/2nCM4P7FQPkGc/DWLy5rmFEWvZayGrik1d9/QIY5nJ4f9YsVvBkA6kJpHn9rISdQ==} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + hasBin: true + + is-extglob@2.1.1: + resolution: {integrity: sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==} + engines: {node: '>=0.10.0'} + + is-fullwidth-code-point@3.0.0: + resolution: {integrity: sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==} + engines: {node: '>=8'} + + is-glob@4.0.3: + resolution: {integrity: 
sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==} + engines: {node: '>=0.10.0'} + + is-hexadecimal@2.0.1: + resolution: {integrity: sha512-DgZQp241c8oO6cA1SbTEWiXeoxV42vlcJxgH+B3hi1AiqqKruZR3ZGF8In3fj4+/y/7rHvlOZLZtgJ/4ttYGZg==} + + is-in-ssh@1.0.0: + resolution: {integrity: sha512-jYa6Q9rH90kR1vKB6NM7qqd1mge3Fx4Dhw5TVlK1MUBqhEOuCagrEHMevNuCcbECmXZ0ThXkRm+Ymr51HwEPAw==} + engines: {node: '>=20'} + + is-inside-container@1.0.0: + resolution: {integrity: sha512-KIYLCCJghfHZxqjYBE7rEy0OBuTd5xCHS7tHVgvCLkx7StIoaxwNW3hCALgEUjFfeRk+MG/Qxmp/vtETEF3tRA==} + engines: {node: '>=14.16'} + hasBin: true + + is-interactive@2.0.0: + resolution: {integrity: sha512-qP1vozQRI+BMOPcjFzrjXuQvdak2pHNUMZoeG2eRbiSqyvbEf/wQtEOTOX1guk6E3t36RkaqiSt8A/6YElNxLQ==} + engines: {node: '>=12'} + + is-node-process@1.2.0: + resolution: {integrity: sha512-Vg4o6/fqPxIjtxgUH5QLJhwZ7gW5diGCVlXpuUfELC62CuxM1iHcRe51f2W1FDy04Ai4KJkagKjx3XaqyfRKXw==} + + is-number@7.0.0: + resolution: {integrity: sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==} + engines: {node: '>=0.12.0'} + + is-obj@3.0.0: + resolution: {integrity: sha512-IlsXEHOjtKhpN8r/tRFj2nDyTmHvcfNeu/nrRIcXE17ROeatXchkojffa1SpdqW4cr/Fj6QkEf/Gn4zf6KKvEQ==} + engines: {node: '>=12'} + + is-plain-obj@4.1.0: + resolution: {integrity: sha512-+Pgi+vMuUNkJyExiMBt5IlFoMyKnr5zhJ4Uspz58WOhBF5QoIZkFyNHIbBAtHwzVAgk5RtndVNsDRN61/mmDqg==} + engines: {node: '>=12'} + + is-promise@4.0.0: + resolution: {integrity: sha512-hvpoI6korhJMnej285dSg6nu1+e6uxs7zG3BYAm5byqDsgJNWwxzM6z6iZiAgQR4TJ30JmBTOwqZUw3WlyH3AQ==} + + is-regexp@3.1.0: + resolution: {integrity: sha512-rbku49cWloU5bSMI+zaRaXdQHXnthP6DZ/vLnfdSKyL4zUzuWnomtOEiZZOd+ioQ+avFo/qau3KPTc7Fjy1uPA==} + engines: {node: '>=12'} + + is-stream@2.0.1: + resolution: {integrity: sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==} + engines: {node: '>=8'} + + is-stream@4.0.1: + 
resolution: {integrity: sha512-Dnz92NInDqYckGEUJv689RbRiTSEHCQ7wOVeALbkOz999YpqT46yMRIGtSNl2iCL1waAZSx40+h59NV/EwzV/A==} + engines: {node: '>=18'} + + is-unicode-supported@1.3.0: + resolution: {integrity: sha512-43r2mRvz+8JRIKnWJ+3j8JtjRKZ6GmjzfaE/qiBJnikNnYv/6bagRJ1kUhNk8R5EX/GkobD+r+sfxCPJsiKBLQ==} + engines: {node: '>=12'} + + is-unicode-supported@2.1.0: + resolution: {integrity: sha512-mE00Gnza5EEB3Ds0HfMyllZzbBrmLOX3vfWoj9A9PEnTfratQ/BcaJOuMhnkhjXvb2+FkY3VuHqtAGpTPmglFQ==} + engines: {node: '>=18'} + + is-wsl@3.1.1: + resolution: {integrity: sha512-e6rvdUCiQCAuumZslxRJWR/Doq4VpPR82kqclvcS0efgt430SlGIk05vdCN58+VrzgtIcfNODjozVielycD4Sw==} + engines: {node: '>=16'} + + isexe@2.0.0: + resolution: {integrity: sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==} + + isexe@3.1.5: + resolution: {integrity: sha512-6B3tLtFqtQS4ekarvLVMZ+X+VlvQekbe4taUkf/rhVO3d/h0M2rfARm/pXLcPEsjjMsFgrFgSrhQIxcSVrBz8w==} + engines: {node: '>=18'} + + jiti@2.6.1: + resolution: {integrity: sha512-ekilCSN1jwRvIbgeg/57YFh8qQDNbwDb9xT/qu2DAHbFFZUicIl4ygVaAvzveMhMVr3LnpSKTNnwt8PoOfmKhQ==} + hasBin: true + + jose@6.2.2: + resolution: {integrity: sha512-d7kPDd34KO/YnzaDOlikGpOurfF0ByC2sEV4cANCtdqLlTfBlw2p14O/5d/zv40gJPbIQxfES3nSx1/oYNyuZQ==} + + js-tokens@4.0.0: + resolution: {integrity: sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==} + + js-yaml@4.1.1: + resolution: {integrity: sha512-qQKT4zQxXl8lLwBtHMWwaTcGfFOZviOJet3Oy/xmGk2gZH677CJM9EvtfdSkgWcATZhj/55JZ0rmy3myCT5lsA==} + hasBin: true + + jsencrypt@3.5.4: + resolution: {integrity: sha512-kNjfYEMNASxrDGsmcSQh/rUTmcoRfSUkxnAz+MMywM8jtGu+fFEZ3nJjHM58zscVnwR0fYmG9sGkTDjqUdpiwA==} + + jsesc@3.1.0: + resolution: {integrity: sha512-/sM3dO2FOzXjKQhJuo0Q173wf2KOo8t4I8vHy6lF9poUp7bKT0/NHE8fPX23PwfhnykfqnC2xRxOnVw5XuGIaA==} + engines: {node: '>=6'} + hasBin: true + + json-buffer@3.0.1: + resolution: {integrity: 
sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==} + + json-parse-even-better-errors@2.3.1: + resolution: {integrity: sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==} + + json-schema-traverse@0.4.1: + resolution: {integrity: sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==} + + json-schema-traverse@1.0.0: + resolution: {integrity: sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==} + + json-schema-typed@8.0.2: + resolution: {integrity: sha512-fQhoXdcvc3V28x7C7BMs4P5+kNlgUURe2jmUT1T//oBRMDrqy1QPelJimwZGo7Hg9VPV3EQV5Bnq4hbFy2vetA==} + + json-stable-stringify-without-jsonify@1.0.1: + resolution: {integrity: sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==} + + json5@2.2.3: + resolution: {integrity: sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==} + engines: {node: '>=6'} + hasBin: true + + jsonfile@6.2.0: + resolution: {integrity: sha512-FGuPw30AdOIUTRMC2OMRtQV+jkVj2cfPqSeWXv1NEAJ1qZ5zb1X6z1mFhbfOB/iy3ssJCD+3KuZ8r8C3uVFlAg==} + + keyv@4.5.4: + resolution: {integrity: sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw==} + + kleur@3.0.3: + resolution: {integrity: sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w==} + engines: {node: '>=6'} + + kleur@4.1.5: + resolution: {integrity: sha512-o+NO+8WrRiQEE4/7nwRJhN1HWpVmJm511pBHUxPLtp0BUISzlBplORYSmTclCnJvQq2tKu/sgl3xVpkc7ZWuQQ==} + engines: {node: '>=6'} + + levn@0.4.1: + resolution: {integrity: sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==} + engines: {node: '>= 0.8.0'} + + lightningcss-android-arm64@1.32.0: + resolution: {integrity: 
sha512-YK7/ClTt4kAK0vo6w3X+Pnm0D2cf2vPHbhOXdoNti1Ga0al1P4TBZhwjATvjNwLEBCnKvjJc2jQgHXH0NEwlAg==} + engines: {node: '>= 12.0.0'} + cpu: [arm64] + os: [android] + + lightningcss-darwin-arm64@1.32.0: + resolution: {integrity: sha512-RzeG9Ju5bag2Bv1/lwlVJvBE3q6TtXskdZLLCyfg5pt+HLz9BqlICO7LZM7VHNTTn/5PRhHFBSjk5lc4cmscPQ==} + engines: {node: '>= 12.0.0'} + cpu: [arm64] + os: [darwin] + + lightningcss-darwin-x64@1.32.0: + resolution: {integrity: sha512-U+QsBp2m/s2wqpUYT/6wnlagdZbtZdndSmut/NJqlCcMLTWp5muCrID+K5UJ6jqD2BFshejCYXniPDbNh73V8w==} + engines: {node: '>= 12.0.0'} + cpu: [x64] + os: [darwin] + + lightningcss-freebsd-x64@1.32.0: + resolution: {integrity: sha512-JCTigedEksZk3tHTTthnMdVfGf61Fky8Ji2E4YjUTEQX14xiy/lTzXnu1vwiZe3bYe0q+SpsSH/CTeDXK6WHig==} + engines: {node: '>= 12.0.0'} + cpu: [x64] + os: [freebsd] + + lightningcss-linux-arm-gnueabihf@1.32.0: + resolution: {integrity: sha512-x6rnnpRa2GL0zQOkt6rts3YDPzduLpWvwAF6EMhXFVZXD4tPrBkEFqzGowzCsIWsPjqSK+tyNEODUBXeeVHSkw==} + engines: {node: '>= 12.0.0'} + cpu: [arm] + os: [linux] + + lightningcss-linux-arm64-gnu@1.32.0: + resolution: {integrity: sha512-0nnMyoyOLRJXfbMOilaSRcLH3Jw5z9HDNGfT/gwCPgaDjnx0i8w7vBzFLFR1f6CMLKF8gVbebmkUN3fa/kQJpQ==} + engines: {node: '>= 12.0.0'} + cpu: [arm64] + os: [linux] + libc: [glibc] + + lightningcss-linux-arm64-musl@1.32.0: + resolution: {integrity: sha512-UpQkoenr4UJEzgVIYpI80lDFvRmPVg6oqboNHfoH4CQIfNA+HOrZ7Mo7KZP02dC6LjghPQJeBsvXhJod/wnIBg==} + engines: {node: '>= 12.0.0'} + cpu: [arm64] + os: [linux] + libc: [musl] + + lightningcss-linux-x64-gnu@1.32.0: + resolution: {integrity: sha512-V7Qr52IhZmdKPVr+Vtw8o+WLsQJYCTd8loIfpDaMRWGUZfBOYEJeyJIkqGIDMZPwPx24pUMfwSxxI8phr/MbOA==} + engines: {node: '>= 12.0.0'} + cpu: [x64] + os: [linux] + libc: [glibc] + + lightningcss-linux-x64-musl@1.32.0: + resolution: {integrity: sha512-bYcLp+Vb0awsiXg/80uCRezCYHNg1/l3mt0gzHnWV9XP1W5sKa5/TCdGWaR/zBM2PeF/HbsQv/j2URNOiVuxWg==} + engines: {node: '>= 12.0.0'} + cpu: [x64] + os: [linux] + libc: [musl] + 
+ lightningcss-win32-arm64-msvc@1.32.0: + resolution: {integrity: sha512-8SbC8BR40pS6baCM8sbtYDSwEVQd4JlFTOlaD3gWGHfThTcABnNDBda6eTZeqbofalIJhFx0qKzgHJmcPTnGdw==} + engines: {node: '>= 12.0.0'} + cpu: [arm64] + os: [win32] + + lightningcss-win32-x64-msvc@1.32.0: + resolution: {integrity: sha512-Amq9B/SoZYdDi1kFrojnoqPLxYhQ4Wo5XiL8EVJrVsB8ARoC1PWW6VGtT0WKCemjy8aC+louJnjS7U18x3b06Q==} + engines: {node: '>= 12.0.0'} + cpu: [x64] + os: [win32] + + lightningcss@1.32.0: + resolution: {integrity: sha512-NXYBzinNrblfraPGyrbPoD19C1h9lfI/1mzgWYvXUTe414Gz/X1FD2XBZSZM7rRTrMA8JL3OtAaGifrIKhQ5yQ==} + engines: {node: '>= 12.0.0'} + + lines-and-columns@1.2.4: + resolution: {integrity: sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==} + + locate-path@6.0.0: + resolution: {integrity: sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==} + engines: {node: '>=10'} + + lodash.merge@4.6.2: + resolution: {integrity: sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==} + + log-symbols@6.0.0: + resolution: {integrity: sha512-i24m8rpwhmPIS4zscNzK6MSEhk0DUWa/8iYQWxhffV8jkI4Phvs3F+quL5xvS0gdQR0FyTCMMH33Y78dDTzzIw==} + engines: {node: '>=18'} + + longest-streak@3.1.0: + resolution: {integrity: sha512-9Ri+o0JYgehTaVBBDoMqIl8GXtbWg711O3srftcHhZ0dqnETqLaoIK0x17fUw9rFSlK/0NlsKe0Ahhyl5pXE2g==} + + lru-cache@5.1.1: + resolution: {integrity: sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==} + + lucide-react@1.7.0: + resolution: {integrity: sha512-yI7BeItCLZJTXikmK4KNUGCKoGzSvbKlfCvw44bU4fXAL6v3gYS4uHD1jzsLkfwODYwI6Drw5Tu9Z5ulDe0TSg==} + peerDependencies: + react: ^16.5.1 || ^17.0.0 || ^18.0.0 || ^19.0.0 + + magic-string@0.30.21: + resolution: {integrity: sha512-vd2F4YUyEXKGcLHoq+TEyCjxueSeHnFxyyjNp80yg0XV4vUhnDer/lvvlqM/arB5bXQN5K2/3oinyCRyx8T2CQ==} + + markdown-table@3.0.4: + resolution: {integrity: 
sha512-wiYz4+JrLyb/DqW2hkFJxP7Vd7JuTDm77fvbM8VfEQdmSMqcImWeeRbHwZjBjIFki/VaMK2BhFi7oUUZeM5bqw==} + + math-intrinsics@1.1.0: + resolution: {integrity: sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==} + engines: {node: '>= 0.4'} + + mdast-util-find-and-replace@3.0.2: + resolution: {integrity: sha512-Tmd1Vg/m3Xz43afeNxDIhWRtFZgM2VLyaf4vSTYwudTyeuTneoL3qtWMA5jeLyz/O1vDJmmV4QuScFCA2tBPwg==} + + mdast-util-from-markdown@2.0.3: + resolution: {integrity: sha512-W4mAWTvSlKvf8L6J+VN9yLSqQ9AOAAvHuoDAmPkz4dHf553m5gVj2ejadHJhoJmcmxEnOv6Pa8XJhpxE93kb8Q==} + + mdast-util-gfm-autolink-literal@2.0.1: + resolution: {integrity: sha512-5HVP2MKaP6L+G6YaxPNjuL0BPrq9orG3TsrZ9YXbA3vDw/ACI4MEsnoDpn6ZNm7GnZgtAcONJyPhOP8tNJQavQ==} + + mdast-util-gfm-footnote@2.1.0: + resolution: {integrity: sha512-sqpDWlsHn7Ac9GNZQMeUzPQSMzR6Wv0WKRNvQRg0KqHh02fpTz69Qc1QSseNX29bhz1ROIyNyxExfawVKTm1GQ==} + + mdast-util-gfm-strikethrough@2.0.0: + resolution: {integrity: sha512-mKKb915TF+OC5ptj5bJ7WFRPdYtuHv0yTRxK2tJvi+BDqbkiG7h7u/9SI89nRAYcmap2xHQL9D+QG/6wSrTtXg==} + + mdast-util-gfm-table@2.0.0: + resolution: {integrity: sha512-78UEvebzz/rJIxLvE7ZtDd/vIQ0RHv+3Mh5DR96p7cS7HsBhYIICDBCu8csTNWNO6tBWfqXPWekRuj2FNOGOZg==} + + mdast-util-gfm-task-list-item@2.0.0: + resolution: {integrity: sha512-IrtvNvjxC1o06taBAVJznEnkiHxLFTzgonUdy8hzFVeDun0uTjxxrRGVaNFqkU1wJR3RBPEfsxmU6jDWPofrTQ==} + + mdast-util-gfm@3.1.0: + resolution: {integrity: sha512-0ulfdQOM3ysHhCJ1p06l0b0VKlhU0wuQs3thxZQagjcjPrlFRqY215uZGHHJan9GEAXd9MbfPjFJz+qMkVR6zQ==} + + mdast-util-mdx-expression@2.0.1: + resolution: {integrity: sha512-J6f+9hUp+ldTZqKRSg7Vw5V6MqjATc+3E4gf3CFNcuZNWD8XdyI6zQ8GqH7f8169MM6P7hMBRDVGnn7oHB9kXQ==} + + mdast-util-mdx-jsx@3.2.0: + resolution: {integrity: sha512-lj/z8v0r6ZtsN/cGNNtemmmfoLAFZnjMbNyLzBafjzikOM+glrjNHPlf6lQDOTccj9n5b0PPihEBbhneMyGs1Q==} + + mdast-util-mdxjs-esm@2.0.1: + resolution: {integrity: 
sha512-EcmOpxsZ96CvlP03NghtH1EsLtr0n9Tm4lPUJUBccV9RwUOneqSycg19n5HGzCf+10LozMRSObtVr3ee1WoHtg==} + + mdast-util-phrasing@4.1.0: + resolution: {integrity: sha512-TqICwyvJJpBwvGAMZjj4J2n0X8QWp21b9l0o7eXyVJ25YNWYbJDVIyD1bZXE6WtV6RmKJVYmQAKWa0zWOABz2w==} + + mdast-util-to-hast@13.2.1: + resolution: {integrity: sha512-cctsq2wp5vTsLIcaymblUriiTcZd0CwWtCbLvrOzYCDZoWyMNV8sZ7krj09FSnsiJi3WVsHLM4k6Dq/yaPyCXA==} + + mdast-util-to-markdown@2.1.2: + resolution: {integrity: sha512-xj68wMTvGXVOKonmog6LwyJKrYXZPvlwabaryTjLh9LuvovB/KAH+kvi8Gjj+7rJjsFi23nkUxRQv1KqSroMqA==} + + mdast-util-to-string@4.0.0: + resolution: {integrity: sha512-0H44vDimn51F0YwvxSJSm0eCDOJTRlmN0R1yBh4HLj9wiV1Dn0QoXGbvFAWj2hSItVTlCmBF1hqKlIyUBVFLPg==} + + media-typer@1.1.0: + resolution: {integrity: sha512-aisnrDP4GNe06UcKFnV5bfMNPBUw4jsLGaWwWfnH3v02GnBuXX2MCVn5RbrWo0j3pczUilYblq7fQ7Nw2t5XKw==} + engines: {node: '>= 0.8'} + + merge-descriptors@2.0.0: + resolution: {integrity: sha512-Snk314V5ayFLhp3fkUREub6WtjBfPdCPY1Ln8/8munuLuiYhsABgBVWsozAG+MWMbVEvcdcpbi9R7ww22l9Q3g==} + engines: {node: '>=18'} + + merge-stream@2.0.0: + resolution: {integrity: sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==} + + merge2@1.4.1: + resolution: {integrity: sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==} + engines: {node: '>= 8'} + + micromark-core-commonmark@2.0.3: + resolution: {integrity: sha512-RDBrHEMSxVFLg6xvnXmb1Ayr2WzLAWjeSATAoxwKYJV94TeNavgoIdA0a9ytzDSVzBy2YKFK+emCPOEibLeCrg==} + + micromark-extension-gfm-autolink-literal@2.1.0: + resolution: {integrity: sha512-oOg7knzhicgQ3t4QCjCWgTmfNhvQbDDnJeVu9v81r7NltNCVmhPy1fJRX27pISafdjL+SVc4d3l48Gb6pbRypw==} + + micromark-extension-gfm-footnote@2.1.0: + resolution: {integrity: sha512-/yPhxI1ntnDNsiHtzLKYnE3vf9JZ6cAisqVDauhp4CEHxlb4uoOTxOCJ+9s51bIB8U1N1FJ1RXOKTIlD5B/gqw==} + + micromark-extension-gfm-strikethrough@2.1.0: + resolution: {integrity: 
sha512-ADVjpOOkjz1hhkZLlBiYA9cR2Anf8F4HqZUO6e5eDcPQd0Txw5fxLzzxnEkSkfnD0wziSGiv7sYhk/ktvbf1uw==} + + micromark-extension-gfm-table@2.1.1: + resolution: {integrity: sha512-t2OU/dXXioARrC6yWfJ4hqB7rct14e8f7m0cbI5hUmDyyIlwv5vEtooptH8INkbLzOatzKuVbQmAYcbWoyz6Dg==} + + micromark-extension-gfm-tagfilter@2.0.0: + resolution: {integrity: sha512-xHlTOmuCSotIA8TW1mDIM6X2O1SiX5P9IuDtqGonFhEK0qgRI4yeC6vMxEV2dgyr2TiD+2PQ10o+cOhdVAcwfg==} + + micromark-extension-gfm-task-list-item@2.1.0: + resolution: {integrity: sha512-qIBZhqxqI6fjLDYFTBIa4eivDMnP+OZqsNwmQ3xNLE4Cxwc+zfQEfbs6tzAo2Hjq+bh6q5F+Z8/cksrLFYWQQw==} + + micromark-extension-gfm@3.0.0: + resolution: {integrity: sha512-vsKArQsicm7t0z2GugkCKtZehqUm31oeGBV/KVSorWSy8ZlNAv7ytjFhvaryUiCUJYqs+NoE6AFhpQvBTM6Q4w==} + + micromark-factory-destination@2.0.1: + resolution: {integrity: sha512-Xe6rDdJlkmbFRExpTOmRj9N3MaWmbAgdpSrBQvCFqhezUn4AHqJHbaEnfbVYYiexVSs//tqOdY/DxhjdCiJnIA==} + + micromark-factory-label@2.0.1: + resolution: {integrity: sha512-VFMekyQExqIW7xIChcXn4ok29YE3rnuyveW3wZQWWqF4Nv9Wk5rgJ99KzPvHjkmPXF93FXIbBp6YdW3t71/7Vg==} + + micromark-factory-space@2.0.1: + resolution: {integrity: sha512-zRkxjtBxxLd2Sc0d+fbnEunsTj46SWXgXciZmHq0kDYGnck/ZSGj9/wULTV95uoeYiK5hRXP2mJ98Uo4cq/LQg==} + + micromark-factory-title@2.0.1: + resolution: {integrity: sha512-5bZ+3CjhAd9eChYTHsjy6TGxpOFSKgKKJPJxr293jTbfry2KDoWkhBb6TcPVB4NmzaPhMs1Frm9AZH7OD4Cjzw==} + + micromark-factory-whitespace@2.0.1: + resolution: {integrity: sha512-Ob0nuZ3PKt/n0hORHyvoD9uZhr+Za8sFoP+OnMcnWK5lngSzALgQYKMr9RJVOWLqQYuyn6ulqGWSXdwf6F80lQ==} + + micromark-util-character@2.1.1: + resolution: {integrity: sha512-wv8tdUTJ3thSFFFJKtpYKOYiGP2+v96Hvk4Tu8KpCAsTMs6yi+nVmGh1syvSCsaxz45J6Jbw+9DD6g97+NV67Q==} + + micromark-util-chunked@2.0.1: + resolution: {integrity: sha512-QUNFEOPELfmvv+4xiNg2sRYeS/P84pTW0TCgP5zc9FpXetHY0ab7SxKyAQCNCc1eK0459uoLI1y5oO5Vc1dbhA==} + + micromark-util-classify-character@2.0.1: + resolution: {integrity: 
sha512-K0kHzM6afW/MbeWYWLjoHQv1sgg2Q9EccHEDzSkxiP/EaagNzCm7T/WMKZ3rjMbvIpvBiZgwR3dKMygtA4mG1Q==} + + micromark-util-combine-extensions@2.0.1: + resolution: {integrity: sha512-OnAnH8Ujmy59JcyZw8JSbK9cGpdVY44NKgSM7E9Eh7DiLS2E9RNQf0dONaGDzEG9yjEl5hcqeIsj4hfRkLH/Bg==} + + micromark-util-decode-numeric-character-reference@2.0.2: + resolution: {integrity: sha512-ccUbYk6CwVdkmCQMyr64dXz42EfHGkPQlBj5p7YVGzq8I7CtjXZJrubAYezf7Rp+bjPseiROqe7G6foFd+lEuw==} + + micromark-util-decode-string@2.0.1: + resolution: {integrity: sha512-nDV/77Fj6eH1ynwscYTOsbK7rR//Uj0bZXBwJZRfaLEJ1iGBR6kIfNmlNqaqJf649EP0F3NWNdeJi03elllNUQ==} + + micromark-util-encode@2.0.1: + resolution: {integrity: sha512-c3cVx2y4KqUnwopcO9b/SCdo2O67LwJJ/UyqGfbigahfegL9myoEFoDYZgkT7f36T0bLrM9hZTAaAyH+PCAXjw==} + + micromark-util-html-tag-name@2.0.1: + resolution: {integrity: sha512-2cNEiYDhCWKI+Gs9T0Tiysk136SnR13hhO8yW6BGNyhOC4qYFnwF1nKfD3HFAIXA5c45RrIG1ub11GiXeYd1xA==} + + micromark-util-normalize-identifier@2.0.1: + resolution: {integrity: sha512-sxPqmo70LyARJs0w2UclACPUUEqltCkJ6PhKdMIDuJ3gSf/Q+/GIe3WKl0Ijb/GyH9lOpUkRAO2wp0GVkLvS9Q==} + + micromark-util-resolve-all@2.0.1: + resolution: {integrity: sha512-VdQyxFWFT2/FGJgwQnJYbe1jjQoNTS4RjglmSjTUlpUMa95Htx9NHeYW4rGDJzbjvCsl9eLjMQwGeElsqmzcHg==} + + micromark-util-sanitize-uri@2.0.1: + resolution: {integrity: sha512-9N9IomZ/YuGGZZmQec1MbgxtlgougxTodVwDzzEouPKo3qFWvymFHWcnDi2vzV1ff6kas9ucW+o3yzJK9YB1AQ==} + + micromark-util-subtokenize@2.1.0: + resolution: {integrity: sha512-XQLu552iSctvnEcgXw6+Sx75GflAPNED1qx7eBJ+wydBb2KCbRZe+NwvIEEMM83uml1+2WSXpBAcp9IUCgCYWA==} + + micromark-util-symbol@2.0.1: + resolution: {integrity: sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q==} + + micromark-util-types@2.0.2: + resolution: {integrity: sha512-Yw0ECSpJoViF1qTU4DC6NwtC4aWGt1EkzaQB8KPPyCRR8z9TWeV0HbEFGTO+ZY1wB22zmxnJqhPyTpOVCpeHTA==} + + micromark@4.0.2: + resolution: {integrity: 
sha512-zpe98Q6kvavpCr1NPVSCMebCKfD7CA2NqZ+rykeNhONIJBpc1tFKt9hucLGwha3jNTNI8lHpctWJWoimVF4PfA==} + + micromatch@4.0.8: + resolution: {integrity: sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==} + engines: {node: '>=8.6'} + + mime-db@1.52.0: + resolution: {integrity: sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==} + engines: {node: '>= 0.6'} + + mime-db@1.54.0: + resolution: {integrity: sha512-aU5EJuIN2WDemCcAp2vFBfp/m4EAhWJnUNSSw0ixs7/kXbd6Pg64EmwJkNdFhB8aWt1sH2CTXrLxo/iAGV3oPQ==} + engines: {node: '>= 0.6'} + + mime-types@2.1.35: + resolution: {integrity: sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==} + engines: {node: '>= 0.6'} + + mime-types@3.0.2: + resolution: {integrity: sha512-Lbgzdk0h4juoQ9fCKXW4by0UJqj+nOOrI9MJ1sSj4nI8aI2eo1qmvQEie4VD1glsS250n15LsWsYtCugiStS5A==} + engines: {node: '>=18'} + + mimic-fn@2.1.0: + resolution: {integrity: sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==} + engines: {node: '>=6'} + + mimic-function@5.0.1: + resolution: {integrity: sha512-VP79XUPxV2CigYP3jWwAUFSku2aKqBH7uTAapFWCBqutsbmDo96KY5o8uh6U+/YSIn5OxJnXp73beVkpqMIGhA==} + engines: {node: '>=18'} + + minimatch@10.2.5: + resolution: {integrity: sha512-MULkVLfKGYDFYejP07QOurDLLQpcjk7Fw+7jXS2R2czRQzR56yHRveU5NDJEOviH+hETZKSkIk5c+T23GjFUMg==} + engines: {node: 18 || 20 || >=22} + + minimatch@3.1.5: + resolution: {integrity: sha512-VgjWUsnnT6n+NUk6eZq77zeFdpW2LWDzP6zFGrCbHXiYNul5Dzqk2HHQ5uFH2DNW5Xbp8+jVzaeNt94ssEEl4w==} + + minimist@1.2.8: + resolution: {integrity: sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==} + + motion-dom@11.18.1: + resolution: {integrity: sha512-g76KvA001z+atjfxczdRtw/RXOM3OMSdd1f4DL77qCTF/+avrRJiawSG4yDibEQ215sr9kpinSlX2pCTJ9zbhw==} + + motion-utils@11.18.1: + resolution: {integrity: 
sha512-49Kt+HKjtbJKLtgO/LKj9Ld+6vw9BjH5d9sc40R/kVyH8GLAXgT42M2NnuPcJNuA3s9ZfZBUcwIgpmZWGEE+hA==} + + ms@2.1.3: + resolution: {integrity: sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==} + + msw@2.12.14: + resolution: {integrity: sha512-4KXa4nVBIBjbDbd7vfQNuQ25eFxug0aropCQFoI0JdOBuJWamkT1yLVIWReFI8SiTRc+H1hKzaNk+cLk2N9rtQ==} + engines: {node: '>=18'} + hasBin: true + peerDependencies: + typescript: '>= 4.8.x' + peerDependenciesMeta: + typescript: + optional: true + + mute-stream@2.0.0: + resolution: {integrity: sha512-WWdIxpyjEn+FhQJQQv9aQAYlHoNVdzIzUySNV1gHUPDSdZJ3yZn7pAAbQcV7B56Mvu881q9FZV+0Vx2xC44VWA==} + engines: {node: ^18.17.0 || >=20.5.0} + + nanoid@3.3.11: + resolution: {integrity: sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==} + engines: {node: ^10 || ^12 || ^13.7 || ^14 || >=15.0.1} + hasBin: true + + natural-compare@1.4.0: + resolution: {integrity: sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==} + + negotiator@1.0.0: + resolution: {integrity: sha512-8Ofs/AUQh8MaEcrlq5xOX0CQ9ypTF5dl78mjlMNfOK08fzpgTHQRQPBxcPlEtIw0yRpws+Zo/3r+5WRby7u3Gg==} + engines: {node: '>= 0.6'} + + next-themes@0.4.6: + resolution: {integrity: sha512-pZvgD5L0IEvX5/9GWyHMf3m8BKiVQwsCMHfoFosXtXBMnaS0ZnIJ9ST4b4NqLVKDEm8QBxoNNGNaBv2JNF6XNA==} + peerDependencies: + react: ^16.8 || ^17 || ^18 || ^19 || ^19.0.0-rc + react-dom: ^16.8 || ^17 || ^18 || ^19 || ^19.0.0-rc + + node-domexception@1.0.0: + resolution: {integrity: sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==} + engines: {node: '>=10.5.0'} + deprecated: Use your platform's native DOMException instead + + node-fetch-native@1.6.7: + resolution: {integrity: sha512-g9yhqoedzIUm0nTnTqAQvueMPVOuIY16bqgAJJC8XOOubYFNwz6IER9qs0Gq2Xd0+CecCKFjtdDTMA4u4xG06Q==} + + node-fetch@3.3.2: + resolution: {integrity: 
sha512-dRB78srN/l6gqWulah9SrxeYnxeddIG30+GOqK/9OlLVyLg3HPnr6SqOWTWOXKRwC2eGYCkZ59NNuSgvSrpgOA==} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + + node-releases@2.0.36: + resolution: {integrity: sha512-TdC8FSgHz8Mwtw9g5L4gR/Sh9XhSP/0DEkQxfEFXOpiul5IiHgHan2VhYYb6agDSfp4KuvltmGApc8HMgUrIkA==} + + npm-run-path@4.0.1: + resolution: {integrity: sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==} + engines: {node: '>=8'} + + npm-run-path@6.0.0: + resolution: {integrity: sha512-9qny7Z9DsQU8Ou39ERsPU4OZQlSTP47ShQzuKZ6PRXpYLtIFgl/DEBYEXKlvcEa+9tHVcK8CF81Y2V72qaZhWA==} + engines: {node: '>=18'} + + nypm@0.6.5: + resolution: {integrity: sha512-K6AJy1GMVyfyMXRVB88700BJqNUkByijGJM8kEHpLdcAt+vSQAVfkWWHYzuRXHSY6xA2sNc5RjTj0p9rE2izVQ==} + engines: {node: '>=18'} + hasBin: true + + object-assign@4.1.1: + resolution: {integrity: sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==} + engines: {node: '>=0.10.0'} + + object-inspect@1.13.4: + resolution: {integrity: sha512-W67iLl4J2EXEGTbfeHCffrjDfitvLANg0UlX3wFUUSTx92KXRFegMHUVgSqE+wvhAbi4WqjGg9czysTV2Epbew==} + engines: {node: '>= 0.4'} + + object-treeify@1.1.33: + resolution: {integrity: sha512-EFVjAYfzWqWsBMRHPMAXLCDIJnpMhdWAqR7xG6M6a2cs6PMFpl/+Z20w9zDW4vkxOFfddegBKq9Rehd0bxWE7A==} + engines: {node: '>= 10'} + + ohash@2.0.11: + resolution: {integrity: sha512-RdR9FQrFwNBNXAr4GixM8YaRZRJ5PUWbKYbE5eOsrwAjJW0q2REGcf79oYPsLyskQCZG1PLN+S/K1V00joZAoQ==} + + on-finished@2.4.1: + resolution: {integrity: sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg==} + engines: {node: '>= 0.8'} + + once@1.4.0: + resolution: {integrity: sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==} + + onetime@5.1.2: + resolution: {integrity: sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==} + engines: {node: '>=6'} + + onetime@7.0.0: + 
resolution: {integrity: sha512-VXJjc87FScF88uafS3JllDgvAm+c/Slfz06lorj2uAY34rlUu0Nt+v8wreiImcrgAjjIHp1rXpTDlLOGw29WwQ==} + engines: {node: '>=18'} + + oniguruma-to-es@2.3.0: + resolution: {integrity: sha512-bwALDxriqfKGfUufKGGepCzu9x7nJQuoRoAFp4AnwehhC2crqrDIAP/uN2qdlsAvSMpeRC3+Yzhqc7hLmle5+g==} + + open@11.0.0: + resolution: {integrity: sha512-smsWv2LzFjP03xmvFoJ331ss6h+jixfA4UUV/Bsiyuu4YJPfN+FIQGOIiv4w9/+MoHkfkJ22UIaQWRVFRfH6Vw==} + engines: {node: '>=20'} + + optionator@0.9.4: + resolution: {integrity: sha512-6IpQ7mKUxRcZNLIObR0hz7lxsapSSIYNZJwXPGeF0mTVqGKFIXj1DQcMoT22S3ROcLyY/rz0PWaWZ9ayWmad9g==} + engines: {node: '>= 0.8.0'} + + ora@8.2.0: + resolution: {integrity: sha512-weP+BZ8MVNnlCm8c0Qdc1WSWq4Qn7I+9CJGm7Qali6g44e/PUzbjNqJX5NJ9ljlNMosfJvg1fKEGILklK9cwnw==} + engines: {node: '>=18'} + + outvariant@1.4.3: + resolution: {integrity: sha512-+Sl2UErvtsoajRDKCE5/dBz4DIvHXQQnAxtQTF04OJxY0+DyZXSo5P5Bb7XYWOh81syohlYL24hbDwxedPUJCA==} + + p-limit@3.1.0: + resolution: {integrity: sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==} + engines: {node: '>=10'} + + p-locate@5.0.0: + resolution: {integrity: sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==} + engines: {node: '>=10'} + + parent-module@1.0.1: + resolution: {integrity: sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==} + engines: {node: '>=6'} + + parse-entities@4.0.2: + resolution: {integrity: sha512-GG2AQYWoLgL877gQIKeRPGO1xF9+eG1ujIb5soS5gPvLQ1y2o8FL90w2QWNdf9I361Mpp7726c+lj3U0qK1uGw==} + + parse-json@5.2.0: + resolution: {integrity: sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==} + engines: {node: '>=8'} + + parse-ms@4.0.0: + resolution: {integrity: sha512-TXfryirbmq34y8QBwgqCVLi+8oA3oWx2eAnSn62ITyEhEYaWRlVZ2DvMM9eZbMs/RfxPu/PK/aBLyGj4IrqMHw==} + engines: {node: '>=18'} + + parseurl@1.3.3: + resolution: {integrity: 
sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==} + engines: {node: '>= 0.8'} + + path-browserify@1.0.1: + resolution: {integrity: sha512-b7uo2UCUOYZcnF/3ID0lulOJi/bafxa1xPe7ZPsammBSpjSWQkjNxlt635YGS2MiR9GjvuXCtz2emr3jbsz98g==} + + path-exists@4.0.0: + resolution: {integrity: sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==} + engines: {node: '>=8'} + + path-key@3.1.1: + resolution: {integrity: sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==} + engines: {node: '>=8'} + + path-key@4.0.0: + resolution: {integrity: sha512-haREypq7xkM7ErfgIyA0z+Bj4AGKlMSdlQE2jvJo6huWD1EdkKYV+G/T4nq0YEF2vgTT8kqMFKo1uHn950r4SQ==} + engines: {node: '>=12'} + + path-to-regexp@6.3.0: + resolution: {integrity: sha512-Yhpw4T9C6hPpgPeA28us07OJeqZ5EzQTkbfwuhsUg0c237RomFoETJgmp2sa3F/41gfLE6G5cqcYwznmeEeOlQ==} + + path-to-regexp@8.4.1: + resolution: {integrity: sha512-fvU78fIjZ+SBM9YwCknCvKOUKkLVqtWDVctl0s7xIqfmfb38t2TT4ZU2gHm+Z8xGwgW+QWEU3oQSAzIbo89Ggw==} + + pathe@2.0.3: + resolution: {integrity: sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w==} + + perfect-debounce@2.1.0: + resolution: {integrity: sha512-LjgdTytVFXeUgtHZr9WYViYSM/g8MkcTPYDlPa3cDqMirHjKiSZPYd6DoL7pK8AJQr+uWkQvCjHNdiMqsrJs+g==} + + picocolors@1.1.1: + resolution: {integrity: sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==} + + picomatch@2.3.2: + resolution: {integrity: sha512-V7+vQEJ06Z+c5tSye8S+nHUfI51xoXIXjHQ99cQtKUkQqqO1kO/KCJUfZXuB47h/YBlDhah2H3hdUGXn8ie0oA==} + engines: {node: '>=8.6'} + + picomatch@4.0.4: + resolution: {integrity: sha512-QP88BAKvMam/3NxH6vj2o21R6MjxZUAd6nlwAS/pnGvN9IVLocLHxGYIzFhg6fUQ+5th6P4dv4eW9jX3DSIj7A==} + engines: {node: '>=12'} + + pkce-challenge@5.0.1: + resolution: {integrity: 
sha512-wQ0b/W4Fr01qtpHlqSqspcj3EhBvimsdh0KlHhH8HRZnMsEa0ea2fTULOXOS9ccQr3om+GcGRk4e+isrZWV8qQ==} + engines: {node: '>=16.20.0'} + + pkg-types@2.3.0: + resolution: {integrity: sha512-SIqCzDRg0s9npO5XQ3tNZioRY1uK06lA41ynBC1YmFTmnY6FjUjVt6s4LoADmwoig1qqD0oK8h1p/8mlMx8Oig==} + + postcss-selector-parser@7.1.1: + resolution: {integrity: sha512-orRsuYpJVw8LdAwqqLykBj9ecS5/cRHlI5+nvTo8LcCKmzDmqVORXtOIYEEQuL9D4BxtA1lm5isAqzQZCoQ6Eg==} + engines: {node: '>=4'} + + postcss@8.5.8: + resolution: {integrity: sha512-OW/rX8O/jXnm82Ey1k44pObPtdblfiuWnrd8X7GJ7emImCOstunGbXUpp7HdBrFQX6rJzn3sPT397Wp5aCwCHg==} + engines: {node: ^10 || ^12 || >=14} + + powershell-utils@0.1.0: + resolution: {integrity: sha512-dM0jVuXJPsDN6DvRpea484tCUaMiXWjuCn++HGTqUWzGDjv5tZkEZldAJ/UMlqRYGFrD/etByo4/xOuC/snX2A==} + engines: {node: '>=20'} + + prelude-ls@1.2.1: + resolution: {integrity: sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==} + engines: {node: '>= 0.8.0'} + + pretty-ms@9.3.0: + resolution: {integrity: sha512-gjVS5hOP+M3wMm5nmNOucbIrqudzs9v/57bWRHQWLYklXqoXKrVfYW2W9+glfGsqtPgpiz5WwyEEB+ksXIx3gQ==} + engines: {node: '>=18'} + + prompts@2.4.2: + resolution: {integrity: sha512-NxNv/kLguCA7p3jE8oL2aEBsrJWgAakBpgmgK6lpPWV+WuOmY6r2/zbAVnP+T8bQlA0nzHXSJSJW0Hq7ylaD2Q==} + engines: {node: '>= 6'} + + property-information@7.1.0: + resolution: {integrity: sha512-TwEZ+X+yCJmYfL7TPUOcvBZ4QfoT5YenQiJuX//0th53DE6w0xxLEtfK3iyryQFddXuvkIk51EEgrJQ0WJkOmQ==} + + proxy-addr@2.0.7: + resolution: {integrity: sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg==} + engines: {node: '>= 0.10'} + + proxy-from-env@2.1.0: + resolution: {integrity: sha512-cJ+oHTW1VAEa8cJslgmUZrc+sjRKgAKl3Zyse6+PV38hZe/V6Z14TbCuXcan9F9ghlz4QrFr2c92TNF82UkYHA==} + engines: {node: '>=10'} + + punycode@2.3.1: + resolution: {integrity: sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==} + engines: {node: '>=6'} + 
+ qs@6.15.0: + resolution: {integrity: sha512-mAZTtNCeetKMH+pSjrb76NAM8V9a05I9aBZOHztWy/UqcJdQYNsf59vrRKWnojAT9Y+GbIvoTBC++CPHqpDBhQ==} + engines: {node: '>=0.6'} + + queue-microtask@1.2.3: + resolution: {integrity: sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==} + + range-parser@1.2.1: + resolution: {integrity: sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==} + engines: {node: '>= 0.6'} + + raw-body@3.0.2: + resolution: {integrity: sha512-K5zQjDllxWkf7Z5xJdV0/B0WTNqx6vxG70zJE4N0kBs4LovmEYWJzQGxC9bS9RAKu3bgM40lrd5zoLJ12MQ5BA==} + engines: {node: '>= 0.10'} + + rc9@2.1.2: + resolution: {integrity: sha512-btXCnMmRIBINM2LDZoEmOogIZU7Qe7zn4BpomSKZ/ykbLObuBdvG+mFq11DL6fjH1DRwHhrlgtYWG96bJiC7Cg==} + + react-day-picker@9.14.0: + resolution: {integrity: sha512-tBaoDWjPwe0M5pGrum4H0SR6Lyk+BO9oHnp9JbKpGKW2mlraNPgP9BMfsg5pWpwrssARmeqk7YBl2oXutZTaHA==} + engines: {node: '>=18'} + peerDependencies: + react: '>=16.8.0' + + react-dom@19.2.4: + resolution: {integrity: sha512-AXJdLo8kgMbimY95O2aKQqsz2iWi9jMgKJhRBAxECE4IFxfcazB2LmzloIoibJI3C12IlY20+KFaLv+71bUJeQ==} + peerDependencies: + react: ^19.2.4 + + react-is@19.2.4: + resolution: {integrity: sha512-W+EWGn2v0ApPKgKKCy/7s7WHXkboGcsrXE+2joLyVxkbyVQfO3MUEaUQDHoSmb8TFFrSKYa9mw64WZHNHSDzYA==} + + react-markdown@10.1.0: + resolution: {integrity: sha512-qKxVopLT/TyA6BX3Ue5NwabOsAzm0Q7kAPwq6L+wWDwisYs7R8vZ0nRXqq6rkueboxpkjvLGU9fWifiX/ZZFxQ==} + peerDependencies: + '@types/react': '>=18' + react: '>=18' + + react-redux@9.2.0: + resolution: {integrity: sha512-ROY9fvHhwOD9ySfrF0wmvu//bKCQ6AeZZq1nJNtbDC+kk5DuSuNX/n6YWYF/SYy7bSba4D4FSz8DJeKY/S/r+g==} + peerDependencies: + '@types/react': ^18.2.25 || ^19 + react: ^18.0 || ^19 + redux: ^5.0.0 + peerDependenciesMeta: + '@types/react': + optional: true + redux: + optional: true + + react-remove-scroll-bar@2.3.8: + resolution: {integrity: 
sha512-9r+yi9+mgU33AKcj6IbT9oRCO78WriSj6t/cF8DWBZJ9aOGPOTEDvdUDz1FwKim7QXWwmHqtdHnRJfhAxEG46Q==} + engines: {node: '>=10'} + peerDependencies: + '@types/react': '*' + react: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 + peerDependenciesMeta: + '@types/react': + optional: true + + react-remove-scroll@2.7.2: + resolution: {integrity: sha512-Iqb9NjCCTt6Hf+vOdNIZGdTiH1QSqr27H/Ek9sv/a97gfueI/5h1s3yRi1nngzMUaOOToin5dI1dXKdXiF+u0Q==} + engines: {node: '>=10'} + peerDependencies: + '@types/react': '*' + react: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + + react-resizable-panels@4.8.0: + resolution: {integrity: sha512-2uEABkewb3ky/ZgIlAUxWa1W/LjsK494fdV1QsXxst7CDRHCzo7h22tWWu3NNaBjmiuriOCt3CvhipnaYcpoIw==} + peerDependencies: + react: ^18.0.0 || ^19.0.0 + react-dom: ^18.0.0 || ^19.0.0 + + react-router-dom@7.13.2: + resolution: {integrity: sha512-aR7SUORwTqAW0JDeiWF07e9SBE9qGpByR9I8kJT5h/FrBKxPMS6TiC7rmVO+gC0q52Bx7JnjWe8Z1sR9faN4YA==} + engines: {node: '>=20.0.0'} + peerDependencies: + react: '>=18' + react-dom: '>=18' + + react-router@7.13.2: + resolution: {integrity: sha512-tX1Aee+ArlKQP+NIUd7SE6Li+CiGKwQtbS+FfRxPX6Pe4vHOo6nr9d++u5cwg+Z8K/x8tP+7qLmujDtfrAoUJA==} + engines: {node: '>=20.0.0'} + peerDependencies: + react: '>=18' + react-dom: '>=18' + peerDependenciesMeta: + react-dom: + optional: true + + react-style-singleton@2.2.3: + resolution: {integrity: sha512-b6jSvxvVnyptAiLjbkWLE/lOnR4lfTtDAl+eUC7RZy+QQWc6wRzIV2CE6xBuMmDxc2qIihtDCZD5NPOFl7fRBQ==} + engines: {node: '>=10'} + peerDependencies: + '@types/react': '*' + react: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + + react@19.2.4: + resolution: {integrity: sha512-9nfp2hYpCwOjAN+8TZFGhtWEwgvWHXqESH8qT89AT/lWklpLON22Lc8pEtnpsZz7VmawabSU0gCjnj8aC0euHQ==} + engines: {node: '>=0.10.0'} + + readdirp@5.0.0: + resolution: {integrity: 
sha512-9u/XQ1pvrQtYyMpZe7DXKv2p5CNvyVwzUB6uhLAnQwHMSgKMBR62lc7AHljaeteeHXn11XTAaLLUVZYVZyuRBQ==} + engines: {node: '>= 20.19.0'} + + recast@0.23.11: + resolution: {integrity: sha512-YTUo+Flmw4ZXiWfQKGcwwc11KnoRAYgzAE2E7mXKCjSviTKShtxBsN6YUUBB2gtaBzKzeKunxhUwNHQuRryhWA==} + engines: {node: '>= 4'} + + recharts@3.8.0: + resolution: {integrity: sha512-Z/m38DX3L73ExO4Tpc9/iZWHmHnlzWG4njQbxsF5aSjwqmHNDDIm0rdEBArkwsBvR8U6EirlEHiQNYWCVh9sGQ==} + engines: {node: '>=18'} + peerDependencies: + react: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 + react-dom: ^16.0.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 + react-is: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 + + redux-thunk@3.1.0: + resolution: {integrity: sha512-NW2r5T6ksUKXCabzhL9z+h206HQw/NJkcLm1GPImRQ8IzfXwRGqjVhKJGauHirT0DAuyy6hjdnMZaRoAcy0Klw==} + peerDependencies: + redux: ^5.0.0 + + redux@5.0.1: + resolution: {integrity: sha512-M9/ELqF6fy8FwmkpnF0S3YKOqMyoWJ4+CS5Efg2ct3oY9daQvd/Pc71FpGZsVsbl3Cpb+IIcjBDUnnyBdQbq4w==} + + regex-recursion@5.1.1: + resolution: {integrity: sha512-ae7SBCbzVNrIjgSbh7wMznPcQel1DNlDtzensnFxpiNpXt1U2ju/bHugH422r+4LAVS1FpW1YCwilmnNsjum9w==} + + regex-utilities@2.3.0: + resolution: {integrity: sha512-8VhliFJAWRaUiVvREIiW2NXXTmHs4vMNnSzuJVhscgmGav3g9VDxLrQndI3dZZVVdp0ZO/5v0xmX516/7M9cng==} + + regex@5.1.1: + resolution: {integrity: sha512-dN5I359AVGPnwzJm2jN1k0W9LPZ+ePvoOeVMMfqIMFz53sSwXkxaJoxr50ptnsC771lK95BnTrVSZxq0b9yCGw==} + + remark-gfm@4.0.1: + resolution: {integrity: sha512-1quofZ2RQ9EWdeN34S79+KExV1764+wCUGop5CPL1WGdD0ocPpu91lzPGbwWMECpEpd42kJGQwzRfyov9j4yNg==} + + remark-parse@11.0.0: + resolution: {integrity: sha512-FCxlKLNGknS5ba/1lmpYijMUzX2esxW5xQqjWxw2eHFfS2MSdaHVINFmhjo+qN1WhZhNimq0dZATN9pH0IDrpA==} + + remark-rehype@11.1.2: + resolution: {integrity: sha512-Dh7l57ianaEoIpzbp0PC9UKAdCSVklD8E5Rpw7ETfbTl3FqcOOgq5q2LVDhgGCkaBv7p24JXikPdvhhmHvKMsw==} + + remark-stringify@11.0.0: + resolution: {integrity: sha512-1OSmLd3awB/t8qdoEOMazZkNsfVTeY4fTsgzcQFdXNq8ToTN4ZGwrMnlda4K6smTFKD+GRV6O48i6Z4iKgPPpw==} 
+ + remeda@2.33.7: + resolution: {integrity: sha512-cXlyjevWx5AcslOUEETG4o8XYi9UkoCXcJmj7XhPFVbla+ITuOBxv6ijBrmbeg+ZhzmDThkNdO+iXKUfrJep1w==} + + require-directory@2.1.1: + resolution: {integrity: sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==} + engines: {node: '>=0.10.0'} + + require-from-string@2.0.2: + resolution: {integrity: sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==} + engines: {node: '>=0.10.0'} + + reselect@5.1.1: + resolution: {integrity: sha512-K/BG6eIky/SBpzfHZv/dd+9JBFiS4SWV7FIujVyJRux6e45+73RaUHXLmIR1f7WOMaQ0U1km6qwklRQxpJJY0w==} + + resolve-from@4.0.0: + resolution: {integrity: sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==} + engines: {node: '>=4'} + + resolve-pkg-maps@1.0.0: + resolution: {integrity: sha512-seS2Tj26TBVOC2NIc2rOe2y2ZO7efxITtLZcGSOnHHNOQ7CkiUBfw0Iw2ck6xkIhPwLhKNLS8BO+hEpngQlqzw==} + + restore-cursor@5.1.0: + resolution: {integrity: sha512-oMA2dcrw6u0YfxJQXm342bFKX/E4sG9rbTzO9ptUcR/e8A33cHuvStiYOwH7fszkZlZ1z/ta9AAoPk2F4qIOHA==} + engines: {node: '>=18'} + + rettime@0.10.1: + resolution: {integrity: sha512-uyDrIlUEH37cinabq0AX4QbgV4HbFZ/gqoiunWQ1UqBtRvTTytwhNYjE++pO/MjPTZL5KQCf2bEoJ/BJNVQ5Kw==} + + reusify@1.1.0: + resolution: {integrity: sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw==} + engines: {iojs: '>=1.0.0', node: '>=0.10.0'} + + rolldown@1.0.0-rc.12: + resolution: {integrity: sha512-yP4USLIMYrwpPHEFB5JGH1uxhcslv6/hL0OyvTuY+3qlOSJvZ7ntYnoWpehBxufkgN0cvXxppuTu5hHa/zPh+A==} + engines: {node: ^20.19.0 || >=22.12.0} + hasBin: true + + router@2.2.0: + resolution: {integrity: sha512-nLTrUKm2UyiL7rlhapu/Zl45FwNgkZGaCpZbIHajDYgwlJCOzLSk+cIPAnsEqV955GjILJnKbdQC1nVPz+gAYQ==} + engines: {node: '>= 18'} + + run-applescript@7.1.0: + resolution: {integrity: 
sha512-DPe5pVFaAsinSaV6QjQ6gdiedWDcRCbUuiQfQa2wmWV7+xC9bGulGI8+TdRmoFkAPaBXk8CrAbnlY2ISniJ47Q==} + engines: {node: '>=18'} + + run-parallel@1.2.0: + resolution: {integrity: sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==} + + safer-buffer@2.1.2: + resolution: {integrity: sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==} + + scheduler@0.27.0: + resolution: {integrity: sha512-eNv+WrVbKu1f3vbYJT/xtiF5syA5HPIMtf9IgY/nKg0sWqzAUEvqY/xm7OcZc/qafLx/iO9FgOmeSAp4v5ti/Q==} + + semver@6.3.1: + resolution: {integrity: sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==} + hasBin: true + + semver@7.7.3: + resolution: {integrity: sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q==} + engines: {node: '>=10'} + hasBin: true + + semver@7.7.4: + resolution: {integrity: sha512-vFKC2IEtQnVhpT78h1Yp8wzwrf8CM+MzKMHGJZfBtzhZNycRFnXsHk6E5TxIkkMsgNS7mdX3AGB7x2QM2di4lA==} + engines: {node: '>=10'} + hasBin: true + + send@1.2.1: + resolution: {integrity: sha512-1gnZf7DFcoIcajTjTwjwuDjzuz4PPcY2StKPlsGAQ1+YH20IRVrBaXSWmdjowTJ6u8Rc01PoYOGHXfP1mYcZNQ==} + engines: {node: '>= 18'} + + serve-static@2.2.1: + resolution: {integrity: sha512-xRXBn0pPqQTVQiC8wyQrKs2MOlX24zQ0POGaj0kultvoOCstBQM5yvOhAVSUwOMjQtTvsPWoNCHfPGwaaQJhTw==} + engines: {node: '>= 18'} + + set-cookie-parser@2.7.2: + resolution: {integrity: sha512-oeM1lpU/UvhTxw+g3cIfxXHyJRc/uidd3yK1P242gzHds0udQBYzs3y8j4gCCW+ZJ7ad0yctld8RYO+bdurlvw==} + + setprototypeof@1.2.0: + resolution: {integrity: sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==} + + shadcn@4.1.1: + resolution: {integrity: sha512-nBj+7LYC9kzV9v9QmRPpoOhfW4KctJVQejywdAt/K+K+z4RYlJOcO2a4AaF7elrRWkfCbgXeGK02liV0KB9HvQ==} + hasBin: true + + shebang-command@2.0.0: + resolution: {integrity: 
sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==} + engines: {node: '>=8'} + + shebang-regex@3.0.0: + resolution: {integrity: sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==} + engines: {node: '>=8'} + + shiki@1.29.2: + resolution: {integrity: sha512-njXuliz/cP+67jU2hukkxCNuH1yUi4QfdZZY+sMr5PPrIyXSu5iTb/qYC4BiWWB0vZ+7TbdvYUCeL23zpwCfbg==} + + side-channel-list@1.0.0: + resolution: {integrity: sha512-FCLHtRD/gnpCiCHEiJLOwdmFP+wzCmDEkc9y7NsYxeF4u7Btsn1ZuwgwJGxImImHicJArLP4R0yX4c2KCrMrTA==} + engines: {node: '>= 0.4'} + + side-channel-map@1.0.1: + resolution: {integrity: sha512-VCjCNfgMsby3tTdo02nbjtM/ewra6jPHmpThenkTYh8pG9ucZ/1P8So4u4FGBek/BjpOVsDCMoLA/iuBKIFXRA==} + engines: {node: '>= 0.4'} + + side-channel-weakmap@1.0.2: + resolution: {integrity: sha512-WPS/HvHQTYnHisLo9McqBHOJk2FkHO/tlpvldyrnem4aeQp4hai3gythswg6p01oSoTl58rcpiFAjF2br2Ak2A==} + engines: {node: '>= 0.4'} + + side-channel@1.1.0: + resolution: {integrity: sha512-ZX99e6tRweoUXqR+VBrslhda51Nh5MTQwou5tnUDgbtyM0dBgmhEDtWGP/xbKn6hqfPRHujUNwz5fy/wbbhnpw==} + engines: {node: '>= 0.4'} + + signal-exit@3.0.7: + resolution: {integrity: sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==} + + signal-exit@4.1.0: + resolution: {integrity: sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==} + engines: {node: '>=14'} + + sisteransi@1.0.5: + resolution: {integrity: sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg==} + + sonner@2.0.7: + resolution: {integrity: sha512-W6ZN4p58k8aDKA4XPcx2hpIQXBRAgyiWVkYhT7CvK6D3iAu7xjvVyhQHg2/iaKJZ1XVJ4r7XuwGL+WGEK37i9w==} + peerDependencies: + react: ^18.0.0 || ^19.0.0 || ^19.0.0-rc + react-dom: ^18.0.0 || ^19.0.0 || ^19.0.0-rc + + source-map-js@1.2.1: + resolution: {integrity: 
sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==} + engines: {node: '>=0.10.0'} + + source-map@0.6.1: + resolution: {integrity: sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==} + engines: {node: '>=0.10.0'} + + space-separated-tokens@2.0.2: + resolution: {integrity: sha512-PEGlAwrG8yXGXRjW32fGbg66JAlOAwbObuqVoJpv/mRgoWDQfgH1wDPvtzWyUSNAXBGSk8h755YDbbcEy3SH2Q==} + + statuses@2.0.2: + resolution: {integrity: sha512-DvEy55V3DB7uknRo+4iOGT5fP1slR8wQohVdknigZPMpMstaKJQWhwiYBACJE3Ul2pTnATihhBYnRhZQHGBiRw==} + engines: {node: '>= 0.8'} + + stdin-discarder@0.2.2: + resolution: {integrity: sha512-UhDfHmA92YAlNnCfhmq0VeNL5bDbiZGg7sZ2IvPsXubGkiNa9EC+tUTsjBRsYUAz87btI6/1wf4XoVvQ3uRnmQ==} + engines: {node: '>=18'} + + strict-event-emitter@0.5.1: + resolution: {integrity: sha512-vMgjE/GGEPEFnhFub6pa4FmJBRBVOLpIII2hvCZ8Kzb7K0hlHo7mQv6xYrBvCL2LtAIBwFUK8wvuJgTVSQ5MFQ==} + + string-width@4.2.3: + resolution: {integrity: sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==} + engines: {node: '>=8'} + + string-width@7.2.0: + resolution: {integrity: sha512-tsaTIkKW9b4N+AEj+SVA+WhJzV7/zMhcSu78mLKWSk7cXMOSHsBKFWUs0fWwq8QyK3MgJBQRX6Gbi4kYbdvGkQ==} + engines: {node: '>=18'} + + stringify-entities@4.0.4: + resolution: {integrity: sha512-IwfBptatlO+QCJUo19AqvrPNqlVMpW9YEL2LIVY+Rpv2qsjCGxaDLNRgeGsQWJhfItebuJhsGSLjaBbNSQ+ieg==} + + stringify-object@5.0.0: + resolution: {integrity: sha512-zaJYxz2FtcMb4f+g60KsRNFOpVMUyuJgA51Zi5Z1DOTC3S59+OQiVOzE9GZt0x72uBGWKsQIuBKeF9iusmKFsg==} + engines: {node: '>=14.16'} + + strip-ansi@6.0.1: + resolution: {integrity: sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==} + engines: {node: '>=8'} + + strip-ansi@7.2.0: + resolution: {integrity: sha512-yDPMNjp4WyfYBkHnjIRLfca1i6KMyGCtsVgoKe/z1+6vukgaENdgGBZt+ZmKPc4gavvEZ5OgHfHdrazhgNyG7w==} + engines: {node: '>=12'} + + 
strip-bom@3.0.0: + resolution: {integrity: sha512-vavAMRXOgBVNF6nyEEmL3DBK19iRpDcoIwW+swQ+CbGiu7lju6t+JklA1MHweoWtadgt4ISVUsXLyDq34ddcwA==} + engines: {node: '>=4'} + + strip-final-newline@2.0.0: + resolution: {integrity: sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==} + engines: {node: '>=6'} + + strip-final-newline@4.0.0: + resolution: {integrity: sha512-aulFJcD6YK8V1G7iRB5tigAP4TsHBZZrOV8pjV++zdUwmeV8uzbY7yn6h9MswN62adStNZFuCIx4haBnRuMDaw==} + engines: {node: '>=18'} + + strip-json-comments@3.1.1: + resolution: {integrity: sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==} + engines: {node: '>=8'} + + style-to-js@1.1.21: + resolution: {integrity: sha512-RjQetxJrrUJLQPHbLku6U/ocGtzyjbJMP9lCNK7Ag0CNh690nSH8woqWH9u16nMjYBAok+i7JO1NP2pOy8IsPQ==} + + style-to-object@1.0.14: + resolution: {integrity: sha512-LIN7rULI0jBscWQYaSswptyderlarFkjQ+t79nzty8tcIAceVomEVlLzH5VP4Cmsv6MtKhs7qaAiwlcp+Mgaxw==} + + supports-color@7.2.0: + resolution: {integrity: sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==} + engines: {node: '>=8'} + + tabbable@6.4.0: + resolution: {integrity: sha512-05PUHKSNE8ou2dwIxTngl4EzcnsCDZGJ/iCLtDflR/SHB/ny14rXc+qU5P4mG9JkusiV7EivzY9Mhm55AzAvCg==} + + tagged-tag@1.0.0: + resolution: {integrity: sha512-yEFYrVhod+hdNyx7g5Bnkkb0G6si8HJurOoOEgC8B/O0uXLHlaey/65KRv6cuWBNhBgHKAROVpc7QyYqE5gFng==} + engines: {node: '>=20'} + + tailwind-merge@3.5.0: + resolution: {integrity: sha512-I8K9wewnVDkL1NTGoqWmVEIlUcB9gFriAEkXkfCjX5ib8ezGxtR3xD7iZIxrfArjEsH7F1CHD4RFUtxefdqV/A==} + + tailwindcss@4.2.2: + resolution: {integrity: sha512-KWBIxs1Xb6NoLdMVqhbhgwZf2PGBpPEiwOqgI4pFIYbNTfBXiKYyWoTsXgBQ9WFg/OlhnvHaY+AEpW7wSmFo2Q==} + + tapable@2.3.2: + resolution: {integrity: sha512-1MOpMXuhGzGL5TTCZFItxCc0AARf1EZFQkGqMm7ERKj8+Hgr5oLvJOVFcC+lRmR8hCe2S3jC4T5D7Vg/d7/fhA==} + engines: {node: '>=6'} + + tiny-invariant@1.3.3: + resolution: 
{integrity: sha512-+FbBPE1o9QAYvviau/qC5SE3caw21q3xkvWKBtja5vgqOWIHHJ3ioaq1VPfn/Szqctz2bU/oYeKd9/z5BL+PVg==} + + tinyexec@1.0.4: + resolution: {integrity: sha512-u9r3uZC0bdpGOXtlxUIdwf9pkmvhqJdrVCH9fapQtgy/OeTTMZ1nqH7agtvEfmGui6e1XxjcdrlxvxJvc3sMqw==} + engines: {node: '>=18'} + + tinyglobby@0.2.15: + resolution: {integrity: sha512-j2Zq4NyQYG5XMST4cbs02Ak8iJUdxRM0XI5QyxXuZOzKOINmWurp3smXu3y5wDcJrptwpSjgXHzIQxR0omXljQ==} + engines: {node: '>=12.0.0'} + + tldts-core@7.0.27: + resolution: {integrity: sha512-YQ7uPjgWUibIK6DW5lrKujGwUKhLevU4hcGbP5O6TcIUb+oTjJYJVWPS4nZsIHrEEEG6myk/oqAJUEQmpZrHsg==} + + tldts@7.0.27: + resolution: {integrity: sha512-I4FZcVFcqCRuT0ph6dCDpPuO4Xgzvh+spkcTr1gK7peIvxWauoloVO0vuy1FQnijT63ss6AsHB6+OIM4aXHbPg==} + hasBin: true + + to-regex-range@5.0.1: + resolution: {integrity: sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==} + engines: {node: '>=8.0'} + + toidentifier@1.0.1: + resolution: {integrity: sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==} + engines: {node: '>=0.6'} + + tough-cookie@6.0.1: + resolution: {integrity: sha512-LktZQb3IeoUWB9lqR5EWTHgW/VTITCXg4D21M+lvybRVdylLrRMnqaIONLVb5mav8vM19m44HIcGq4qASeu2Qw==} + engines: {node: '>=16'} + + trim-lines@3.0.1: + resolution: {integrity: sha512-kRj8B+YHZCc9kQYdWfJB2/oUl9rA99qbowYYBtr4ui4mZyAQ2JpvVBd/6U2YloATfqBhBTSMhTpgBHtU0Mf3Rg==} + + trough@2.2.0: + resolution: {integrity: sha512-tmMpK00BjZiUyVyvrBK7knerNgmgvcV/KLVyuma/SC+TQN167GrMRciANTz09+k3zW8L8t60jWO1GpfkZdjTaw==} + + ts-api-utils@2.5.0: + resolution: {integrity: sha512-OJ/ibxhPlqrMM0UiNHJ/0CKQkoKF243/AEmplt3qpRgkW8VG7IfOS41h7V8TjITqdByHzrjcS/2si+y4lIh8NA==} + engines: {node: '>=18.12'} + peerDependencies: + typescript: '>=4.8.4' + + ts-morph@26.0.0: + resolution: {integrity: sha512-ztMO++owQnz8c/gIENcM9XfCEzgoGphTv+nKpYNM1bgsdOVC/jRZuEBf6N+mLLDNg68Kl+GgUZfOySaRiG1/Ug==} + + tsconfig-paths@4.2.0: + resolution: {integrity: 
sha512-NoZ4roiN7LnbKn9QqE1amc9DJfzvZXxF4xDavcOWt1BPkdx+m+0gJuPM+S0vCe7zTJMYUP0R8pO2XMr+Y8oLIg==} + engines: {node: '>=6'} + + tslib@2.8.1: + resolution: {integrity: sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==} + + tw-animate-css@1.4.0: + resolution: {integrity: sha512-7bziOlRqH0hJx80h/3mbicLW7o8qLsH5+RaLR2t+OHM3D0JlWGODQKQ4cxbK7WlvmUxpcj6Kgu6EKqjrGFe3QQ==} + + type-check@0.4.0: + resolution: {integrity: sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==} + engines: {node: '>= 0.8.0'} + + type-fest@5.5.0: + resolution: {integrity: sha512-PlBfpQwiUvGViBNX84Yxwjsdhd1TUlXr6zjX7eoirtCPIr08NAmxwa+fcYBTeRQxHo9YC9wwF3m9i700sHma8g==} + engines: {node: '>=20'} + + type-is@2.0.1: + resolution: {integrity: sha512-OZs6gsjF4vMp32qrCbiVSkrFmXtG/AZhY3t0iAMrMBiAZyV9oALtXO8hsrHbMXF9x6L3grlFuwW2oAz7cav+Gw==} + engines: {node: '>= 0.6'} + + typescript-eslint@8.58.0: + resolution: {integrity: sha512-e2TQzKfaI85fO+F3QywtX+tCTsu/D3WW5LVU6nz8hTFKFZ8yBJ6mSYRpXqdR3mFjPWmO0eWsTa5f+UpAOe/FMA==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + peerDependencies: + eslint: ^8.57.0 || ^9.0.0 || ^10.0.0 + typescript: '>=4.8.4 <6.1.0' + + typescript@5.9.3: + resolution: {integrity: sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==} + engines: {node: '>=14.17'} + hasBin: true + + undici-types@7.16.0: + resolution: {integrity: sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw==} + + unicorn-magic@0.3.0: + resolution: {integrity: sha512-+QBBXBCvifc56fsbuxZQ6Sic3wqqc3WWaqxs58gvJrcOuN83HGTCwz3oS5phzU9LthRNE9VrJCFCLUgHeeFnfA==} + engines: {node: '>=18'} + + unified@11.0.5: + resolution: {integrity: sha512-xKvGhPWw3k84Qjh8bI3ZeJjqnyadK+GEFtazSfZv/rKeTkTjOJho6mFqh2SM96iIcZokxiOpg78GazTSg8+KHA==} + + unist-util-is@6.0.1: + resolution: {integrity: 
sha512-LsiILbtBETkDz8I9p1dQ0uyRUWuaQzd/cuEeS1hoRSyW5E5XGmTzlwY1OrNzzakGowI9Dr/I8HVaw4hTtnxy8g==} + + unist-util-position@5.0.0: + resolution: {integrity: sha512-fucsC7HjXvkB5R3kTCO7kUjRdrS0BJt3M/FPxmHMBOm8JQi2BsHAHFsy27E0EolP8rp0NzXsJ+jNPyDWvOJZPA==} + + unist-util-stringify-position@4.0.0: + resolution: {integrity: sha512-0ASV06AAoKCDkS2+xw5RXJywruurpbC4JZSm7nr7MOt1ojAzvyyaO+UxZf18j8FCF6kmzCZKcAgN/yu2gm2XgQ==} + + unist-util-visit-parents@6.0.2: + resolution: {integrity: sha512-goh1s1TBrqSqukSc8wrjwWhL0hiJxgA8m4kFxGlQ+8FYQ3C/m11FcTs4YYem7V664AhHVvgoQLk890Ssdsr2IQ==} + + unist-util-visit@5.1.0: + resolution: {integrity: sha512-m+vIdyeCOpdr/QeQCu2EzxX/ohgS8KbnPDgFni4dQsfSCtpz8UqDyY5GjRru8PDKuYn7Fq19j1CQ+nJSsGKOzg==} + + universalify@2.0.1: + resolution: {integrity: sha512-gptHNQghINnc/vTGIk0SOFGFNXw7JVrlRUtConJRlvaw6DuX0wO5Jeko9sWrMBhh+PsYAZ7oXAiOnf/UKogyiw==} + engines: {node: '>= 10.0.0'} + + unpipe@1.0.0: + resolution: {integrity: sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ==} + engines: {node: '>= 0.8'} + + until-async@3.0.2: + resolution: {integrity: sha512-IiSk4HlzAMqTUseHHe3VhIGyuFmN90zMTpD3Z3y8jeQbzLIq500MVM7Jq2vUAnTKAFPJrqwkzr6PoTcPhGcOiw==} + + update-browserslist-db@1.2.3: + resolution: {integrity: sha512-Js0m9cx+qOgDxo0eMiFGEueWztz+d4+M3rGlmKPT+T4IS/jP4ylw3Nwpu6cpTTP8R1MAC1kF4VbdLt3ARf209w==} + hasBin: true + peerDependencies: + browserslist: '>= 4.21.0' + + uri-js@4.4.1: + resolution: {integrity: sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==} + + use-callback-ref@1.3.3: + resolution: {integrity: sha512-jQL3lRnocaFtu3V00JToYz/4QkNWswxijDaCVNZRiRTO3HQDLsdu1ZtmIUvV4yPp+rvWm5j0y0TG/S61cuijTg==} + engines: {node: '>=10'} + peerDependencies: + '@types/react': '*' + react: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + + use-sidecar@1.1.3: + resolution: {integrity: 
sha512-Fedw0aZvkhynoPYlA5WXrMCAMm+nSWdZt6lzJQ7Ok8S6Q+VsHmHpRWndVRJ8Be0ZbkfPc5LRYH+5XrzXcEeLRQ==} + engines: {node: '>=10'} + peerDependencies: + '@types/react': '*' + react: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + + use-sync-external-store@1.6.0: + resolution: {integrity: sha512-Pp6GSwGP/NrPIrxVFAIkOQeyw8lFenOHijQWkUTrDvrF4ALqylP2C/KCkeS9dpUM3KvYRQhna5vt7IL95+ZQ9w==} + peerDependencies: + react: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 + + util-deprecate@1.0.2: + resolution: {integrity: sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==} + + uuid@13.0.0: + resolution: {integrity: sha512-XQegIaBTVUjSHliKqcnFqYypAd4S+WCYt5NIeRs6w/UAry7z8Y9j5ZwRRL4kzq9U3sD6v+85er9FvkEaBpji2w==} + hasBin: true + + validate-npm-package-name@7.0.2: + resolution: {integrity: sha512-hVDIBwsRruT73PbK7uP5ebUt+ezEtCmzZz3F59BSr2F6OVFnJ/6h8liuvdLrQ88Xmnk6/+xGGuq+pG9WwTuy3A==} + engines: {node: ^20.17.0 || >=22.9.0} + + vary@1.1.2: + resolution: {integrity: sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg==} + engines: {node: '>= 0.8'} + + vaul@1.1.2: + resolution: {integrity: sha512-ZFkClGpWyI2WUQjdLJ/BaGuV6AVQiJ3uELGk3OYtP+B6yCO7Cmn9vPFXVJkRaGkOJu3m8bQMgtyzNHixULceQA==} + peerDependencies: + react: ^16.8 || ^17.0 || ^18.0 || ^19.0.0 || ^19.0.0-rc + react-dom: ^16.8 || ^17.0 || ^18.0 || ^19.0.0 || ^19.0.0-rc + + vfile-message@4.0.3: + resolution: {integrity: sha512-QTHzsGd1EhbZs4AsQ20JX1rC3cOlt/IWJruk893DfLRr57lcnOeMaWG4K0JrRta4mIJZKth2Au3mM3u03/JWKw==} + + vfile@6.0.3: + resolution: {integrity: sha512-KzIbH/9tXat2u30jf+smMwFCsno4wHVdNmzFyL+T/L3UGqqk6JKfVqOFOZEpZSHADH1k40ab6NUIXZq422ov3Q==} + + victory-vendor@37.3.6: + resolution: {integrity: sha512-SbPDPdDBYp+5MJHhBCAyI7wKM3d5ivekigc2Dk2s7pgbZ9wIgIBYGVw4zGHBml/qTFbexrofXW6Gu4noGxrOwQ==} + + vite@8.0.3: + resolution: {integrity: 
sha512-B9ifbFudT1TFhfltfaIPgjo9Z3mDynBTJSUYxTjOQruf/zHH+ezCQKcoqO+h7a9Pw9Nm/OtlXAiGT1axBgwqrQ==} + engines: {node: ^20.19.0 || >=22.12.0} + hasBin: true + peerDependencies: + '@types/node': ^20.19.0 || >=22.12.0 + '@vitejs/devtools': ^0.1.0 + esbuild: ^0.27.0 + jiti: '>=1.21.0' + less: ^4.0.0 + sass: ^1.70.0 + sass-embedded: ^1.70.0 + stylus: '>=0.54.8' + sugarss: ^5.0.0 + terser: ^5.16.0 + tsx: ^4.8.1 + yaml: ^2.4.2 + peerDependenciesMeta: + '@types/node': + optional: true + '@vitejs/devtools': + optional: true + esbuild: + optional: true + jiti: + optional: true + less: + optional: true + sass: + optional: true + sass-embedded: + optional: true + stylus: + optional: true + sugarss: + optional: true + terser: + optional: true + tsx: + optional: true + yaml: + optional: true + + web-streams-polyfill@3.3.3: + resolution: {integrity: sha512-d2JWLCivmZYTSIoge9MsgFCZrt571BikcWGYkjC1khllbTeDlGqZ2D8vD8E/lJa8WGWbb7Plm8/XJYV7IJHZZw==} + engines: {node: '>= 8'} + + which@2.0.2: + resolution: {integrity: sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==} + engines: {node: '>= 8'} + hasBin: true + + which@4.0.0: + resolution: {integrity: sha512-GlaYyEb07DPxYCKhKzplCWBJtvxZcZMrL+4UkrTSJHHPyZU4mYYTv3qaOe77H7EODLSSopAUFAc6W8U4yqvscg==} + engines: {node: ^16.13.0 || >=18.0.0} + hasBin: true + + word-wrap@1.2.5: + resolution: {integrity: sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA==} + engines: {node: '>=0.10.0'} + + wrap-ansi@6.2.0: + resolution: {integrity: sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==} + engines: {node: '>=8'} + + wrap-ansi@7.0.0: + resolution: {integrity: sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==} + engines: {node: '>=10'} + + wrappy@1.0.2: + resolution: {integrity: sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==} + 
+ wsl-utils@0.3.1: + resolution: {integrity: sha512-g/eziiSUNBSsdDJtCLB8bdYEUMj4jR7AGeUo96p/3dTafgjHhpF4RiCFPiRILwjQoDXx5MqkBr4fwWtR3Ky4Wg==} + engines: {node: '>=20'} + + y18n@5.0.8: + resolution: {integrity: sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==} + engines: {node: '>=10'} + + yallist@3.1.1: + resolution: {integrity: sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==} + + yargs-parser@21.1.1: + resolution: {integrity: sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==} + engines: {node: '>=12'} + + yargs@17.7.2: + resolution: {integrity: sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==} + engines: {node: '>=12'} + + yocto-queue@0.1.0: + resolution: {integrity: sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==} + engines: {node: '>=10'} + + yoctocolors-cjs@2.1.3: + resolution: {integrity: sha512-U/PBtDf35ff0D8X8D0jfdzHYEPFxAI7jJlxZXwCSez5M3190m+QobIfh+sWDWSHMCWWJN2AWamkegn6vr6YBTw==} + engines: {node: '>=18'} + + yoctocolors@2.1.2: + resolution: {integrity: sha512-CzhO+pFNo8ajLM2d2IW/R93ipy99LWjtwblvC1RsoSUMZgyLbYFr221TnSNT7GjGdYui6P459mw9JH/g/zW2ug==} + engines: {node: '>=18'} + + zod-to-json-schema@3.25.2: + resolution: {integrity: sha512-O/PgfnpT1xKSDeQYSCfRI5Gy3hPf91mKVDuYLUHZJMiDFptvP41MSnWofm8dnCm0256ZNfZIM7DSzuSMAFnjHA==} + peerDependencies: + zod: ^3.25.28 || ^4 + + zod-validation-error@4.0.2: + resolution: {integrity: sha512-Q6/nZLe6jxuU80qb/4uJ4t5v2VEZ44lzQjPDhYJNztRQ4wyWc6VF3D3Kb/fAuPetZQnhS3hnajCf9CsWesghLQ==} + engines: {node: '>=18.0.0'} + peerDependencies: + zod: ^3.25.0 || ^4.0.0 + + zod@3.25.76: + resolution: {integrity: sha512-gzUt/qt81nXsFGKIFcC3YnfEAx5NkunCfnDlvuBSSFS02bcXu4Lmea0AFIUwbLWxWPx3d9p8S5QoaujKcNQxcQ==} + + zod@4.3.6: + resolution: {integrity: 
sha512-rftlrkhHZOcjDwkGlnUtZZkvaPHCsDATp4pGpuOOMDaTdDDXF91wuVDJoWoPsKX/3YPQ5fHuF3STjcYyKr+Qhg==} + + zustand@5.0.12: + resolution: {integrity: sha512-i77ae3aZq4dhMlRhJVCYgMLKuSiZAaUPAct2AksxQ+gOtimhGMdXljRT21P5BNpeT4kXlLIckvkPM029OljD7g==} + engines: {node: '>=12.20.0'} + peerDependencies: + '@types/react': '>=18.0.0' + immer: '>=9.0.6' + react: '>=18.0.0' + use-sync-external-store: '>=1.2.0' + peerDependenciesMeta: + '@types/react': + optional: true + immer: + optional: true + react: + optional: true + use-sync-external-store: + optional: true + + zwitch@2.0.4: + resolution: {integrity: sha512-bXE4cR/kVZhKZX/RjPEflHaKVhUVl85noU3v6b8apfQEc1x4A+zBxjZ4lN8LqGd6WZ3dl98pY4o717VFmoPp+A==} + +snapshots: + + '@babel/code-frame@7.29.0': + dependencies: + '@babel/helper-validator-identifier': 7.28.5 + js-tokens: 4.0.0 + picocolors: 1.1.1 + + '@babel/compat-data@7.29.0': {} + + '@babel/core@7.29.0': + dependencies: + '@babel/code-frame': 7.29.0 + '@babel/generator': 7.29.1 + '@babel/helper-compilation-targets': 7.28.6 + '@babel/helper-module-transforms': 7.28.6(@babel/core@7.29.0) + '@babel/helpers': 7.29.2 + '@babel/parser': 7.29.2 + '@babel/template': 7.28.6 + '@babel/traverse': 7.29.0 + '@babel/types': 7.29.0 + '@jridgewell/remapping': 2.3.5 + convert-source-map: 2.0.0 + debug: 4.4.3 + gensync: 1.0.0-beta.2 + json5: 2.2.3 + semver: 6.3.1 + transitivePeerDependencies: + - supports-color + + '@babel/generator@7.29.1': + dependencies: + '@babel/parser': 7.29.2 + '@babel/types': 7.29.0 + '@jridgewell/gen-mapping': 0.3.13 + '@jridgewell/trace-mapping': 0.3.31 + jsesc: 3.1.0 + + '@babel/helper-annotate-as-pure@7.27.3': + dependencies: + '@babel/types': 7.29.0 + + '@babel/helper-compilation-targets@7.28.6': + dependencies: + '@babel/compat-data': 7.29.0 + '@babel/helper-validator-option': 7.27.1 + browserslist: 4.28.1 + lru-cache: 5.1.1 + semver: 6.3.1 + + '@babel/helper-create-class-features-plugin@7.28.6(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + 
'@babel/helper-annotate-as-pure': 7.27.3 + '@babel/helper-member-expression-to-functions': 7.28.5 + '@babel/helper-optimise-call-expression': 7.27.1 + '@babel/helper-replace-supers': 7.28.6(@babel/core@7.29.0) + '@babel/helper-skip-transparent-expression-wrappers': 7.27.1 + '@babel/traverse': 7.29.0 + semver: 6.3.1 + transitivePeerDependencies: + - supports-color + + '@babel/helper-globals@7.28.0': {} + + '@babel/helper-member-expression-to-functions@7.28.5': + dependencies: + '@babel/traverse': 7.29.0 + '@babel/types': 7.29.0 + transitivePeerDependencies: + - supports-color + + '@babel/helper-module-imports@7.28.6': + dependencies: + '@babel/traverse': 7.29.0 + '@babel/types': 7.29.0 + transitivePeerDependencies: + - supports-color + + '@babel/helper-module-transforms@7.28.6(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + '@babel/helper-module-imports': 7.28.6 + '@babel/helper-validator-identifier': 7.28.5 + '@babel/traverse': 7.29.0 + transitivePeerDependencies: + - supports-color + + '@babel/helper-optimise-call-expression@7.27.1': + dependencies: + '@babel/types': 7.29.0 + + '@babel/helper-plugin-utils@7.28.6': {} + + '@babel/helper-replace-supers@7.28.6(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + '@babel/helper-member-expression-to-functions': 7.28.5 + '@babel/helper-optimise-call-expression': 7.27.1 + '@babel/traverse': 7.29.0 + transitivePeerDependencies: + - supports-color + + '@babel/helper-skip-transparent-expression-wrappers@7.27.1': + dependencies: + '@babel/traverse': 7.29.0 + '@babel/types': 7.29.0 + transitivePeerDependencies: + - supports-color + + '@babel/helper-string-parser@7.27.1': {} + + '@babel/helper-validator-identifier@7.28.5': {} + + '@babel/helper-validator-option@7.27.1': {} + + '@babel/helpers@7.29.2': + dependencies: + '@babel/template': 7.28.6 + '@babel/types': 7.29.0 + + '@babel/parser@7.29.2': + dependencies: + '@babel/types': 7.29.0 + + '@babel/plugin-syntax-jsx@7.28.6(@babel/core@7.29.0)': + 
dependencies: + '@babel/core': 7.29.0 + '@babel/helper-plugin-utils': 7.28.6 + + '@babel/plugin-syntax-typescript@7.28.6(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + '@babel/helper-plugin-utils': 7.28.6 + + '@babel/plugin-transform-modules-commonjs@7.28.6(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + '@babel/helper-module-transforms': 7.28.6(@babel/core@7.29.0) + '@babel/helper-plugin-utils': 7.28.6 + transitivePeerDependencies: + - supports-color + + '@babel/plugin-transform-typescript@7.28.6(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + '@babel/helper-annotate-as-pure': 7.27.3 + '@babel/helper-create-class-features-plugin': 7.28.6(@babel/core@7.29.0) + '@babel/helper-plugin-utils': 7.28.6 + '@babel/helper-skip-transparent-expression-wrappers': 7.27.1 + '@babel/plugin-syntax-typescript': 7.28.6(@babel/core@7.29.0) + transitivePeerDependencies: + - supports-color + + '@babel/preset-typescript@7.28.5(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + '@babel/helper-plugin-utils': 7.28.6 + '@babel/helper-validator-option': 7.27.1 + '@babel/plugin-syntax-jsx': 7.28.6(@babel/core@7.29.0) + '@babel/plugin-transform-modules-commonjs': 7.28.6(@babel/core@7.29.0) + '@babel/plugin-transform-typescript': 7.28.6(@babel/core@7.29.0) + transitivePeerDependencies: + - supports-color + + '@babel/runtime@7.29.2': {} + + '@babel/template@7.28.6': + dependencies: + '@babel/code-frame': 7.29.0 + '@babel/parser': 7.29.2 + '@babel/types': 7.29.0 + + '@babel/traverse@7.29.0': + dependencies: + '@babel/code-frame': 7.29.0 + '@babel/generator': 7.29.1 + '@babel/helper-globals': 7.28.0 + '@babel/parser': 7.29.2 + '@babel/template': 7.28.6 + '@babel/types': 7.29.0 + debug: 4.4.3 + transitivePeerDependencies: + - supports-color + + '@babel/types@7.29.0': + dependencies: + '@babel/helper-string-parser': 7.27.1 + '@babel/helper-validator-identifier': 7.28.5 + + 
'@base-ui/react@1.3.0(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)': + dependencies: + '@babel/runtime': 7.29.2 + '@base-ui/utils': 0.2.6(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@floating-ui/react-dom': 2.1.8(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@floating-ui/utils': 0.2.11 + react: 19.2.4 + react-dom: 19.2.4(react@19.2.4) + tabbable: 6.4.0 + use-sync-external-store: 1.6.0(react@19.2.4) + optionalDependencies: + '@types/react': 19.2.14 + + '@base-ui/utils@0.2.6(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)': + dependencies: + '@babel/runtime': 7.29.2 + '@floating-ui/utils': 0.2.11 + react: 19.2.4 + react-dom: 19.2.4(react@19.2.4) + reselect: 5.1.1 + use-sync-external-store: 1.6.0(react@19.2.4) + optionalDependencies: + '@types/react': 19.2.14 + + '@date-fns/tz@1.4.1': {} + + '@dnd-kit/accessibility@3.1.1(react@19.2.4)': + dependencies: + react: 19.2.4 + tslib: 2.8.1 + + '@dnd-kit/core@6.3.1(react-dom@19.2.4(react@19.2.4))(react@19.2.4)': + dependencies: + '@dnd-kit/accessibility': 3.1.1(react@19.2.4) + '@dnd-kit/utilities': 3.2.2(react@19.2.4) + react: 19.2.4 + react-dom: 19.2.4(react@19.2.4) + tslib: 2.8.1 + + '@dnd-kit/sortable@10.0.0(@dnd-kit/core@6.3.1(react-dom@19.2.4(react@19.2.4))(react@19.2.4))(react@19.2.4)': + dependencies: + '@dnd-kit/core': 6.3.1(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@dnd-kit/utilities': 3.2.2(react@19.2.4) + react: 19.2.4 + tslib: 2.8.1 + + '@dnd-kit/utilities@3.2.2(react@19.2.4)': + dependencies: + react: 19.2.4 + tslib: 2.8.1 + + '@dotenvx/dotenvx@1.59.1': + dependencies: + commander: 11.1.0 + dotenv: 17.3.1 + eciesjs: 0.4.18 + execa: 5.1.1 + fdir: 6.5.0(picomatch@4.0.4) + ignore: 5.3.2 + object-treeify: 1.1.33 + picomatch: 4.0.4 + which: 4.0.0 + + '@ecies/ciphers@0.2.5(@noble/ciphers@1.3.0)': + dependencies: + '@noble/ciphers': 1.3.0 + + '@emnapi/core@1.9.1': + dependencies: + '@emnapi/wasi-threads': 1.2.0 + tslib: 2.8.1 + optional: 
true + + '@emnapi/runtime@1.9.1': + dependencies: + tslib: 2.8.1 + optional: true + + '@emnapi/wasi-threads@1.2.0': + dependencies: + tslib: 2.8.1 + optional: true + + '@eslint-community/eslint-utils@4.9.1(eslint@9.39.4(jiti@2.6.1))': + dependencies: + eslint: 9.39.4(jiti@2.6.1) + eslint-visitor-keys: 3.4.3 + + '@eslint-community/regexpp@4.12.2': {} + + '@eslint/config-array@0.21.2': + dependencies: + '@eslint/object-schema': 2.1.7 + debug: 4.4.3 + minimatch: 3.1.5 + transitivePeerDependencies: + - supports-color + + '@eslint/config-helpers@0.4.2': + dependencies: + '@eslint/core': 0.17.0 + + '@eslint/core@0.17.0': + dependencies: + '@types/json-schema': 7.0.15 + + '@eslint/eslintrc@3.3.5': + dependencies: + ajv: 6.14.0 + debug: 4.4.3 + espree: 10.4.0 + globals: 14.0.0 + ignore: 5.3.2 + import-fresh: 3.3.1 + js-yaml: 4.1.1 + minimatch: 3.1.5 + strip-json-comments: 3.1.1 + transitivePeerDependencies: + - supports-color + + '@eslint/js@9.39.4': {} + + '@eslint/object-schema@2.1.7': {} + + '@eslint/plugin-kit@0.4.1': + dependencies: + '@eslint/core': 0.17.0 + levn: 0.4.1 + + '@floating-ui/core@1.7.5': + dependencies: + '@floating-ui/utils': 0.2.11 + + '@floating-ui/dom@1.7.6': + dependencies: + '@floating-ui/core': 1.7.5 + '@floating-ui/utils': 0.2.11 + + '@floating-ui/react-dom@2.1.8(react-dom@19.2.4(react@19.2.4))(react@19.2.4)': + dependencies: + '@floating-ui/dom': 1.7.6 + react: 19.2.4 + react-dom: 19.2.4(react@19.2.4) + + '@floating-ui/utils@0.2.11': {} + + '@fontsource-variable/geist@5.2.8': {} + + '@gitgraph/core@1.5.0': {} + + '@gitgraph/react@1.6.0(react@19.2.4)': + dependencies: + '@gitgraph/core': 1.5.0 + react: 19.2.4 + + '@hey-api/codegen-core@0.7.4': + dependencies: + '@hey-api/types': 0.1.4 + ansi-colors: 4.1.3 + c12: 3.3.3 + color-support: 1.1.3 + transitivePeerDependencies: + - magicast + + '@hey-api/json-schema-ref-parser@1.3.1': + dependencies: + '@jsdevtools/ono': 7.1.3 + '@types/json-schema': 7.0.15 + js-yaml: 4.1.1 + + 
'@hey-api/openapi-ts@0.94.5(typescript@5.9.3)': + dependencies: + '@hey-api/codegen-core': 0.7.4 + '@hey-api/json-schema-ref-parser': 1.3.1 + '@hey-api/shared': 0.2.6 + '@hey-api/spec-types': 0.1.0 + '@hey-api/types': 0.1.4 + ansi-colors: 4.1.3 + color-support: 1.1.3 + commander: 14.0.3 + get-tsconfig: 4.13.6 + typescript: 5.9.3 + transitivePeerDependencies: + - magicast + + '@hey-api/shared@0.2.6': + dependencies: + '@hey-api/codegen-core': 0.7.4 + '@hey-api/json-schema-ref-parser': 1.3.1 + '@hey-api/spec-types': 0.1.0 + '@hey-api/types': 0.1.4 + ansi-colors: 4.1.3 + cross-spawn: 7.0.6 + open: 11.0.0 + semver: 7.7.3 + transitivePeerDependencies: + - magicast + + '@hey-api/spec-types@0.1.0': + dependencies: + '@hey-api/types': 0.1.4 + + '@hey-api/types@0.1.4': {} + + '@hono/node-server@1.19.12(hono@4.12.9)': + dependencies: + hono: 4.12.9 + + '@humanfs/core@0.19.1': {} + + '@humanfs/node@0.16.7': + dependencies: + '@humanfs/core': 0.19.1 + '@humanwhocodes/retry': 0.4.3 + + '@humanwhocodes/module-importer@1.0.1': {} + + '@humanwhocodes/retry@0.4.3': {} + + '@inquirer/ansi@1.0.2': {} + + '@inquirer/confirm@5.1.21(@types/node@24.12.0)': + dependencies: + '@inquirer/core': 10.3.2(@types/node@24.12.0) + '@inquirer/type': 3.0.10(@types/node@24.12.0) + optionalDependencies: + '@types/node': 24.12.0 + + '@inquirer/core@10.3.2(@types/node@24.12.0)': + dependencies: + '@inquirer/ansi': 1.0.2 + '@inquirer/figures': 1.0.15 + '@inquirer/type': 3.0.10(@types/node@24.12.0) + cli-width: 4.1.0 + mute-stream: 2.0.0 + signal-exit: 4.1.0 + wrap-ansi: 6.2.0 + yoctocolors-cjs: 2.1.3 + optionalDependencies: + '@types/node': 24.12.0 + + '@inquirer/figures@1.0.15': {} + + '@inquirer/type@3.0.10(@types/node@24.12.0)': + optionalDependencies: + '@types/node': 24.12.0 + + '@jridgewell/gen-mapping@0.3.13': + dependencies: + '@jridgewell/sourcemap-codec': 1.5.5 + '@jridgewell/trace-mapping': 0.3.31 + + '@jridgewell/remapping@2.3.5': + dependencies: + '@jridgewell/gen-mapping': 0.3.13 + 
'@jridgewell/trace-mapping': 0.3.31 + + '@jridgewell/resolve-uri@3.1.2': {} + + '@jridgewell/sourcemap-codec@1.5.5': {} + + '@jridgewell/trace-mapping@0.3.31': + dependencies: + '@jridgewell/resolve-uri': 3.1.2 + '@jridgewell/sourcemap-codec': 1.5.5 + + '@jsdevtools/ono@7.1.3': {} + + '@modelcontextprotocol/sdk@1.29.0(zod@3.25.76)': + dependencies: + '@hono/node-server': 1.19.12(hono@4.12.9) + ajv: 8.18.0 + ajv-formats: 3.0.1(ajv@8.18.0) + content-type: 1.0.5 + cors: 2.8.6 + cross-spawn: 7.0.6 + eventsource: 3.0.7 + eventsource-parser: 3.0.6 + express: 5.2.1 + express-rate-limit: 8.3.2(express@5.2.1) + hono: 4.12.9 + jose: 6.2.2 + json-schema-typed: 8.0.2 + pkce-challenge: 5.0.1 + raw-body: 3.0.2 + zod: 3.25.76 + zod-to-json-schema: 3.25.2(zod@3.25.76) + transitivePeerDependencies: + - supports-color + + '@mswjs/interceptors@0.41.3': + dependencies: + '@open-draft/deferred-promise': 2.2.0 + '@open-draft/logger': 0.3.0 + '@open-draft/until': 2.1.0 + is-node-process: 1.2.0 + outvariant: 1.4.3 + strict-event-emitter: 0.5.1 + + '@napi-rs/wasm-runtime@1.1.2(@emnapi/core@1.9.1)(@emnapi/runtime@1.9.1)': + dependencies: + '@emnapi/core': 1.9.1 + '@emnapi/runtime': 1.9.1 + '@tybys/wasm-util': 0.10.1 + optional: true + + '@noble/ciphers@1.3.0': {} + + '@noble/curves@1.9.7': + dependencies: + '@noble/hashes': 1.8.0 + + '@noble/hashes@1.8.0': {} + + '@nodelib/fs.scandir@2.1.5': + dependencies: + '@nodelib/fs.stat': 2.0.5 + run-parallel: 1.2.0 + + '@nodelib/fs.stat@2.0.5': {} + + '@nodelib/fs.walk@1.2.8': + dependencies: + '@nodelib/fs.scandir': 2.1.5 + fastq: 1.20.1 + + '@open-draft/deferred-promise@2.2.0': {} + + '@open-draft/logger@0.3.0': + dependencies: + is-node-process: 1.2.0 + outvariant: 1.4.3 + + '@open-draft/until@2.1.0': {} + + '@oxc-project/types@0.122.0': {} + + '@radix-ui/primitive@1.1.3': {} + + '@radix-ui/react-compose-refs@1.1.2(@types/react@19.2.14)(react@19.2.4)': + dependencies: + react: 19.2.4 + optionalDependencies: + '@types/react': 19.2.14 + + 
'@radix-ui/react-context@1.1.2(@types/react@19.2.14)(react@19.2.4)': + dependencies: + react: 19.2.4 + optionalDependencies: + '@types/react': 19.2.14 + + '@radix-ui/react-dialog@1.1.15(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)': + dependencies: + '@radix-ui/primitive': 1.1.3 + '@radix-ui/react-compose-refs': 1.1.2(@types/react@19.2.14)(react@19.2.4) + '@radix-ui/react-context': 1.1.2(@types/react@19.2.14)(react@19.2.4) + '@radix-ui/react-dismissable-layer': 1.1.11(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@radix-ui/react-focus-guards': 1.1.3(@types/react@19.2.14)(react@19.2.4) + '@radix-ui/react-focus-scope': 1.1.7(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@radix-ui/react-id': 1.1.1(@types/react@19.2.14)(react@19.2.4) + '@radix-ui/react-portal': 1.1.9(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@radix-ui/react-presence': 1.1.5(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@radix-ui/react-slot': 1.2.3(@types/react@19.2.14)(react@19.2.4) + '@radix-ui/react-use-controllable-state': 1.2.2(@types/react@19.2.14)(react@19.2.4) + aria-hidden: 1.2.6 + react: 19.2.4 + react-dom: 19.2.4(react@19.2.4) + react-remove-scroll: 2.7.2(@types/react@19.2.14)(react@19.2.4) + optionalDependencies: + '@types/react': 19.2.14 + '@types/react-dom': 19.2.3(@types/react@19.2.14) + + '@radix-ui/react-dismissable-layer@1.1.11(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)': + dependencies: + 
'@radix-ui/primitive': 1.1.3 + '@radix-ui/react-compose-refs': 1.1.2(@types/react@19.2.14)(react@19.2.4) + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@radix-ui/react-use-callback-ref': 1.1.1(@types/react@19.2.14)(react@19.2.4) + '@radix-ui/react-use-escape-keydown': 1.1.1(@types/react@19.2.14)(react@19.2.4) + react: 19.2.4 + react-dom: 19.2.4(react@19.2.4) + optionalDependencies: + '@types/react': 19.2.14 + '@types/react-dom': 19.2.3(@types/react@19.2.14) + + '@radix-ui/react-focus-guards@1.1.3(@types/react@19.2.14)(react@19.2.4)': + dependencies: + react: 19.2.4 + optionalDependencies: + '@types/react': 19.2.14 + + '@radix-ui/react-focus-scope@1.1.7(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)': + dependencies: + '@radix-ui/react-compose-refs': 1.1.2(@types/react@19.2.14)(react@19.2.4) + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@radix-ui/react-use-callback-ref': 1.1.1(@types/react@19.2.14)(react@19.2.4) + react: 19.2.4 + react-dom: 19.2.4(react@19.2.4) + optionalDependencies: + '@types/react': 19.2.14 + '@types/react-dom': 19.2.3(@types/react@19.2.14) + + '@radix-ui/react-id@1.1.1(@types/react@19.2.14)(react@19.2.4)': + dependencies: + '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@19.2.14)(react@19.2.4) + react: 19.2.4 + optionalDependencies: + '@types/react': 19.2.14 + + '@radix-ui/react-portal@1.1.9(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)': + dependencies: + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@19.2.14)(react@19.2.4) + 
react: 19.2.4 + react-dom: 19.2.4(react@19.2.4) + optionalDependencies: + '@types/react': 19.2.14 + '@types/react-dom': 19.2.3(@types/react@19.2.14) + + '@radix-ui/react-presence@1.1.5(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)': + dependencies: + '@radix-ui/react-compose-refs': 1.1.2(@types/react@19.2.14)(react@19.2.4) + '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@19.2.14)(react@19.2.4) + react: 19.2.4 + react-dom: 19.2.4(react@19.2.4) + optionalDependencies: + '@types/react': 19.2.14 + '@types/react-dom': 19.2.3(@types/react@19.2.14) + + '@radix-ui/react-primitive@2.1.3(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)': + dependencies: + '@radix-ui/react-slot': 1.2.3(@types/react@19.2.14)(react@19.2.4) + react: 19.2.4 + react-dom: 19.2.4(react@19.2.4) + optionalDependencies: + '@types/react': 19.2.14 + '@types/react-dom': 19.2.3(@types/react@19.2.14) + + '@radix-ui/react-primitive@2.1.4(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)': + dependencies: + '@radix-ui/react-slot': 1.2.4(@types/react@19.2.14)(react@19.2.4) + react: 19.2.4 + react-dom: 19.2.4(react@19.2.4) + optionalDependencies: + '@types/react': 19.2.14 + '@types/react-dom': 19.2.3(@types/react@19.2.14) + + '@radix-ui/react-slot@1.2.3(@types/react@19.2.14)(react@19.2.4)': + dependencies: + '@radix-ui/react-compose-refs': 1.1.2(@types/react@19.2.14)(react@19.2.4) + react: 19.2.4 + optionalDependencies: + '@types/react': 19.2.14 + + '@radix-ui/react-slot@1.2.4(@types/react@19.2.14)(react@19.2.4)': + dependencies: + '@radix-ui/react-compose-refs': 1.1.2(@types/react@19.2.14)(react@19.2.4) + react: 19.2.4 + optionalDependencies: + '@types/react': 19.2.14 + + '@radix-ui/react-use-callback-ref@1.1.1(@types/react@19.2.14)(react@19.2.4)': + dependencies: + react: 19.2.4 + optionalDependencies: + 
'@types/react': 19.2.14 + + '@radix-ui/react-use-controllable-state@1.2.2(@types/react@19.2.14)(react@19.2.4)': + dependencies: + '@radix-ui/react-use-effect-event': 0.0.2(@types/react@19.2.14)(react@19.2.4) + '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@19.2.14)(react@19.2.4) + react: 19.2.4 + optionalDependencies: + '@types/react': 19.2.14 + + '@radix-ui/react-use-effect-event@0.0.2(@types/react@19.2.14)(react@19.2.4)': + dependencies: + '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@19.2.14)(react@19.2.4) + react: 19.2.4 + optionalDependencies: + '@types/react': 19.2.14 + + '@radix-ui/react-use-escape-keydown@1.1.1(@types/react@19.2.14)(react@19.2.4)': + dependencies: + '@radix-ui/react-use-callback-ref': 1.1.1(@types/react@19.2.14)(react@19.2.4) + react: 19.2.4 + optionalDependencies: + '@types/react': 19.2.14 + + '@radix-ui/react-use-layout-effect@1.1.1(@types/react@19.2.14)(react@19.2.4)': + dependencies: + react: 19.2.4 + optionalDependencies: + '@types/react': 19.2.14 + + '@reduxjs/toolkit@2.11.2(react-redux@9.2.0(@types/react@19.2.14)(react@19.2.4)(redux@5.0.1))(react@19.2.4)': + dependencies: + '@standard-schema/spec': 1.1.0 + '@standard-schema/utils': 0.3.0 + immer: 11.1.4 + redux: 5.0.1 + redux-thunk: 3.1.0(redux@5.0.1) + reselect: 5.1.1 + optionalDependencies: + react: 19.2.4 + react-redux: 9.2.0(@types/react@19.2.14)(react@19.2.4)(redux@5.0.1) + + '@rolldown/binding-android-arm64@1.0.0-rc.12': + optional: true + + '@rolldown/binding-darwin-arm64@1.0.0-rc.12': + optional: true + + '@rolldown/binding-darwin-x64@1.0.0-rc.12': + optional: true + + '@rolldown/binding-freebsd-x64@1.0.0-rc.12': + optional: true + + '@rolldown/binding-linux-arm-gnueabihf@1.0.0-rc.12': + optional: true + + '@rolldown/binding-linux-arm64-gnu@1.0.0-rc.12': + optional: true + + '@rolldown/binding-linux-arm64-musl@1.0.0-rc.12': + optional: true + + '@rolldown/binding-linux-ppc64-gnu@1.0.0-rc.12': + optional: true + + 
'@rolldown/binding-linux-s390x-gnu@1.0.0-rc.12': + optional: true + + '@rolldown/binding-linux-x64-gnu@1.0.0-rc.12': + optional: true + + '@rolldown/binding-linux-x64-musl@1.0.0-rc.12': + optional: true + + '@rolldown/binding-openharmony-arm64@1.0.0-rc.12': + optional: true + + '@rolldown/binding-wasm32-wasi@1.0.0-rc.12(@emnapi/core@1.9.1)(@emnapi/runtime@1.9.1)': + dependencies: + '@napi-rs/wasm-runtime': 1.1.2(@emnapi/core@1.9.1)(@emnapi/runtime@1.9.1) + transitivePeerDependencies: + - '@emnapi/core' + - '@emnapi/runtime' + optional: true + + '@rolldown/binding-win32-arm64-msvc@1.0.0-rc.12': + optional: true + + '@rolldown/binding-win32-x64-msvc@1.0.0-rc.12': + optional: true + + '@rolldown/pluginutils@1.0.0-rc.12': {} + + '@rolldown/pluginutils@1.0.0-rc.7': {} + + '@sec-ant/readable-stream@0.4.1': {} + + '@shikijs/core@1.29.2': + dependencies: + '@shikijs/engine-javascript': 1.29.2 + '@shikijs/engine-oniguruma': 1.29.2 + '@shikijs/types': 1.29.2 + '@shikijs/vscode-textmate': 10.0.2 + '@types/hast': 3.0.4 + hast-util-to-html: 9.0.5 + + '@shikijs/engine-javascript@1.29.2': + dependencies: + '@shikijs/types': 1.29.2 + '@shikijs/vscode-textmate': 10.0.2 + oniguruma-to-es: 2.3.0 + + '@shikijs/engine-oniguruma@1.29.2': + dependencies: + '@shikijs/types': 1.29.2 + '@shikijs/vscode-textmate': 10.0.2 + + '@shikijs/langs@1.29.2': + dependencies: + '@shikijs/types': 1.29.2 + + '@shikijs/themes@1.29.2': + dependencies: + '@shikijs/types': 1.29.2 + + '@shikijs/types@1.29.2': + dependencies: + '@shikijs/vscode-textmate': 10.0.2 + '@types/hast': 3.0.4 + + '@shikijs/vscode-textmate@10.0.2': {} + + '@sindresorhus/merge-streams@4.0.0': {} + + '@standard-schema/spec@1.1.0': {} + + '@standard-schema/utils@0.3.0': {} + + '@swc/core-darwin-arm64@1.15.21': + optional: true + + '@swc/core-darwin-x64@1.15.21': + optional: true + + '@swc/core-linux-arm-gnueabihf@1.15.21': + optional: true + + '@swc/core-linux-arm64-gnu@1.15.21': + optional: true + + '@swc/core-linux-arm64-musl@1.15.21': 
+ optional: true + + '@swc/core-linux-ppc64-gnu@1.15.21': + optional: true + + '@swc/core-linux-s390x-gnu@1.15.21': + optional: true + + '@swc/core-linux-x64-gnu@1.15.21': + optional: true + + '@swc/core-linux-x64-musl@1.15.21': + optional: true + + '@swc/core-win32-arm64-msvc@1.15.21': + optional: true + + '@swc/core-win32-ia32-msvc@1.15.21': + optional: true + + '@swc/core-win32-x64-msvc@1.15.21': + optional: true + + '@swc/core@1.15.21': + dependencies: + '@swc/counter': 0.1.3 + '@swc/types': 0.1.26 + optionalDependencies: + '@swc/core-darwin-arm64': 1.15.21 + '@swc/core-darwin-x64': 1.15.21 + '@swc/core-linux-arm-gnueabihf': 1.15.21 + '@swc/core-linux-arm64-gnu': 1.15.21 + '@swc/core-linux-arm64-musl': 1.15.21 + '@swc/core-linux-ppc64-gnu': 1.15.21 + '@swc/core-linux-s390x-gnu': 1.15.21 + '@swc/core-linux-x64-gnu': 1.15.21 + '@swc/core-linux-x64-musl': 1.15.21 + '@swc/core-win32-arm64-msvc': 1.15.21 + '@swc/core-win32-ia32-msvc': 1.15.21 + '@swc/core-win32-x64-msvc': 1.15.21 + + '@swc/counter@0.1.3': {} + + '@swc/types@0.1.26': + dependencies: + '@swc/counter': 0.1.3 + + '@tabby_ai/hijri-converter@1.0.5': {} + + '@tailwindcss/node@4.2.2': + dependencies: + '@jridgewell/remapping': 2.3.5 + enhanced-resolve: 5.20.1 + jiti: 2.6.1 + lightningcss: 1.32.0 + magic-string: 0.30.21 + source-map-js: 1.2.1 + tailwindcss: 4.2.2 + + '@tailwindcss/oxide-android-arm64@4.2.2': + optional: true + + '@tailwindcss/oxide-darwin-arm64@4.2.2': + optional: true + + '@tailwindcss/oxide-darwin-x64@4.2.2': + optional: true + + '@tailwindcss/oxide-freebsd-x64@4.2.2': + optional: true + + '@tailwindcss/oxide-linux-arm-gnueabihf@4.2.2': + optional: true + + '@tailwindcss/oxide-linux-arm64-gnu@4.2.2': + optional: true + + '@tailwindcss/oxide-linux-arm64-musl@4.2.2': + optional: true + + '@tailwindcss/oxide-linux-x64-gnu@4.2.2': + optional: true + + '@tailwindcss/oxide-linux-x64-musl@4.2.2': + optional: true + + '@tailwindcss/oxide-wasm32-wasi@4.2.2': + optional: true + + 
'@tailwindcss/oxide-win32-arm64-msvc@4.2.2': + optional: true + + '@tailwindcss/oxide-win32-x64-msvc@4.2.2': + optional: true + + '@tailwindcss/oxide@4.2.2': + optionalDependencies: + '@tailwindcss/oxide-android-arm64': 4.2.2 + '@tailwindcss/oxide-darwin-arm64': 4.2.2 + '@tailwindcss/oxide-darwin-x64': 4.2.2 + '@tailwindcss/oxide-freebsd-x64': 4.2.2 + '@tailwindcss/oxide-linux-arm-gnueabihf': 4.2.2 + '@tailwindcss/oxide-linux-arm64-gnu': 4.2.2 + '@tailwindcss/oxide-linux-arm64-musl': 4.2.2 + '@tailwindcss/oxide-linux-x64-gnu': 4.2.2 + '@tailwindcss/oxide-linux-x64-musl': 4.2.2 + '@tailwindcss/oxide-wasm32-wasi': 4.2.2 + '@tailwindcss/oxide-win32-arm64-msvc': 4.2.2 + '@tailwindcss/oxide-win32-x64-msvc': 4.2.2 + + '@tailwindcss/vite@4.2.2(vite@8.0.3(@emnapi/core@1.9.1)(@emnapi/runtime@1.9.1)(@types/node@24.12.0)(jiti@2.6.1))': + dependencies: + '@tailwindcss/node': 4.2.2 + '@tailwindcss/oxide': 4.2.2 + tailwindcss: 4.2.2 + vite: 8.0.3(@emnapi/core@1.9.1)(@emnapi/runtime@1.9.1)(@types/node@24.12.0)(jiti@2.6.1) + + '@tanstack/query-core@5.96.0': {} + + '@tanstack/react-query@5.96.0(react@19.2.4)': + dependencies: + '@tanstack/query-core': 5.96.0 + react: 19.2.4 + + '@tanstack/react-virtual@3.13.23(react-dom@19.2.4(react@19.2.4))(react@19.2.4)': + dependencies: + '@tanstack/virtual-core': 3.13.23 + react: 19.2.4 + react-dom: 19.2.4(react@19.2.4) + + '@tanstack/virtual-core@3.13.23': {} + + '@ts-morph/common@0.27.0': + dependencies: + fast-glob: 3.3.3 + minimatch: 10.2.5 + path-browserify: 1.0.1 + + '@tybys/wasm-util@0.10.1': + dependencies: + tslib: 2.8.1 + optional: true + + '@types/d3-array@3.2.2': {} + + '@types/d3-color@3.1.3': {} + + '@types/d3-ease@3.0.2': {} + + '@types/d3-interpolate@3.0.4': + dependencies: + '@types/d3-color': 3.1.3 + + '@types/d3-path@3.1.1': {} + + '@types/d3-scale@4.0.9': + dependencies: + '@types/d3-time': 3.0.4 + + '@types/d3-shape@3.1.8': + dependencies: + '@types/d3-path': 3.1.1 + + '@types/d3-time@3.0.4': {} + + '@types/d3-timer@3.0.2': 
{} + + '@types/debug@4.1.13': + dependencies: + '@types/ms': 2.1.0 + + '@types/estree-jsx@1.0.5': + dependencies: + '@types/estree': 1.0.8 + + '@types/estree@1.0.8': {} + + '@types/hast@3.0.4': + dependencies: + '@types/unist': 3.0.3 + + '@types/json-schema@7.0.15': {} + + '@types/mdast@4.0.4': + dependencies: + '@types/unist': 3.0.3 + + '@types/ms@2.1.0': {} + + '@types/node@24.12.0': + dependencies: + undici-types: 7.16.0 + + '@types/react-dom@19.2.3(@types/react@19.2.14)': + dependencies: + '@types/react': 19.2.14 + + '@types/react@19.2.14': + dependencies: + csstype: 3.2.3 + + '@types/statuses@2.0.6': {} + + '@types/unist@2.0.11': {} + + '@types/unist@3.0.3': {} + + '@types/use-sync-external-store@0.0.6': {} + + '@types/validate-npm-package-name@4.0.2': {} + + '@typescript-eslint/eslint-plugin@8.58.0(@typescript-eslint/parser@8.58.0(eslint@9.39.4(jiti@2.6.1))(typescript@5.9.3))(eslint@9.39.4(jiti@2.6.1))(typescript@5.9.3)': + dependencies: + '@eslint-community/regexpp': 4.12.2 + '@typescript-eslint/parser': 8.58.0(eslint@9.39.4(jiti@2.6.1))(typescript@5.9.3) + '@typescript-eslint/scope-manager': 8.58.0 + '@typescript-eslint/type-utils': 8.58.0(eslint@9.39.4(jiti@2.6.1))(typescript@5.9.3) + '@typescript-eslint/utils': 8.58.0(eslint@9.39.4(jiti@2.6.1))(typescript@5.9.3) + '@typescript-eslint/visitor-keys': 8.58.0 + eslint: 9.39.4(jiti@2.6.1) + ignore: 7.0.5 + natural-compare: 1.4.0 + ts-api-utils: 2.5.0(typescript@5.9.3) + typescript: 5.9.3 + transitivePeerDependencies: + - supports-color + + '@typescript-eslint/parser@8.58.0(eslint@9.39.4(jiti@2.6.1))(typescript@5.9.3)': + dependencies: + '@typescript-eslint/scope-manager': 8.58.0 + '@typescript-eslint/types': 8.58.0 + '@typescript-eslint/typescript-estree': 8.58.0(typescript@5.9.3) + '@typescript-eslint/visitor-keys': 8.58.0 + debug: 4.4.3 + eslint: 9.39.4(jiti@2.6.1) + typescript: 5.9.3 + transitivePeerDependencies: + - supports-color + + '@typescript-eslint/project-service@8.58.0(typescript@5.9.3)': + 
dependencies: + '@typescript-eslint/tsconfig-utils': 8.58.0(typescript@5.9.3) + '@typescript-eslint/types': 8.58.0 + debug: 4.4.3 + typescript: 5.9.3 + transitivePeerDependencies: + - supports-color + + '@typescript-eslint/scope-manager@8.58.0': + dependencies: + '@typescript-eslint/types': 8.58.0 + '@typescript-eslint/visitor-keys': 8.58.0 + + '@typescript-eslint/tsconfig-utils@8.58.0(typescript@5.9.3)': + dependencies: + typescript: 5.9.3 + + '@typescript-eslint/type-utils@8.58.0(eslint@9.39.4(jiti@2.6.1))(typescript@5.9.3)': + dependencies: + '@typescript-eslint/types': 8.58.0 + '@typescript-eslint/typescript-estree': 8.58.0(typescript@5.9.3) + '@typescript-eslint/utils': 8.58.0(eslint@9.39.4(jiti@2.6.1))(typescript@5.9.3) + debug: 4.4.3 + eslint: 9.39.4(jiti@2.6.1) + ts-api-utils: 2.5.0(typescript@5.9.3) + typescript: 5.9.3 + transitivePeerDependencies: + - supports-color + + '@typescript-eslint/types@8.58.0': {} + + '@typescript-eslint/typescript-estree@8.58.0(typescript@5.9.3)': + dependencies: + '@typescript-eslint/project-service': 8.58.0(typescript@5.9.3) + '@typescript-eslint/tsconfig-utils': 8.58.0(typescript@5.9.3) + '@typescript-eslint/types': 8.58.0 + '@typescript-eslint/visitor-keys': 8.58.0 + debug: 4.4.3 + minimatch: 10.2.5 + semver: 7.7.4 + tinyglobby: 0.2.15 + ts-api-utils: 2.5.0(typescript@5.9.3) + typescript: 5.9.3 + transitivePeerDependencies: + - supports-color + + '@typescript-eslint/utils@8.58.0(eslint@9.39.4(jiti@2.6.1))(typescript@5.9.3)': + dependencies: + '@eslint-community/eslint-utils': 4.9.1(eslint@9.39.4(jiti@2.6.1)) + '@typescript-eslint/scope-manager': 8.58.0 + '@typescript-eslint/types': 8.58.0 + '@typescript-eslint/typescript-estree': 8.58.0(typescript@5.9.3) + eslint: 9.39.4(jiti@2.6.1) + typescript: 5.9.3 + transitivePeerDependencies: + - supports-color + + '@typescript-eslint/visitor-keys@8.58.0': + dependencies: + '@typescript-eslint/types': 8.58.0 + eslint-visitor-keys: 5.0.1 + + '@ungap/structured-clone@1.3.0': {} + + 
'@vitejs/plugin-react-swc@4.3.0(vite@8.0.3(@emnapi/core@1.9.1)(@emnapi/runtime@1.9.1)(@types/node@24.12.0)(jiti@2.6.1))': + dependencies: + '@rolldown/pluginutils': 1.0.0-rc.7 + '@swc/core': 1.15.21 + vite: 8.0.3(@emnapi/core@1.9.1)(@emnapi/runtime@1.9.1)(@types/node@24.12.0)(jiti@2.6.1) + transitivePeerDependencies: + - '@swc/helpers' + + '@vitejs/plugin-react@6.0.1(vite@8.0.3(@emnapi/core@1.9.1)(@emnapi/runtime@1.9.1)(@types/node@24.12.0)(jiti@2.6.1))': + dependencies: + '@rolldown/pluginutils': 1.0.0-rc.7 + vite: 8.0.3(@emnapi/core@1.9.1)(@emnapi/runtime@1.9.1)(@types/node@24.12.0)(jiti@2.6.1) + + accepts@2.0.0: + dependencies: + mime-types: 3.0.2 + negotiator: 1.0.0 + + acorn-jsx@5.3.2(acorn@8.16.0): + dependencies: + acorn: 8.16.0 + + acorn@8.16.0: {} + + agent-base@7.1.4: {} + + ajv-formats@3.0.1(ajv@8.18.0): + optionalDependencies: + ajv: 8.18.0 + + ajv@6.14.0: + dependencies: + fast-deep-equal: 3.1.3 + fast-json-stable-stringify: 2.1.0 + json-schema-traverse: 0.4.1 + uri-js: 4.4.1 + + ajv@8.18.0: + dependencies: + fast-deep-equal: 3.1.3 + fast-uri: 3.1.0 + json-schema-traverse: 1.0.0 + require-from-string: 2.0.2 + + ansi-colors@4.1.3: {} + + ansi-regex@5.0.1: {} + + ansi-regex@6.2.2: {} + + ansi-styles@4.3.0: + dependencies: + color-convert: 2.0.1 + + argparse@2.0.1: {} + + aria-hidden@1.2.6: + dependencies: + tslib: 2.8.1 + + ast-types@0.16.1: + dependencies: + tslib: 2.8.1 + + asynckit@0.4.0: {} + + axios@1.14.0: + dependencies: + follow-redirects: 1.15.11 + form-data: 4.0.5 + proxy-from-env: 2.1.0 + transitivePeerDependencies: + - debug + + bail@2.0.2: {} + + balanced-match@1.0.2: {} + + balanced-match@4.0.4: {} + + baseline-browser-mapping@2.10.12: {} + + body-parser@2.2.2: + dependencies: + bytes: 3.1.2 + content-type: 1.0.5 + debug: 4.4.3 + http-errors: 2.0.1 + iconv-lite: 0.7.2 + on-finished: 2.4.1 + qs: 6.15.0 + raw-body: 3.0.2 + type-is: 2.0.1 + transitivePeerDependencies: + - supports-color + + brace-expansion@1.1.13: + dependencies: + 
balanced-match: 1.0.2 + concat-map: 0.0.1 + + brace-expansion@5.0.5: + dependencies: + balanced-match: 4.0.4 + + braces@3.0.3: + dependencies: + fill-range: 7.1.1 + + browserslist@4.28.1: + dependencies: + baseline-browser-mapping: 2.10.12 + caniuse-lite: 1.0.30001782 + electron-to-chromium: 1.5.329 + node-releases: 2.0.36 + update-browserslist-db: 1.2.3(browserslist@4.28.1) + + bundle-name@4.1.0: + dependencies: + run-applescript: 7.1.0 + + bytes@3.1.2: {} + + c12@3.3.3: + dependencies: + chokidar: 5.0.0 + confbox: 0.2.4 + defu: 6.1.4 + dotenv: 17.3.1 + exsolve: 1.0.8 + giget: 2.0.0 + jiti: 2.6.1 + ohash: 2.0.11 + pathe: 2.0.3 + perfect-debounce: 2.1.0 + pkg-types: 2.3.0 + rc9: 2.1.2 + + call-bind-apply-helpers@1.0.2: + dependencies: + es-errors: 1.3.0 + function-bind: 1.1.2 + + call-bound@1.0.4: + dependencies: + call-bind-apply-helpers: 1.0.2 + get-intrinsic: 1.3.0 + + callsites@3.1.0: {} + + caniuse-lite@1.0.30001782: {} + + ccount@2.0.1: {} + + chalk@4.1.2: + dependencies: + ansi-styles: 4.3.0 + supports-color: 7.2.0 + + chalk@5.6.2: {} + + character-entities-html4@2.1.0: {} + + character-entities-legacy@3.0.0: {} + + character-entities@2.0.2: {} + + character-reference-invalid@2.0.1: {} + + chokidar@5.0.0: + dependencies: + readdirp: 5.0.0 + + citty@0.1.6: + dependencies: + consola: 3.4.2 + + citty@0.2.1: {} + + class-variance-authority@0.7.1: + dependencies: + clsx: 2.1.1 + + cli-cursor@5.0.0: + dependencies: + restore-cursor: 5.1.0 + + cli-spinners@2.9.2: {} + + cli-width@4.1.0: {} + + cliui@8.0.1: + dependencies: + string-width: 4.2.3 + strip-ansi: 6.0.1 + wrap-ansi: 7.0.0 + + clsx@2.1.1: {} + + cmdk@1.1.1(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4): + dependencies: + '@radix-ui/react-compose-refs': 1.1.2(@types/react@19.2.14)(react@19.2.4) + '@radix-ui/react-dialog': 
1.1.15(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@radix-ui/react-id': 1.1.1(@types/react@19.2.14)(react@19.2.4) + '@radix-ui/react-primitive': 2.1.4(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + react: 19.2.4 + react-dom: 19.2.4(react@19.2.4) + transitivePeerDependencies: + - '@types/react' + - '@types/react-dom' + + code-block-writer@13.0.3: {} + + color-convert@2.0.1: + dependencies: + color-name: 1.1.4 + + color-name@1.1.4: {} + + color-support@1.1.3: {} + + combined-stream@1.0.8: + dependencies: + delayed-stream: 1.0.0 + + comma-separated-tokens@2.0.3: {} + + commander@11.1.0: {} + + commander@14.0.3: {} + + concat-map@0.0.1: {} + + confbox@0.2.4: {} + + consola@3.4.2: {} + + content-disposition@1.0.1: {} + + content-type@1.0.5: {} + + convert-source-map@2.0.0: {} + + cookie-signature@1.2.2: {} + + cookie@0.7.2: {} + + cookie@1.1.1: {} + + cors@2.8.6: + dependencies: + object-assign: 4.1.1 + vary: 1.1.2 + + cosmiconfig@9.0.1(typescript@5.9.3): + dependencies: + env-paths: 2.2.1 + import-fresh: 3.3.1 + js-yaml: 4.1.1 + parse-json: 5.2.0 + optionalDependencies: + typescript: 5.9.3 + + cross-spawn@7.0.6: + dependencies: + path-key: 3.1.1 + shebang-command: 2.0.0 + which: 2.0.2 + + cssesc@3.0.0: {} + + csstype@3.2.3: {} + + d3-array@3.2.4: + dependencies: + internmap: 2.0.3 + + d3-color@3.1.0: {} + + d3-ease@3.0.1: {} + + d3-format@3.1.2: {} + + d3-interpolate@3.0.1: + dependencies: + d3-color: 3.1.0 + + d3-path@3.1.0: {} + + d3-scale@4.0.2: + dependencies: + d3-array: 3.2.4 + d3-format: 3.1.2 + d3-interpolate: 3.0.1 + d3-time: 3.1.0 + d3-time-format: 4.1.0 + + d3-shape@3.2.0: + dependencies: + d3-path: 3.1.0 + + d3-time-format@4.1.0: + dependencies: + d3-time: 3.1.0 + + d3-time@3.1.0: + dependencies: + d3-array: 3.2.4 + + d3-timer@3.0.1: {} + + data-uri-to-buffer@4.0.1: {} + + date-fns-jalali@4.1.0-0: {} + + 
date-fns@4.1.0: {} + + debug@4.4.3: + dependencies: + ms: 2.1.3 + + decimal.js-light@2.5.1: {} + + decode-named-character-reference@1.3.0: + dependencies: + character-entities: 2.0.2 + + dedent@1.7.2: {} + + deep-is@0.1.4: {} + + deepmerge@4.3.1: {} + + default-browser-id@5.0.1: {} + + default-browser@5.5.0: + dependencies: + bundle-name: 4.1.0 + default-browser-id: 5.0.1 + + define-lazy-prop@3.0.0: {} + + defu@6.1.4: {} + + delayed-stream@1.0.0: {} + + depd@2.0.0: {} + + dequal@2.0.3: {} + + destr@2.0.5: {} + + detect-libc@2.1.2: {} + + detect-node-es@1.1.0: {} + + devlop@1.1.0: + dependencies: + dequal: 2.0.3 + + diff@8.0.4: {} + + dotenv@17.3.1: {} + + dunder-proto@1.0.1: + dependencies: + call-bind-apply-helpers: 1.0.2 + es-errors: 1.3.0 + gopd: 1.2.0 + + eciesjs@0.4.18: + dependencies: + '@ecies/ciphers': 0.2.5(@noble/ciphers@1.3.0) + '@noble/ciphers': 1.3.0 + '@noble/curves': 1.9.7 + '@noble/hashes': 1.8.0 + + ee-first@1.1.1: {} + + electron-to-chromium@1.5.329: {} + + embla-carousel-react@8.6.0(react@19.2.4): + dependencies: + embla-carousel: 8.6.0 + embla-carousel-reactive-utils: 8.6.0(embla-carousel@8.6.0) + react: 19.2.4 + + embla-carousel-reactive-utils@8.6.0(embla-carousel@8.6.0): + dependencies: + embla-carousel: 8.6.0 + + embla-carousel@8.6.0: {} + + emoji-regex-xs@1.0.0: {} + + emoji-regex@10.6.0: {} + + emoji-regex@8.0.0: {} + + encodeurl@2.0.0: {} + + enhanced-resolve@5.20.1: + dependencies: + graceful-fs: 4.2.11 + tapable: 2.3.2 + + env-paths@2.2.1: {} + + error-ex@1.3.4: + dependencies: + is-arrayish: 0.2.1 + + es-define-property@1.0.1: {} + + es-errors@1.3.0: {} + + es-object-atoms@1.1.1: + dependencies: + es-errors: 1.3.0 + + es-set-tostringtag@2.1.0: + dependencies: + es-errors: 1.3.0 + get-intrinsic: 1.3.0 + has-tostringtag: 1.0.2 + hasown: 2.0.2 + + es-toolkit@1.45.1: {} + + escalade@3.2.0: {} + + escape-html@1.0.3: {} + + escape-string-regexp@4.0.0: {} + + escape-string-regexp@5.0.0: {} + + 
eslint-plugin-react-hooks@7.0.1(eslint@9.39.4(jiti@2.6.1)): + dependencies: + '@babel/core': 7.29.0 + '@babel/parser': 7.29.2 + eslint: 9.39.4(jiti@2.6.1) + hermes-parser: 0.25.1 + zod: 4.3.6 + zod-validation-error: 4.0.2(zod@4.3.6) + transitivePeerDependencies: + - supports-color + + eslint-plugin-react-refresh@0.5.2(eslint@9.39.4(jiti@2.6.1)): + dependencies: + eslint: 9.39.4(jiti@2.6.1) + + eslint-scope@8.4.0: + dependencies: + esrecurse: 4.3.0 + estraverse: 5.3.0 + + eslint-visitor-keys@3.4.3: {} + + eslint-visitor-keys@4.2.1: {} + + eslint-visitor-keys@5.0.1: {} + + eslint@9.39.4(jiti@2.6.1): + dependencies: + '@eslint-community/eslint-utils': 4.9.1(eslint@9.39.4(jiti@2.6.1)) + '@eslint-community/regexpp': 4.12.2 + '@eslint/config-array': 0.21.2 + '@eslint/config-helpers': 0.4.2 + '@eslint/core': 0.17.0 + '@eslint/eslintrc': 3.3.5 + '@eslint/js': 9.39.4 + '@eslint/plugin-kit': 0.4.1 + '@humanfs/node': 0.16.7 + '@humanwhocodes/module-importer': 1.0.1 + '@humanwhocodes/retry': 0.4.3 + '@types/estree': 1.0.8 + ajv: 6.14.0 + chalk: 4.1.2 + cross-spawn: 7.0.6 + debug: 4.4.3 + escape-string-regexp: 4.0.0 + eslint-scope: 8.4.0 + eslint-visitor-keys: 4.2.1 + espree: 10.4.0 + esquery: 1.7.0 + esutils: 2.0.3 + fast-deep-equal: 3.1.3 + file-entry-cache: 8.0.0 + find-up: 5.0.0 + glob-parent: 6.0.2 + ignore: 5.3.2 + imurmurhash: 0.1.4 + is-glob: 4.0.3 + json-stable-stringify-without-jsonify: 1.0.1 + lodash.merge: 4.6.2 + minimatch: 3.1.5 + natural-compare: 1.4.0 + optionator: 0.9.4 + optionalDependencies: + jiti: 2.6.1 + transitivePeerDependencies: + - supports-color + + espree@10.4.0: + dependencies: + acorn: 8.16.0 + acorn-jsx: 5.3.2(acorn@8.16.0) + eslint-visitor-keys: 4.2.1 + + esprima@4.0.1: {} + + esquery@1.7.0: + dependencies: + estraverse: 5.3.0 + + esrecurse@4.3.0: + dependencies: + estraverse: 5.3.0 + + estraverse@5.3.0: {} + + estree-util-is-identifier-name@3.0.0: {} + + esutils@2.0.3: {} + + etag@1.8.1: {} + + eventemitter3@5.0.4: {} + + 
eventsource-parser@3.0.6: {} + + eventsource@3.0.7: + dependencies: + eventsource-parser: 3.0.6 + + execa@5.1.1: + dependencies: + cross-spawn: 7.0.6 + get-stream: 6.0.1 + human-signals: 2.1.0 + is-stream: 2.0.1 + merge-stream: 2.0.0 + npm-run-path: 4.0.1 + onetime: 5.1.2 + signal-exit: 3.0.7 + strip-final-newline: 2.0.0 + + execa@9.6.1: + dependencies: + '@sindresorhus/merge-streams': 4.0.0 + cross-spawn: 7.0.6 + figures: 6.1.0 + get-stream: 9.0.1 + human-signals: 8.0.1 + is-plain-obj: 4.1.0 + is-stream: 4.0.1 + npm-run-path: 6.0.0 + pretty-ms: 9.3.0 + signal-exit: 4.1.0 + strip-final-newline: 4.0.0 + yoctocolors: 2.1.2 + + express-rate-limit@8.3.2(express@5.2.1): + dependencies: + express: 5.2.1 + ip-address: 10.1.0 + + express@5.2.1: + dependencies: + accepts: 2.0.0 + body-parser: 2.2.2 + content-disposition: 1.0.1 + content-type: 1.0.5 + cookie: 0.7.2 + cookie-signature: 1.2.2 + debug: 4.4.3 + depd: 2.0.0 + encodeurl: 2.0.0 + escape-html: 1.0.3 + etag: 1.8.1 + finalhandler: 2.1.1 + fresh: 2.0.0 + http-errors: 2.0.1 + merge-descriptors: 2.0.0 + mime-types: 3.0.2 + on-finished: 2.4.1 + once: 1.4.0 + parseurl: 1.3.3 + proxy-addr: 2.0.7 + qs: 6.15.0 + range-parser: 1.2.1 + router: 2.2.0 + send: 1.2.1 + serve-static: 2.2.1 + statuses: 2.0.2 + type-is: 2.0.1 + vary: 1.1.2 + transitivePeerDependencies: + - supports-color + + exsolve@1.0.8: {} + + extend@3.0.2: {} + + fast-deep-equal@3.1.3: {} + + fast-glob@3.3.3: + dependencies: + '@nodelib/fs.stat': 2.0.5 + '@nodelib/fs.walk': 1.2.8 + glob-parent: 5.1.2 + merge2: 1.4.1 + micromatch: 4.0.8 + + fast-json-stable-stringify@2.1.0: {} + + fast-levenshtein@2.0.6: {} + + fast-uri@3.1.0: {} + + fastq@1.20.1: + dependencies: + reusify: 1.1.0 + + fdir@6.5.0(picomatch@4.0.4): + optionalDependencies: + picomatch: 4.0.4 + + fetch-blob@3.2.0: + dependencies: + node-domexception: 1.0.0 + web-streams-polyfill: 3.3.3 + + figures@6.1.0: + dependencies: + is-unicode-supported: 2.1.0 + + file-entry-cache@8.0.0: + dependencies: + 
flat-cache: 4.0.1 + + fill-range@7.1.1: + dependencies: + to-regex-range: 5.0.1 + + finalhandler@2.1.1: + dependencies: + debug: 4.4.3 + encodeurl: 2.0.0 + escape-html: 1.0.3 + on-finished: 2.4.1 + parseurl: 1.3.3 + statuses: 2.0.2 + transitivePeerDependencies: + - supports-color + + find-up@5.0.0: + dependencies: + locate-path: 6.0.0 + path-exists: 4.0.0 + + flat-cache@4.0.1: + dependencies: + flatted: 3.4.2 + keyv: 4.5.4 + + flatted@3.4.2: {} + + follow-redirects@1.15.11: {} + + form-data@4.0.5: + dependencies: + asynckit: 0.4.0 + combined-stream: 1.0.8 + es-set-tostringtag: 2.1.0 + hasown: 2.0.2 + mime-types: 2.1.35 + + formdata-polyfill@4.0.10: + dependencies: + fetch-blob: 3.2.0 + + forwarded@0.2.0: {} + + framer-motion@11.18.2(react-dom@19.2.4(react@19.2.4))(react@19.2.4): + dependencies: + motion-dom: 11.18.1 + motion-utils: 11.18.1 + tslib: 2.8.1 + optionalDependencies: + react: 19.2.4 + react-dom: 19.2.4(react@19.2.4) + + fresh@2.0.0: {} + + fs-extra@11.3.4: + dependencies: + graceful-fs: 4.2.11 + jsonfile: 6.2.0 + universalify: 2.0.1 + + fsevents@2.3.3: + optional: true + + function-bind@1.1.2: {} + + fuzzysort@3.1.0: {} + + gensync@1.0.0-beta.2: {} + + get-caller-file@2.0.5: {} + + get-east-asian-width@1.5.0: {} + + get-intrinsic@1.3.0: + dependencies: + call-bind-apply-helpers: 1.0.2 + es-define-property: 1.0.1 + es-errors: 1.3.0 + es-object-atoms: 1.1.1 + function-bind: 1.1.2 + get-proto: 1.0.1 + gopd: 1.2.0 + has-symbols: 1.1.0 + hasown: 2.0.2 + math-intrinsics: 1.1.0 + + get-nonce@1.0.1: {} + + get-own-enumerable-keys@1.0.0: {} + + get-proto@1.0.1: + dependencies: + dunder-proto: 1.0.1 + es-object-atoms: 1.1.1 + + get-stream@6.0.1: {} + + get-stream@9.0.1: + dependencies: + '@sec-ant/readable-stream': 0.4.1 + is-stream: 4.0.1 + + get-tsconfig@4.13.6: + dependencies: + resolve-pkg-maps: 1.0.0 + + giget@2.0.0: + dependencies: + citty: 0.1.6 + consola: 3.4.2 + defu: 6.1.4 + node-fetch-native: 1.6.7 + nypm: 0.6.5 + pathe: 2.0.3 + + glob-parent@5.1.2: + 
dependencies: + is-glob: 4.0.3 + + glob-parent@6.0.2: + dependencies: + is-glob: 4.0.3 + + globals@14.0.0: {} + + globals@17.4.0: {} + + gopd@1.2.0: {} + + graceful-fs@4.2.11: {} + + graphql@16.13.2: {} + + has-flag@4.0.0: {} + + has-symbols@1.1.0: {} + + has-tostringtag@1.0.2: + dependencies: + has-symbols: 1.1.0 + + hasown@2.0.2: + dependencies: + function-bind: 1.1.2 + + hast-util-to-html@9.0.5: + dependencies: + '@types/hast': 3.0.4 + '@types/unist': 3.0.3 + ccount: 2.0.1 + comma-separated-tokens: 2.0.3 + hast-util-whitespace: 3.0.0 + html-void-elements: 3.0.0 + mdast-util-to-hast: 13.2.1 + property-information: 7.1.0 + space-separated-tokens: 2.0.2 + stringify-entities: 4.0.4 + zwitch: 2.0.4 + + hast-util-to-jsx-runtime@2.3.6: + dependencies: + '@types/estree': 1.0.8 + '@types/hast': 3.0.4 + '@types/unist': 3.0.3 + comma-separated-tokens: 2.0.3 + devlop: 1.1.0 + estree-util-is-identifier-name: 3.0.0 + hast-util-whitespace: 3.0.0 + mdast-util-mdx-expression: 2.0.1 + mdast-util-mdx-jsx: 3.2.0 + mdast-util-mdxjs-esm: 2.0.1 + property-information: 7.1.0 + space-separated-tokens: 2.0.2 + style-to-js: 1.1.21 + unist-util-position: 5.0.0 + vfile-message: 4.0.3 + transitivePeerDependencies: + - supports-color + + hast-util-whitespace@3.0.0: + dependencies: + '@types/hast': 3.0.4 + + headers-polyfill@4.0.3: {} + + hermes-estree@0.25.1: {} + + hermes-parser@0.25.1: + dependencies: + hermes-estree: 0.25.1 + + hono@4.12.9: {} + + html-url-attributes@3.0.1: {} + + html-void-elements@3.0.0: {} + + http-errors@2.0.1: + dependencies: + depd: 2.0.0 + inherits: 2.0.4 + setprototypeof: 1.2.0 + statuses: 2.0.2 + toidentifier: 1.0.1 + + https-proxy-agent@7.0.6: + dependencies: + agent-base: 7.1.4 + debug: 4.4.3 + transitivePeerDependencies: + - supports-color + + human-signals@2.1.0: {} + + human-signals@8.0.1: {} + + iconv-lite@0.7.2: + dependencies: + safer-buffer: 2.1.2 + + ignore@5.3.2: {} + + ignore@7.0.5: {} + + immer@10.2.0: {} + + immer@11.1.4: {} + + import-fresh@3.3.1: + 
dependencies: + parent-module: 1.0.1 + resolve-from: 4.0.0 + + imurmurhash@0.1.4: {} + + inherits@2.0.4: {} + + inline-style-parser@0.2.7: {} + + input-otp@1.4.2(react-dom@19.2.4(react@19.2.4))(react@19.2.4): + dependencies: + react: 19.2.4 + react-dom: 19.2.4(react@19.2.4) + + internmap@2.0.3: {} + + ip-address@10.1.0: {} + + ipaddr.js@1.9.1: {} + + is-alphabetical@2.0.1: {} + + is-alphanumerical@2.0.1: + dependencies: + is-alphabetical: 2.0.1 + is-decimal: 2.0.1 + + is-arrayish@0.2.1: {} + + is-decimal@2.0.1: {} + + is-docker@3.0.0: {} + + is-extglob@2.1.1: {} + + is-fullwidth-code-point@3.0.0: {} + + is-glob@4.0.3: + dependencies: + is-extglob: 2.1.1 + + is-hexadecimal@2.0.1: {} + + is-in-ssh@1.0.0: {} + + is-inside-container@1.0.0: + dependencies: + is-docker: 3.0.0 + + is-interactive@2.0.0: {} + + is-node-process@1.2.0: {} + + is-number@7.0.0: {} + + is-obj@3.0.0: {} + + is-plain-obj@4.1.0: {} + + is-promise@4.0.0: {} + + is-regexp@3.1.0: {} + + is-stream@2.0.1: {} + + is-stream@4.0.1: {} + + is-unicode-supported@1.3.0: {} + + is-unicode-supported@2.1.0: {} + + is-wsl@3.1.1: + dependencies: + is-inside-container: 1.0.0 + + isexe@2.0.0: {} + + isexe@3.1.5: {} + + jiti@2.6.1: {} + + jose@6.2.2: {} + + js-tokens@4.0.0: {} + + js-yaml@4.1.1: + dependencies: + argparse: 2.0.1 + + jsencrypt@3.5.4: {} + + jsesc@3.1.0: {} + + json-buffer@3.0.1: {} + + json-parse-even-better-errors@2.3.1: {} + + json-schema-traverse@0.4.1: {} + + json-schema-traverse@1.0.0: {} + + json-schema-typed@8.0.2: {} + + json-stable-stringify-without-jsonify@1.0.1: {} + + json5@2.2.3: {} + + jsonfile@6.2.0: + dependencies: + universalify: 2.0.1 + optionalDependencies: + graceful-fs: 4.2.11 + + keyv@4.5.4: + dependencies: + json-buffer: 3.0.1 + + kleur@3.0.3: {} + + kleur@4.1.5: {} + + levn@0.4.1: + dependencies: + prelude-ls: 1.2.1 + type-check: 0.4.0 + + lightningcss-android-arm64@1.32.0: + optional: true + + lightningcss-darwin-arm64@1.32.0: + optional: true + + 
lightningcss-darwin-x64@1.32.0: + optional: true + + lightningcss-freebsd-x64@1.32.0: + optional: true + + lightningcss-linux-arm-gnueabihf@1.32.0: + optional: true + + lightningcss-linux-arm64-gnu@1.32.0: + optional: true + + lightningcss-linux-arm64-musl@1.32.0: + optional: true + + lightningcss-linux-x64-gnu@1.32.0: + optional: true + + lightningcss-linux-x64-musl@1.32.0: + optional: true + + lightningcss-win32-arm64-msvc@1.32.0: + optional: true + + lightningcss-win32-x64-msvc@1.32.0: + optional: true + + lightningcss@1.32.0: + dependencies: + detect-libc: 2.1.2 + optionalDependencies: + lightningcss-android-arm64: 1.32.0 + lightningcss-darwin-arm64: 1.32.0 + lightningcss-darwin-x64: 1.32.0 + lightningcss-freebsd-x64: 1.32.0 + lightningcss-linux-arm-gnueabihf: 1.32.0 + lightningcss-linux-arm64-gnu: 1.32.0 + lightningcss-linux-arm64-musl: 1.32.0 + lightningcss-linux-x64-gnu: 1.32.0 + lightningcss-linux-x64-musl: 1.32.0 + lightningcss-win32-arm64-msvc: 1.32.0 + lightningcss-win32-x64-msvc: 1.32.0 + + lines-and-columns@1.2.4: {} + + locate-path@6.0.0: + dependencies: + p-locate: 5.0.0 + + lodash.merge@4.6.2: {} + + log-symbols@6.0.0: + dependencies: + chalk: 5.6.2 + is-unicode-supported: 1.3.0 + + longest-streak@3.1.0: {} + + lru-cache@5.1.1: + dependencies: + yallist: 3.1.1 + + lucide-react@1.7.0(react@19.2.4): + dependencies: + react: 19.2.4 + + magic-string@0.30.21: + dependencies: + '@jridgewell/sourcemap-codec': 1.5.5 + + markdown-table@3.0.4: {} + + math-intrinsics@1.1.0: {} + + mdast-util-find-and-replace@3.0.2: + dependencies: + '@types/mdast': 4.0.4 + escape-string-regexp: 5.0.0 + unist-util-is: 6.0.1 + unist-util-visit-parents: 6.0.2 + + mdast-util-from-markdown@2.0.3: + dependencies: + '@types/mdast': 4.0.4 + '@types/unist': 3.0.3 + decode-named-character-reference: 1.3.0 + devlop: 1.1.0 + mdast-util-to-string: 4.0.0 + micromark: 4.0.2 + micromark-util-decode-numeric-character-reference: 2.0.2 + micromark-util-decode-string: 2.0.1 + 
micromark-util-normalize-identifier: 2.0.1 + micromark-util-symbol: 2.0.1 + micromark-util-types: 2.0.2 + unist-util-stringify-position: 4.0.0 + transitivePeerDependencies: + - supports-color + + mdast-util-gfm-autolink-literal@2.0.1: + dependencies: + '@types/mdast': 4.0.4 + ccount: 2.0.1 + devlop: 1.1.0 + mdast-util-find-and-replace: 3.0.2 + micromark-util-character: 2.1.1 + + mdast-util-gfm-footnote@2.1.0: + dependencies: + '@types/mdast': 4.0.4 + devlop: 1.1.0 + mdast-util-from-markdown: 2.0.3 + mdast-util-to-markdown: 2.1.2 + micromark-util-normalize-identifier: 2.0.1 + transitivePeerDependencies: + - supports-color + + mdast-util-gfm-strikethrough@2.0.0: + dependencies: + '@types/mdast': 4.0.4 + mdast-util-from-markdown: 2.0.3 + mdast-util-to-markdown: 2.1.2 + transitivePeerDependencies: + - supports-color + + mdast-util-gfm-table@2.0.0: + dependencies: + '@types/mdast': 4.0.4 + devlop: 1.1.0 + markdown-table: 3.0.4 + mdast-util-from-markdown: 2.0.3 + mdast-util-to-markdown: 2.1.2 + transitivePeerDependencies: + - supports-color + + mdast-util-gfm-task-list-item@2.0.0: + dependencies: + '@types/mdast': 4.0.4 + devlop: 1.1.0 + mdast-util-from-markdown: 2.0.3 + mdast-util-to-markdown: 2.1.2 + transitivePeerDependencies: + - supports-color + + mdast-util-gfm@3.1.0: + dependencies: + mdast-util-from-markdown: 2.0.3 + mdast-util-gfm-autolink-literal: 2.0.1 + mdast-util-gfm-footnote: 2.1.0 + mdast-util-gfm-strikethrough: 2.0.0 + mdast-util-gfm-table: 2.0.0 + mdast-util-gfm-task-list-item: 2.0.0 + mdast-util-to-markdown: 2.1.2 + transitivePeerDependencies: + - supports-color + + mdast-util-mdx-expression@2.0.1: + dependencies: + '@types/estree-jsx': 1.0.5 + '@types/hast': 3.0.4 + '@types/mdast': 4.0.4 + devlop: 1.1.0 + mdast-util-from-markdown: 2.0.3 + mdast-util-to-markdown: 2.1.2 + transitivePeerDependencies: + - supports-color + + mdast-util-mdx-jsx@3.2.0: + dependencies: + '@types/estree-jsx': 1.0.5 + '@types/hast': 3.0.4 + '@types/mdast': 4.0.4 + 
'@types/unist': 3.0.3 + ccount: 2.0.1 + devlop: 1.1.0 + mdast-util-from-markdown: 2.0.3 + mdast-util-to-markdown: 2.1.2 + parse-entities: 4.0.2 + stringify-entities: 4.0.4 + unist-util-stringify-position: 4.0.0 + vfile-message: 4.0.3 + transitivePeerDependencies: + - supports-color + + mdast-util-mdxjs-esm@2.0.1: + dependencies: + '@types/estree-jsx': 1.0.5 + '@types/hast': 3.0.4 + '@types/mdast': 4.0.4 + devlop: 1.1.0 + mdast-util-from-markdown: 2.0.3 + mdast-util-to-markdown: 2.1.2 + transitivePeerDependencies: + - supports-color + + mdast-util-phrasing@4.1.0: + dependencies: + '@types/mdast': 4.0.4 + unist-util-is: 6.0.1 + + mdast-util-to-hast@13.2.1: + dependencies: + '@types/hast': 3.0.4 + '@types/mdast': 4.0.4 + '@ungap/structured-clone': 1.3.0 + devlop: 1.1.0 + micromark-util-sanitize-uri: 2.0.1 + trim-lines: 3.0.1 + unist-util-position: 5.0.0 + unist-util-visit: 5.1.0 + vfile: 6.0.3 + + mdast-util-to-markdown@2.1.2: + dependencies: + '@types/mdast': 4.0.4 + '@types/unist': 3.0.3 + longest-streak: 3.1.0 + mdast-util-phrasing: 4.1.0 + mdast-util-to-string: 4.0.0 + micromark-util-classify-character: 2.0.1 + micromark-util-decode-string: 2.0.1 + unist-util-visit: 5.1.0 + zwitch: 2.0.4 + + mdast-util-to-string@4.0.0: + dependencies: + '@types/mdast': 4.0.4 + + media-typer@1.1.0: {} + + merge-descriptors@2.0.0: {} + + merge-stream@2.0.0: {} + + merge2@1.4.1: {} + + micromark-core-commonmark@2.0.3: + dependencies: + decode-named-character-reference: 1.3.0 + devlop: 1.1.0 + micromark-factory-destination: 2.0.1 + micromark-factory-label: 2.0.1 + micromark-factory-space: 2.0.1 + micromark-factory-title: 2.0.1 + micromark-factory-whitespace: 2.0.1 + micromark-util-character: 2.1.1 + micromark-util-chunked: 2.0.1 + micromark-util-classify-character: 2.0.1 + micromark-util-html-tag-name: 2.0.1 + micromark-util-normalize-identifier: 2.0.1 + micromark-util-resolve-all: 2.0.1 + micromark-util-subtokenize: 2.1.0 + micromark-util-symbol: 2.0.1 + micromark-util-types: 2.0.2 + 
+ micromark-extension-gfm-autolink-literal@2.1.0: + dependencies: + micromark-util-character: 2.1.1 + micromark-util-sanitize-uri: 2.0.1 + micromark-util-symbol: 2.0.1 + micromark-util-types: 2.0.2 + + micromark-extension-gfm-footnote@2.1.0: + dependencies: + devlop: 1.1.0 + micromark-core-commonmark: 2.0.3 + micromark-factory-space: 2.0.1 + micromark-util-character: 2.1.1 + micromark-util-normalize-identifier: 2.0.1 + micromark-util-sanitize-uri: 2.0.1 + micromark-util-symbol: 2.0.1 + micromark-util-types: 2.0.2 + + micromark-extension-gfm-strikethrough@2.1.0: + dependencies: + devlop: 1.1.0 + micromark-util-chunked: 2.0.1 + micromark-util-classify-character: 2.0.1 + micromark-util-resolve-all: 2.0.1 + micromark-util-symbol: 2.0.1 + micromark-util-types: 2.0.2 + + micromark-extension-gfm-table@2.1.1: + dependencies: + devlop: 1.1.0 + micromark-factory-space: 2.0.1 + micromark-util-character: 2.1.1 + micromark-util-symbol: 2.0.1 + micromark-util-types: 2.0.2 + + micromark-extension-gfm-tagfilter@2.0.0: + dependencies: + micromark-util-types: 2.0.2 + + micromark-extension-gfm-task-list-item@2.1.0: + dependencies: + devlop: 1.1.0 + micromark-factory-space: 2.0.1 + micromark-util-character: 2.1.1 + micromark-util-symbol: 2.0.1 + micromark-util-types: 2.0.2 + + micromark-extension-gfm@3.0.0: + dependencies: + micromark-extension-gfm-autolink-literal: 2.1.0 + micromark-extension-gfm-footnote: 2.1.0 + micromark-extension-gfm-strikethrough: 2.1.0 + micromark-extension-gfm-table: 2.1.1 + micromark-extension-gfm-tagfilter: 2.0.0 + micromark-extension-gfm-task-list-item: 2.1.0 + micromark-util-combine-extensions: 2.0.1 + micromark-util-types: 2.0.2 + + micromark-factory-destination@2.0.1: + dependencies: + micromark-util-character: 2.1.1 + micromark-util-symbol: 2.0.1 + micromark-util-types: 2.0.2 + + micromark-factory-label@2.0.1: + dependencies: + devlop: 1.1.0 + micromark-util-character: 2.1.1 + micromark-util-symbol: 2.0.1 + micromark-util-types: 2.0.2 + + 
micromark-factory-space@2.0.1: + dependencies: + micromark-util-character: 2.1.1 + micromark-util-types: 2.0.2 + + micromark-factory-title@2.0.1: + dependencies: + micromark-factory-space: 2.0.1 + micromark-util-character: 2.1.1 + micromark-util-symbol: 2.0.1 + micromark-util-types: 2.0.2 + + micromark-factory-whitespace@2.0.1: + dependencies: + micromark-factory-space: 2.0.1 + micromark-util-character: 2.1.1 + micromark-util-symbol: 2.0.1 + micromark-util-types: 2.0.2 + + micromark-util-character@2.1.1: + dependencies: + micromark-util-symbol: 2.0.1 + micromark-util-types: 2.0.2 + + micromark-util-chunked@2.0.1: + dependencies: + micromark-util-symbol: 2.0.1 + + micromark-util-classify-character@2.0.1: + dependencies: + micromark-util-character: 2.1.1 + micromark-util-symbol: 2.0.1 + micromark-util-types: 2.0.2 + + micromark-util-combine-extensions@2.0.1: + dependencies: + micromark-util-chunked: 2.0.1 + micromark-util-types: 2.0.2 + + micromark-util-decode-numeric-character-reference@2.0.2: + dependencies: + micromark-util-symbol: 2.0.1 + + micromark-util-decode-string@2.0.1: + dependencies: + decode-named-character-reference: 1.3.0 + micromark-util-character: 2.1.1 + micromark-util-decode-numeric-character-reference: 2.0.2 + micromark-util-symbol: 2.0.1 + + micromark-util-encode@2.0.1: {} + + micromark-util-html-tag-name@2.0.1: {} + + micromark-util-normalize-identifier@2.0.1: + dependencies: + micromark-util-symbol: 2.0.1 + + micromark-util-resolve-all@2.0.1: + dependencies: + micromark-util-types: 2.0.2 + + micromark-util-sanitize-uri@2.0.1: + dependencies: + micromark-util-character: 2.1.1 + micromark-util-encode: 2.0.1 + micromark-util-symbol: 2.0.1 + + micromark-util-subtokenize@2.1.0: + dependencies: + devlop: 1.1.0 + micromark-util-chunked: 2.0.1 + micromark-util-symbol: 2.0.1 + micromark-util-types: 2.0.2 + + micromark-util-symbol@2.0.1: {} + + micromark-util-types@2.0.2: {} + + micromark@4.0.2: + dependencies: + '@types/debug': 4.1.13 + debug: 4.4.3 + 
decode-named-character-reference: 1.3.0 + devlop: 1.1.0 + micromark-core-commonmark: 2.0.3 + micromark-factory-space: 2.0.1 + micromark-util-character: 2.1.1 + micromark-util-chunked: 2.0.1 + micromark-util-combine-extensions: 2.0.1 + micromark-util-decode-numeric-character-reference: 2.0.2 + micromark-util-encode: 2.0.1 + micromark-util-normalize-identifier: 2.0.1 + micromark-util-resolve-all: 2.0.1 + micromark-util-sanitize-uri: 2.0.1 + micromark-util-subtokenize: 2.1.0 + micromark-util-symbol: 2.0.1 + micromark-util-types: 2.0.2 + transitivePeerDependencies: + - supports-color + + micromatch@4.0.8: + dependencies: + braces: 3.0.3 + picomatch: 2.3.2 + + mime-db@1.52.0: {} + + mime-db@1.54.0: {} + + mime-types@2.1.35: + dependencies: + mime-db: 1.52.0 + + mime-types@3.0.2: + dependencies: + mime-db: 1.54.0 + + mimic-fn@2.1.0: {} + + mimic-function@5.0.1: {} + + minimatch@10.2.5: + dependencies: + brace-expansion: 5.0.5 + + minimatch@3.1.5: + dependencies: + brace-expansion: 1.1.13 + + minimist@1.2.8: {} + + motion-dom@11.18.1: + dependencies: + motion-utils: 11.18.1 + + motion-utils@11.18.1: {} + + ms@2.1.3: {} + + msw@2.12.14(@types/node@24.12.0)(typescript@5.9.3): + dependencies: + '@inquirer/confirm': 5.1.21(@types/node@24.12.0) + '@mswjs/interceptors': 0.41.3 + '@open-draft/deferred-promise': 2.2.0 + '@types/statuses': 2.0.6 + cookie: 1.1.1 + graphql: 16.13.2 + headers-polyfill: 4.0.3 + is-node-process: 1.2.0 + outvariant: 1.4.3 + path-to-regexp: 6.3.0 + picocolors: 1.1.1 + rettime: 0.10.1 + statuses: 2.0.2 + strict-event-emitter: 0.5.1 + tough-cookie: 6.0.1 + type-fest: 5.5.0 + until-async: 3.0.2 + yargs: 17.7.2 + optionalDependencies: + typescript: 5.9.3 + transitivePeerDependencies: + - '@types/node' + + mute-stream@2.0.0: {} + + nanoid@3.3.11: {} + + natural-compare@1.4.0: {} + + negotiator@1.0.0: {} + + next-themes@0.4.6(react-dom@19.2.4(react@19.2.4))(react@19.2.4): + dependencies: + react: 19.2.4 + react-dom: 19.2.4(react@19.2.4) + + 
node-domexception@1.0.0: {} + + node-fetch-native@1.6.7: {} + + node-fetch@3.3.2: + dependencies: + data-uri-to-buffer: 4.0.1 + fetch-blob: 3.2.0 + formdata-polyfill: 4.0.10 + + node-releases@2.0.36: {} + + npm-run-path@4.0.1: + dependencies: + path-key: 3.1.1 + + npm-run-path@6.0.0: + dependencies: + path-key: 4.0.0 + unicorn-magic: 0.3.0 + + nypm@0.6.5: + dependencies: + citty: 0.2.1 + pathe: 2.0.3 + tinyexec: 1.0.4 + + object-assign@4.1.1: {} + + object-inspect@1.13.4: {} + + object-treeify@1.1.33: {} + + ohash@2.0.11: {} + + on-finished@2.4.1: + dependencies: + ee-first: 1.1.1 + + once@1.4.0: + dependencies: + wrappy: 1.0.2 + + onetime@5.1.2: + dependencies: + mimic-fn: 2.1.0 + + onetime@7.0.0: + dependencies: + mimic-function: 5.0.1 + + oniguruma-to-es@2.3.0: + dependencies: + emoji-regex-xs: 1.0.0 + regex: 5.1.1 + regex-recursion: 5.1.1 + + open@11.0.0: + dependencies: + default-browser: 5.5.0 + define-lazy-prop: 3.0.0 + is-in-ssh: 1.0.0 + is-inside-container: 1.0.0 + powershell-utils: 0.1.0 + wsl-utils: 0.3.1 + + optionator@0.9.4: + dependencies: + deep-is: 0.1.4 + fast-levenshtein: 2.0.6 + levn: 0.4.1 + prelude-ls: 1.2.1 + type-check: 0.4.0 + word-wrap: 1.2.5 + + ora@8.2.0: + dependencies: + chalk: 5.6.2 + cli-cursor: 5.0.0 + cli-spinners: 2.9.2 + is-interactive: 2.0.0 + is-unicode-supported: 2.1.0 + log-symbols: 6.0.0 + stdin-discarder: 0.2.2 + string-width: 7.2.0 + strip-ansi: 7.2.0 + + outvariant@1.4.3: {} + + p-limit@3.1.0: + dependencies: + yocto-queue: 0.1.0 + + p-locate@5.0.0: + dependencies: + p-limit: 3.1.0 + + parent-module@1.0.1: + dependencies: + callsites: 3.1.0 + + parse-entities@4.0.2: + dependencies: + '@types/unist': 2.0.11 + character-entities-legacy: 3.0.0 + character-reference-invalid: 2.0.1 + decode-named-character-reference: 1.3.0 + is-alphanumerical: 2.0.1 + is-decimal: 2.0.1 + is-hexadecimal: 2.0.1 + + parse-json@5.2.0: + dependencies: + '@babel/code-frame': 7.29.0 + error-ex: 1.3.4 + json-parse-even-better-errors: 2.3.1 + 
lines-and-columns: 1.2.4 + + parse-ms@4.0.0: {} + + parseurl@1.3.3: {} + + path-browserify@1.0.1: {} + + path-exists@4.0.0: {} + + path-key@3.1.1: {} + + path-key@4.0.0: {} + + path-to-regexp@6.3.0: {} + + path-to-regexp@8.4.1: {} + + pathe@2.0.3: {} + + perfect-debounce@2.1.0: {} + + picocolors@1.1.1: {} + + picomatch@2.3.2: {} + + picomatch@4.0.4: {} + + pkce-challenge@5.0.1: {} + + pkg-types@2.3.0: + dependencies: + confbox: 0.2.4 + exsolve: 1.0.8 + pathe: 2.0.3 + + postcss-selector-parser@7.1.1: + dependencies: + cssesc: 3.0.0 + util-deprecate: 1.0.2 + + postcss@8.5.8: + dependencies: + nanoid: 3.3.11 + picocolors: 1.1.1 + source-map-js: 1.2.1 + + powershell-utils@0.1.0: {} + + prelude-ls@1.2.1: {} + + pretty-ms@9.3.0: + dependencies: + parse-ms: 4.0.0 + + prompts@2.4.2: + dependencies: + kleur: 3.0.3 + sisteransi: 1.0.5 + + property-information@7.1.0: {} + + proxy-addr@2.0.7: + dependencies: + forwarded: 0.2.0 + ipaddr.js: 1.9.1 + + proxy-from-env@2.1.0: {} + + punycode@2.3.1: {} + + qs@6.15.0: + dependencies: + side-channel: 1.1.0 + + queue-microtask@1.2.3: {} + + range-parser@1.2.1: {} + + raw-body@3.0.2: + dependencies: + bytes: 3.1.2 + http-errors: 2.0.1 + iconv-lite: 0.7.2 + unpipe: 1.0.0 + + rc9@2.1.2: + dependencies: + defu: 6.1.4 + destr: 2.0.5 + + react-day-picker@9.14.0(react@19.2.4): + dependencies: + '@date-fns/tz': 1.4.1 + '@tabby_ai/hijri-converter': 1.0.5 + date-fns: 4.1.0 + date-fns-jalali: 4.1.0-0 + react: 19.2.4 + + react-dom@19.2.4(react@19.2.4): + dependencies: + react: 19.2.4 + scheduler: 0.27.0 + + react-is@19.2.4: {} + + react-markdown@10.1.0(@types/react@19.2.14)(react@19.2.4): + dependencies: + '@types/hast': 3.0.4 + '@types/mdast': 4.0.4 + '@types/react': 19.2.14 + devlop: 1.1.0 + hast-util-to-jsx-runtime: 2.3.6 + html-url-attributes: 3.0.1 + mdast-util-to-hast: 13.2.1 + react: 19.2.4 + remark-parse: 11.0.0 + remark-rehype: 11.1.2 + unified: 11.0.5 + unist-util-visit: 5.1.0 + vfile: 6.0.3 + transitivePeerDependencies: + - 
supports-color + + react-redux@9.2.0(@types/react@19.2.14)(react@19.2.4)(redux@5.0.1): + dependencies: + '@types/use-sync-external-store': 0.0.6 + react: 19.2.4 + use-sync-external-store: 1.6.0(react@19.2.4) + optionalDependencies: + '@types/react': 19.2.14 + redux: 5.0.1 + + react-remove-scroll-bar@2.3.8(@types/react@19.2.14)(react@19.2.4): + dependencies: + react: 19.2.4 + react-style-singleton: 2.2.3(@types/react@19.2.14)(react@19.2.4) + tslib: 2.8.1 + optionalDependencies: + '@types/react': 19.2.14 + + react-remove-scroll@2.7.2(@types/react@19.2.14)(react@19.2.4): + dependencies: + react: 19.2.4 + react-remove-scroll-bar: 2.3.8(@types/react@19.2.14)(react@19.2.4) + react-style-singleton: 2.2.3(@types/react@19.2.14)(react@19.2.4) + tslib: 2.8.1 + use-callback-ref: 1.3.3(@types/react@19.2.14)(react@19.2.4) + use-sidecar: 1.1.3(@types/react@19.2.14)(react@19.2.4) + optionalDependencies: + '@types/react': 19.2.14 + + react-resizable-panels@4.8.0(react-dom@19.2.4(react@19.2.4))(react@19.2.4): + dependencies: + react: 19.2.4 + react-dom: 19.2.4(react@19.2.4) + + react-router-dom@7.13.2(react-dom@19.2.4(react@19.2.4))(react@19.2.4): + dependencies: + react: 19.2.4 + react-dom: 19.2.4(react@19.2.4) + react-router: 7.13.2(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + + react-router@7.13.2(react-dom@19.2.4(react@19.2.4))(react@19.2.4): + dependencies: + cookie: 1.1.1 + react: 19.2.4 + set-cookie-parser: 2.7.2 + optionalDependencies: + react-dom: 19.2.4(react@19.2.4) + + react-style-singleton@2.2.3(@types/react@19.2.14)(react@19.2.4): + dependencies: + get-nonce: 1.0.1 + react: 19.2.4 + tslib: 2.8.1 + optionalDependencies: + '@types/react': 19.2.14 + + react@19.2.4: {} + + readdirp@5.0.0: {} + + recast@0.23.11: + dependencies: + ast-types: 0.16.1 + esprima: 4.0.1 + source-map: 0.6.1 + tiny-invariant: 1.3.3 + tslib: 2.8.1 + + recharts@3.8.0(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react-is@19.2.4)(react@19.2.4)(redux@5.0.1): + dependencies: + 
'@reduxjs/toolkit': 2.11.2(react-redux@9.2.0(@types/react@19.2.14)(react@19.2.4)(redux@5.0.1))(react@19.2.4) + clsx: 2.1.1 + decimal.js-light: 2.5.1 + es-toolkit: 1.45.1 + eventemitter3: 5.0.4 + immer: 10.2.0 + react: 19.2.4 + react-dom: 19.2.4(react@19.2.4) + react-is: 19.2.4 + react-redux: 9.2.0(@types/react@19.2.14)(react@19.2.4)(redux@5.0.1) + reselect: 5.1.1 + tiny-invariant: 1.3.3 + use-sync-external-store: 1.6.0(react@19.2.4) + victory-vendor: 37.3.6 + transitivePeerDependencies: + - '@types/react' + - redux + + redux-thunk@3.1.0(redux@5.0.1): + dependencies: + redux: 5.0.1 + + redux@5.0.1: {} + + regex-recursion@5.1.1: + dependencies: + regex: 5.1.1 + regex-utilities: 2.3.0 + + regex-utilities@2.3.0: {} + + regex@5.1.1: + dependencies: + regex-utilities: 2.3.0 + + remark-gfm@4.0.1: + dependencies: + '@types/mdast': 4.0.4 + mdast-util-gfm: 3.1.0 + micromark-extension-gfm: 3.0.0 + remark-parse: 11.0.0 + remark-stringify: 11.0.0 + unified: 11.0.5 + transitivePeerDependencies: + - supports-color + + remark-parse@11.0.0: + dependencies: + '@types/mdast': 4.0.4 + mdast-util-from-markdown: 2.0.3 + micromark-util-types: 2.0.2 + unified: 11.0.5 + transitivePeerDependencies: + - supports-color + + remark-rehype@11.1.2: + dependencies: + '@types/hast': 3.0.4 + '@types/mdast': 4.0.4 + mdast-util-to-hast: 13.2.1 + unified: 11.0.5 + vfile: 6.0.3 + + remark-stringify@11.0.0: + dependencies: + '@types/mdast': 4.0.4 + mdast-util-to-markdown: 2.1.2 + unified: 11.0.5 + + remeda@2.33.7: {} + + require-directory@2.1.1: {} + + require-from-string@2.0.2: {} + + reselect@5.1.1: {} + + resolve-from@4.0.0: {} + + resolve-pkg-maps@1.0.0: {} + + restore-cursor@5.1.0: + dependencies: + onetime: 7.0.0 + signal-exit: 4.1.0 + + rettime@0.10.1: {} + + reusify@1.1.0: {} + + rolldown@1.0.0-rc.12(@emnapi/core@1.9.1)(@emnapi/runtime@1.9.1): + dependencies: + '@oxc-project/types': 0.122.0 + '@rolldown/pluginutils': 1.0.0-rc.12 + optionalDependencies: + '@rolldown/binding-android-arm64': 
1.0.0-rc.12 + '@rolldown/binding-darwin-arm64': 1.0.0-rc.12 + '@rolldown/binding-darwin-x64': 1.0.0-rc.12 + '@rolldown/binding-freebsd-x64': 1.0.0-rc.12 + '@rolldown/binding-linux-arm-gnueabihf': 1.0.0-rc.12 + '@rolldown/binding-linux-arm64-gnu': 1.0.0-rc.12 + '@rolldown/binding-linux-arm64-musl': 1.0.0-rc.12 + '@rolldown/binding-linux-ppc64-gnu': 1.0.0-rc.12 + '@rolldown/binding-linux-s390x-gnu': 1.0.0-rc.12 + '@rolldown/binding-linux-x64-gnu': 1.0.0-rc.12 + '@rolldown/binding-linux-x64-musl': 1.0.0-rc.12 + '@rolldown/binding-openharmony-arm64': 1.0.0-rc.12 + '@rolldown/binding-wasm32-wasi': 1.0.0-rc.12(@emnapi/core@1.9.1)(@emnapi/runtime@1.9.1) + '@rolldown/binding-win32-arm64-msvc': 1.0.0-rc.12 + '@rolldown/binding-win32-x64-msvc': 1.0.0-rc.12 + transitivePeerDependencies: + - '@emnapi/core' + - '@emnapi/runtime' + + router@2.2.0: + dependencies: + debug: 4.4.3 + depd: 2.0.0 + is-promise: 4.0.0 + parseurl: 1.3.3 + path-to-regexp: 8.4.1 + transitivePeerDependencies: + - supports-color + + run-applescript@7.1.0: {} + + run-parallel@1.2.0: + dependencies: + queue-microtask: 1.2.3 + + safer-buffer@2.1.2: {} + + scheduler@0.27.0: {} + + semver@6.3.1: {} + + semver@7.7.3: {} + + semver@7.7.4: {} + + send@1.2.1: + dependencies: + debug: 4.4.3 + encodeurl: 2.0.0 + escape-html: 1.0.3 + etag: 1.8.1 + fresh: 2.0.0 + http-errors: 2.0.1 + mime-types: 3.0.2 + ms: 2.1.3 + on-finished: 2.4.1 + range-parser: 1.2.1 + statuses: 2.0.2 + transitivePeerDependencies: + - supports-color + + serve-static@2.2.1: + dependencies: + encodeurl: 2.0.0 + escape-html: 1.0.3 + parseurl: 1.3.3 + send: 1.2.1 + transitivePeerDependencies: + - supports-color + + set-cookie-parser@2.7.2: {} + + setprototypeof@1.2.0: {} + + shadcn@4.1.1(@types/node@24.12.0)(typescript@5.9.3): + dependencies: + '@babel/core': 7.29.0 + '@babel/parser': 7.29.2 + '@babel/plugin-transform-typescript': 7.28.6(@babel/core@7.29.0) + '@babel/preset-typescript': 7.28.5(@babel/core@7.29.0) + '@dotenvx/dotenvx': 1.59.1 + 
'@modelcontextprotocol/sdk': 1.29.0(zod@3.25.76) + '@types/validate-npm-package-name': 4.0.2 + browserslist: 4.28.1 + commander: 14.0.3 + cosmiconfig: 9.0.1(typescript@5.9.3) + dedent: 1.7.2 + deepmerge: 4.3.1 + diff: 8.0.4 + execa: 9.6.1 + fast-glob: 3.3.3 + fs-extra: 11.3.4 + fuzzysort: 3.1.0 + https-proxy-agent: 7.0.6 + kleur: 4.1.5 + msw: 2.12.14(@types/node@24.12.0)(typescript@5.9.3) + node-fetch: 3.3.2 + open: 11.0.0 + ora: 8.2.0 + postcss: 8.5.8 + postcss-selector-parser: 7.1.1 + prompts: 2.4.2 + recast: 0.23.11 + stringify-object: 5.0.0 + tailwind-merge: 3.5.0 + ts-morph: 26.0.0 + tsconfig-paths: 4.2.0 + validate-npm-package-name: 7.0.2 + zod: 3.25.76 + zod-to-json-schema: 3.25.2(zod@3.25.76) + transitivePeerDependencies: + - '@cfworker/json-schema' + - '@types/node' + - babel-plugin-macros + - supports-color + - typescript + + shebang-command@2.0.0: + dependencies: + shebang-regex: 3.0.0 + + shebang-regex@3.0.0: {} + + shiki@1.29.2: + dependencies: + '@shikijs/core': 1.29.2 + '@shikijs/engine-javascript': 1.29.2 + '@shikijs/engine-oniguruma': 1.29.2 + '@shikijs/langs': 1.29.2 + '@shikijs/themes': 1.29.2 + '@shikijs/types': 1.29.2 + '@shikijs/vscode-textmate': 10.0.2 + '@types/hast': 3.0.4 + + side-channel-list@1.0.0: + dependencies: + es-errors: 1.3.0 + object-inspect: 1.13.4 + + side-channel-map@1.0.1: + dependencies: + call-bound: 1.0.4 + es-errors: 1.3.0 + get-intrinsic: 1.3.0 + object-inspect: 1.13.4 + + side-channel-weakmap@1.0.2: + dependencies: + call-bound: 1.0.4 + es-errors: 1.3.0 + get-intrinsic: 1.3.0 + object-inspect: 1.13.4 + side-channel-map: 1.0.1 + + side-channel@1.1.0: + dependencies: + es-errors: 1.3.0 + object-inspect: 1.13.4 + side-channel-list: 1.0.0 + side-channel-map: 1.0.1 + side-channel-weakmap: 1.0.2 + + signal-exit@3.0.7: {} + + signal-exit@4.1.0: {} + + sisteransi@1.0.5: {} + + sonner@2.0.7(react-dom@19.2.4(react@19.2.4))(react@19.2.4): + dependencies: + react: 19.2.4 + react-dom: 19.2.4(react@19.2.4) + + source-map-js@1.2.1: {} 
+ + source-map@0.6.1: {} + + space-separated-tokens@2.0.2: {} + + statuses@2.0.2: {} + + stdin-discarder@0.2.2: {} + + strict-event-emitter@0.5.1: {} + + string-width@4.2.3: + dependencies: + emoji-regex: 8.0.0 + is-fullwidth-code-point: 3.0.0 + strip-ansi: 6.0.1 + + string-width@7.2.0: + dependencies: + emoji-regex: 10.6.0 + get-east-asian-width: 1.5.0 + strip-ansi: 7.2.0 + + stringify-entities@4.0.4: + dependencies: + character-entities-html4: 2.1.0 + character-entities-legacy: 3.0.0 + + stringify-object@5.0.0: + dependencies: + get-own-enumerable-keys: 1.0.0 + is-obj: 3.0.0 + is-regexp: 3.1.0 + + strip-ansi@6.0.1: + dependencies: + ansi-regex: 5.0.1 + + strip-ansi@7.2.0: + dependencies: + ansi-regex: 6.2.2 + + strip-bom@3.0.0: {} + + strip-final-newline@2.0.0: {} + + strip-final-newline@4.0.0: {} + + strip-json-comments@3.1.1: {} + + style-to-js@1.1.21: + dependencies: + style-to-object: 1.0.14 + + style-to-object@1.0.14: + dependencies: + inline-style-parser: 0.2.7 + + supports-color@7.2.0: + dependencies: + has-flag: 4.0.0 + + tabbable@6.4.0: {} + + tagged-tag@1.0.0: {} + + tailwind-merge@3.5.0: {} + + tailwindcss@4.2.2: {} + + tapable@2.3.2: {} + + tiny-invariant@1.3.3: {} + + tinyexec@1.0.4: {} + + tinyglobby@0.2.15: + dependencies: + fdir: 6.5.0(picomatch@4.0.4) + picomatch: 4.0.4 + + tldts-core@7.0.27: {} + + tldts@7.0.27: + dependencies: + tldts-core: 7.0.27 + + to-regex-range@5.0.1: + dependencies: + is-number: 7.0.0 + + toidentifier@1.0.1: {} + + tough-cookie@6.0.1: + dependencies: + tldts: 7.0.27 + + trim-lines@3.0.1: {} + + trough@2.2.0: {} + + ts-api-utils@2.5.0(typescript@5.9.3): + dependencies: + typescript: 5.9.3 + + ts-morph@26.0.0: + dependencies: + '@ts-morph/common': 0.27.0 + code-block-writer: 13.0.3 + + tsconfig-paths@4.2.0: + dependencies: + json5: 2.2.3 + minimist: 1.2.8 + strip-bom: 3.0.0 + + tslib@2.8.1: {} + + tw-animate-css@1.4.0: {} + + type-check@0.4.0: + dependencies: + prelude-ls: 1.2.1 + + type-fest@5.5.0: + dependencies: + 
tagged-tag: 1.0.0 + + type-is@2.0.1: + dependencies: + content-type: 1.0.5 + media-typer: 1.1.0 + mime-types: 3.0.2 + + typescript-eslint@8.58.0(eslint@9.39.4(jiti@2.6.1))(typescript@5.9.3): + dependencies: + '@typescript-eslint/eslint-plugin': 8.58.0(@typescript-eslint/parser@8.58.0(eslint@9.39.4(jiti@2.6.1))(typescript@5.9.3))(eslint@9.39.4(jiti@2.6.1))(typescript@5.9.3) + '@typescript-eslint/parser': 8.58.0(eslint@9.39.4(jiti@2.6.1))(typescript@5.9.3) + '@typescript-eslint/typescript-estree': 8.58.0(typescript@5.9.3) + '@typescript-eslint/utils': 8.58.0(eslint@9.39.4(jiti@2.6.1))(typescript@5.9.3) + eslint: 9.39.4(jiti@2.6.1) + typescript: 5.9.3 + transitivePeerDependencies: + - supports-color + + typescript@5.9.3: {} + + undici-types@7.16.0: {} + + unicorn-magic@0.3.0: {} + + unified@11.0.5: + dependencies: + '@types/unist': 3.0.3 + bail: 2.0.2 + devlop: 1.1.0 + extend: 3.0.2 + is-plain-obj: 4.1.0 + trough: 2.2.0 + vfile: 6.0.3 + + unist-util-is@6.0.1: + dependencies: + '@types/unist': 3.0.3 + + unist-util-position@5.0.0: + dependencies: + '@types/unist': 3.0.3 + + unist-util-stringify-position@4.0.0: + dependencies: + '@types/unist': 3.0.3 + + unist-util-visit-parents@6.0.2: + dependencies: + '@types/unist': 3.0.3 + unist-util-is: 6.0.1 + + unist-util-visit@5.1.0: + dependencies: + '@types/unist': 3.0.3 + unist-util-is: 6.0.1 + unist-util-visit-parents: 6.0.2 + + universalify@2.0.1: {} + + unpipe@1.0.0: {} + + until-async@3.0.2: {} + + update-browserslist-db@1.2.3(browserslist@4.28.1): + dependencies: + browserslist: 4.28.1 + escalade: 3.2.0 + picocolors: 1.1.1 + + uri-js@4.4.1: + dependencies: + punycode: 2.3.1 + + use-callback-ref@1.3.3(@types/react@19.2.14)(react@19.2.4): + dependencies: + react: 19.2.4 + tslib: 2.8.1 + optionalDependencies: + '@types/react': 19.2.14 + + use-sidecar@1.1.3(@types/react@19.2.14)(react@19.2.4): + dependencies: + detect-node-es: 1.1.0 + react: 19.2.4 + tslib: 2.8.1 + optionalDependencies: + '@types/react': 19.2.14 + + 
use-sync-external-store@1.6.0(react@19.2.4): + dependencies: + react: 19.2.4 + + util-deprecate@1.0.2: {} + + uuid@13.0.0: {} + + validate-npm-package-name@7.0.2: {} + + vary@1.1.2: {} + + vaul@1.1.2(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4): + dependencies: + '@radix-ui/react-dialog': 1.1.15(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + react: 19.2.4 + react-dom: 19.2.4(react@19.2.4) + transitivePeerDependencies: + - '@types/react' + - '@types/react-dom' + + vfile-message@4.0.3: + dependencies: + '@types/unist': 3.0.3 + unist-util-stringify-position: 4.0.0 + + vfile@6.0.3: + dependencies: + '@types/unist': 3.0.3 + vfile-message: 4.0.3 + + victory-vendor@37.3.6: + dependencies: + '@types/d3-array': 3.2.2 + '@types/d3-ease': 3.0.2 + '@types/d3-interpolate': 3.0.4 + '@types/d3-scale': 4.0.9 + '@types/d3-shape': 3.1.8 + '@types/d3-time': 3.0.4 + '@types/d3-timer': 3.0.2 + d3-array: 3.2.4 + d3-ease: 3.0.1 + d3-interpolate: 3.0.1 + d3-scale: 4.0.2 + d3-shape: 3.2.0 + d3-time: 3.1.0 + d3-timer: 3.0.1 + + vite@8.0.3(@emnapi/core@1.9.1)(@emnapi/runtime@1.9.1)(@types/node@24.12.0)(jiti@2.6.1): + dependencies: + lightningcss: 1.32.0 + picomatch: 4.0.4 + postcss: 8.5.8 + rolldown: 1.0.0-rc.12(@emnapi/core@1.9.1)(@emnapi/runtime@1.9.1) + tinyglobby: 0.2.15 + optionalDependencies: + '@types/node': 24.12.0 + fsevents: 2.3.3 + jiti: 2.6.1 + transitivePeerDependencies: + - '@emnapi/core' + - '@emnapi/runtime' + + web-streams-polyfill@3.3.3: {} + + which@2.0.2: + dependencies: + isexe: 2.0.0 + + which@4.0.0: + dependencies: + isexe: 3.1.5 + + word-wrap@1.2.5: {} + + wrap-ansi@6.2.0: + dependencies: + ansi-styles: 4.3.0 + string-width: 4.2.3 + strip-ansi: 6.0.1 + + wrap-ansi@7.0.0: + dependencies: + ansi-styles: 4.3.0 + string-width: 4.2.3 + strip-ansi: 6.0.1 + + wrappy@1.0.2: {} + + wsl-utils@0.3.1: + dependencies: + is-wsl: 3.1.1 + 
powershell-utils: 0.1.0 + + y18n@5.0.8: {} + + yallist@3.1.1: {} + + yargs-parser@21.1.1: {} + + yargs@17.7.2: + dependencies: + cliui: 8.0.1 + escalade: 3.2.0 + get-caller-file: 2.0.5 + require-directory: 2.1.1 + string-width: 4.2.3 + y18n: 5.0.8 + yargs-parser: 21.1.1 + + yocto-queue@0.1.0: {} + + yoctocolors-cjs@2.1.3: {} + + yoctocolors@2.1.2: {} + + zod-to-json-schema@3.25.2(zod@3.25.76): + dependencies: + zod: 3.25.76 + + zod-validation-error@4.0.2(zod@4.3.6): + dependencies: + zod: 4.3.6 + + zod@3.25.76: {} + + zod@4.3.6: {} + + zustand@5.0.12(@types/react@19.2.14)(immer@11.1.4)(react@19.2.4)(use-sync-external-store@1.6.0(react@19.2.4)): + optionalDependencies: + '@types/react': 19.2.14 + immer: 11.1.4 + react: 19.2.4 + use-sync-external-store: 1.6.0(react@19.2.4) + + zwitch@2.0.4: {} diff --git a/public/logo.png b/public/logo.png new file mode 100644 index 0000000000000000000000000000000000000000..3509e8e91103bf7a15bdb6c1d5f49e9fbab7ec9d GIT binary patch literal 95527 zcmV*UKwH0wP)(`~0)UCFkjjYF}$HwMup4D&j z7tGtV*~muLQg=%-Rkcc0MRlomq84rd0RkYg?>jP)duA-|e(xE(=XzuYK!5~EfJm_8 zm<1D=8H^0~xc7c%`ObF~n%1|bx)s&X4P z=xaA`s!~e}_q4ZL%WVGY2S4D){ob0^w5B~A`_6a1Lu=OjRKM{T6@97<1sj$0sHp_uG3UMvwaqd>Jj5%PCk}8tUw9jpMkrW=xx9&a;pBn21u+x~77loElS^ zot;e-Q4(mKfH|flN%KG&pw~=bq6sohYuXC1t$?}&glR4p0>~Rlq8;T!`q)%p237&AWy#E<)09+ou$J`T6<9O2sWW zzU$GnrZw%+8P!w#TI-;%uP?W_u+YwN+{r}!TI=Cjnsyjt+5xnxfJ#ItGNZtTA_WB5 z=~`o=IcC02%qI&$@P0>E*ZCXQu2pJ)dE6KId94W`56NBzFx=JE^23FN=M(0a7}y11 z6hxiQTOmM`zuyi(EP&K9r(mu!P*qBmiD(|o^8n8&qFJrGwT^&fo%sfZHO#)77r4F>Uw7hAU&rVEFKhe=h6F?r4 z^>qxiCuZUKN-h3FW9=&d))G-3MEAYN>;efNOIUdj88D{=B&4WJq!z$D1-26)HLi%R z6L3QZ!HqDAZdhw4ZMi(35G_Q7LV0FprWu(ut!baadW%I>P1BZ|wWVATwCgbJsinyP zvmJImp*1|fK))jDW#&$1E-_G0N=3}XXXUJv?4kl1;NjA zTK}}l{K3M)Lgg`C{^z!45&a?9;NW0jt=$~YnLlITRS*rj$hYw{XCHjr^?#CIZ_J7S z_XsQyIapDQR)G5eUJXLZY!hSQqKy}EK{z;OW|J5*$z=MG-EJ<=Q8qV^MyXCShGW=n1%0NtjPkDi-33P+N z7m3ta0?tG#JU=wN>e|)ut8;U6b6?0kXj;=&y1o4J%P21_w4ObAVx(qy9b4XL*=_{$ 
z8iqs~UCbx}R3t)3MB4wKEO}?Q2gNgEy0a4LQDXTIQA4GYXlcs_Sc+4erAnzwf!06n zXf6I_AvVX)PEXgq$ougytO+2W)3$9>g-hqdf0J7K?~TPC210JQu|)zp6;M))P@)Zz zMd)H#hXn3x428s6#LS3^;Nl0{xE^H@5{Nl_-8#_73%dWhmze?4H2~)n)M>5s86Dze z80D`PqE%DtH?}SA-@pIy+@z*8?J-$LKCfb>+N`xbK@beGvFq7d2OMLT?Ro}A-P7Fp z?)|*XZ6lvz<}9q>`oT{?M1(wm9I+HPbBg8rkQgCIik*gm8Uu?U%rY=M=>Oj%&o_yuT|3M5!|ZAfAgFfTz> zsDN$@bTac+hN;5Z37hDn%<*rvy>@tLX!EIjK0g~8Q{yBt3(fRa)4sf>N~Mxck|fvK z)ip3TH@B^7?Q^wS?O6hCBA}mPb6HAC5lGe9EliZlPcMLo;5=Tf9YWWe7J@!bA%B`2uGoa?bLsMA-s&i`{0tKnE}?C14(e zDFUu2qT^cY_j6%*w5?P+d;Rw9>1wrVAMYJ(TGJk`jg5`zx#{Vi#d3LLk|f)S=s5;9 zF)-w|B^WJIYgE=3xmn3!@2Qatt5|kV1VWMAK866ssA3FipjR{c14eHEv=Ae55eZ1L z3z`^83Yb2Uaj#or#yk_wrGz&s3h&eiS5miE8CIZ*5U`$(!tfsp`RKp4hQa=`w{E46 z`#wIXH6xJEVMV0|%c=Q20Bh?7HI^01*c<{c6@VA>5D&##2Nn!ckVlqWm55no!GJsQ zV{1{f8JI9|+c0jWgmD9mryf)$3}M<3=9rcMC1v)c^|~tsb|HBum^%qr%fJ?9e#Tn+ zVYOEKAkz9|OMBbJj+`2wj4|J2a5e2owvn!`(rm3da{2P*b;_-EYqv45m6^vfDm~SU z+?HL0pq5ILE+)tmp`?Jez(*6k@?i9Zz@Upbh|#ARZHm#N8AS~Ni+T~=!H$n$SoSe8 zFwqPQy4ML9?UM3R_;pR;dg2{O<+BT=)TFzsF<;aHec0Pu9G{pdKM~Gd6F@$rjkK0} zXOeW6P3?;S_4wqHg(kx45cqn5@q7eyC`gLR{Z?fJ4PFi)(2!ac(`3=22qgghAZ!j9 zh8YP1i_Dl3AOWtK09Op*Y6{#iz@!1@ETLkvs3zMe@O3;FqW4+mUZm+R0%nw=)0WNA zNgKSM2Ksn0m%E(PdV1B^Soz42BTvNQZQB31*4x{Q`PuoL(yFb!vtwvxc6Mi)n6Fs2 zPg~~o09r{Y?ffccUQPkKgPnjJd>x}z`!v+BwpbMsRt3O_i!!uDmxe?kVO9q>VA)$$ zCd?+Bfe{ISD0u*(VWSeDvg^Bw?qX)DR1DAL3I{rTD2LT8u@*V@$Xn4X&YJ!9-wz}yc{nO8pApe#+~J$(lRLTbqX zVs%K^7yw%}V^zrL3j9IuR=iLhS)t5~iqtHfb4FM&3R5XC4J&&)21&u(#ULeObSt1g zAoOWMyW79Y*F=rAs1n2>bg*$FW$dqnxRDYROU5c^EpMpR;;)9xCjc}7QYk8% zbR)n|6H$({f_4CsMY%PC0Es*JYwqx8g&KgHhHy3^oUAdfTM|#kdo6^#3yl@fK`dTM zhbFW+Z$+^z#DemK^e_V>5;h3X14gS(u|6Q|4Ha&%#pM(@CxttW^C@FG`&%Z%0_BlJ ziigb9m0G*Y02>watHzikG10qIEiFg-1_sV`Rx0yXu3TwGBu#tlHn4VWRGXO@n4Fs0 zni%tfwf1=yi7M)K!Gv?szh8KZ@)uuz=U)5o)h zaND{)3yBnN8I`4ufho&aV7&w$o~uQ|%4#QIBqTf=0ng@uH6f!wK3Z|lH4bDnurH@% zZ{D;lj7y(>=FXJO<_fC~&px~1(&58r=O6DK|72?d$fvZ?(NPsL_b=X_+`vqu%r>mM z=xzb64H+8(MwGEQNOTNI;4tR(J7;>{SQteID&*;dt;FaEAYRoFTdWI#Es?^O$l_!ITu2#LQ!9md7yURev*bf> 
zheaA_0nlQZ`>in}7TA;+v!inL>Y-d1ez1CY_;Pt@XnNwnfhVT8*R(H>X|0RJQmJ=t ze0(EY`z)+|-ZF1yU=;!F;=qwV%#rOmw-;L#qbCGb2X0RS>osF_$Wqhj4&0&_N(&Z< z1uUivVLVm1ZV1;6r1m&rfk{J{wF>iwP=H}fnn%ox0vOvO!ul9EnGzNfU%=Nuftg2*wHvQpz5StkYb(7OxCtPi(r%BB=kodd z1_stz=2kG1JBS?!v}?w?fH4wymX$l66DC}0@82^U z$1}R^wHUaRkT`hP3}MF75|#dr!X;-S56lh4Oh{VUG70(R2t&Y!m{4Ga%@IRoG=2?8 zL4H$caV#dBOqA?F?X%e3YFBLEYg! zlr36!u=^9%dp2*^6nkBV1bLC0mt2gaQvVtm=bh|QUj9s*0%pL$coMR)Y zDu2+*nVx1SB94>PH5g-o#aNc68V*?^_hQz>I}b<=<9Z5QPCZy$Ock!V2;Hy{QL0Q* zpR`#Ax&Vgxs4|e&S{_OfjInTMy|oFnb0AscR;utul_gEq9TIx2cMMrBfUzl1*cvLF zt<_UuVl51;H;lEz!^3Z0zI?d}AYUdMUA3y?=FOYi01PfqbR6q|Vm2fWc1#Kb*Cn-X$5dQi4o#36;fBa!N5t3|GS-EDop))HPI0$?rVJ!4<<`A( zqAn(ctHwv;o)WKwLN944`_ZWhN-5;?1r&=#w6(RNy}cbB9Ub!TFbYv#DC6q2tGImm zG8X3Nz&1sqwE)zC8p)pAn`22$n~ag^oXyPv<5wypyLjf7CdEcNeKJ`U7C2CEN^Ix??|#ffF?soiN6QF6={U zNeg&_Kmlae!;-u9jQ}>WqTl_4F$YFE+D|W}W^QOD5<2PxTJyqwnTad$(0Hl|UtFj-nu7#-g-}R%Rm`l*@MW zvC!O>wNVKuLU`6p^B_8MnSeReL z2gg3ZZ+`n596o#qb8|EDiNy$TB2nnCTB*b2gyKsu1awMmW@4`4slofAJUHM-9mK{NUc^w*#XDm0%m4sF+Dwn$;nBKkB{T#&0F%-eea$p zKvFk(4*?dTQ6Yt!#^Mx!&Ke0DWPJpz)xes7F&g@)#DPhh0`la%G>f$%upXPgV|*zeZsnMHO`4{?mS|H-W@jx>Z?%NEH*dGKoxux&{VyXdTtFy%CCNDASv-%#OiXd)y+sIF_R6)DlIus&gK zMV8eXW7xc-b)u|INU1~-;vmq~f(pu{uowV~CdDEJrff8qZ?*-MKil)#!l5JklMl~V zzEq630ziN8?QggLh8uUO_68F&id0aBc+_v_g#3MXMzvGi+f4S}a!)RCpE z`lSv?(l2$(63!=t)2YIlm?Yx3X&5uc14~-hk#nC)LzzKojn>vy3=Iuo)#xe=kBkUl zxbORV`q0zUgYKSgi9TF35{98qOEF{Hwyo&t?M9`th@;1jp}bfY=yL#ejDXWA&=xE^ znK=MG8nXYcq$KPvko2vTtmVOKT<&j>W?Yfh1dvZ*HEVm=ih5_eTWdCxKd4g#2Q`F_ z8#7_nlB9<&q`m{4C3>)EYwf^mufBqBed}8o9UYa|b&D@ejYJ!99HUaHU}13qb8~Z; znwrAI_=Et+wQJXKUA*fa0DDlft?O2&vMjD(77(slQw@J_kIkfz^S=+RzG^ zjA-9e(yM%g(ys`s3@~OOVfBf`;&c}3yVo+-huGS;r?fNk76b0FDX*O~l`%!?XPY){ zdi&I=Q!|hIbNV#)f2`T8e|Y&qaiN`xlFssEyON&Ql)%>xa{A$Wrp3Q~9d?}|FLak>3GZ$5O;eRicD*s_>DtX-Z^3&SW zLqmlN)6-)=`q|HR8Rk6zc3Nhc;7PtWPf8Dly&82POTlgq8QUU=+JifMT@>P?5$Eto zeJ{~Cos#Um%Nc28){s!@QwJ6T7|UmJ6ou&N=@pO|85zOq)vK{;)hY}R4WoacAKg7Y 
zXm4*rOR+_=2f18M9KXBGIfQQio;i0G6Sr^U+`020Yhe`Oh6Rqs3adjSPMq8G?wVRP zV|_?CTq9wDc`^V;OtufKhT7YU!R%~##Y(+50ptN~&6+isotf#g%xm3Yf(Dg#cR;TW z2yNO*!{3}`++xOg7b+XdD1Sb6wPC{seC?H2uxZmKS%~*DUlTD)fHFTnkK4CzGMK1&0Nou=|qF!cn zFjIlSIb%6uSPv4u?MHjogOVviyxgqmom;SnjOF8sy{57VV%9~${nMFk3Qmd1NFN(> z3Q%I1s)=zDas+e)R(d$LM_%D!Eb>GW7yT#f z(^4lA;KQ22hp__>!ld<}V%Ya-DHu>DbM5wGp;(k!h69VSu`z7kyjk|8`@XlQ2c_0l zd7cA{kC_W6uPv;}Ai%nH>+r(!&*Rj|lel{MDv~6YXk&pjK8zVVbHK*XNJMYI$SaaG zR);2BN_~pXFBsdwjKQz}em?)biE@k;Dx}>6kOws8xzMuhGscWCbD>^ha|>Zm00gqo z-Om=8a3y72O9_i+Sqi$puMhk7?ZcMMn?Dvj(7^C;A+igVJ3BiuJUlE5eE06%n3L+4X(*dK=d?R(OjDQ!dw%Y8Ca= z*JA|~{ff@D94xG!s8nCI%wJ<*CxHgx>)k?1F#dsi)-Oz{BmW?(S}sN+sEw zo?WHx01j>tFokDiE`47I5%>RY-Mj^xH*LbvBS$eYJ}yQ1gf-43R_ct~bG}~5S^B$Q zlU!3kgp{3EA{|-$O~ zA!YPcW30D2+&|pe^4n+k?K}JW>#u*Ii~m2o90uR}u%+vEQXNbZGrDM!ksqEpGi*o= za>{)S>S7Ao31|bjD2!(kAPg9!hLfWylj z1$CvPSiG@iWYy%4j~sa{>8{~Ip?B(c^n8`mSD3MvfibtlB$6YB552tfM@|77@{Apk z#mPg;7UU1ihQm!0--bT z;*D)Yq8ec&e<%KEZx93>*4hbVBHSy@ngH?uwtxSA-P)S(H3t0*b_b6X_T1sw0uugt zZziHpmD&U0ymWng%_?^U^>p`OKJC#e62Qc;O)16_VUXw zoq6}&1GAM%<&k*5eIdem6&6!tifuaV{ELG;lyGHDYI)288?EJ4EHfa`#*DluN5}{W z>*qjF1`x~ug%W?pujmS4mJoI|=0a1TkTc7fPRE{w@CC@~guU>+ynA~T+_Z%~#TqM!PK>r{%Y#77y&pn6d zpMMTpwrxRQU!O$x&M~`}-D@A-%elMPbJ{yPuztgO`J3m@okKi5BTO%c+Luz6#@;O< zAxAzB+)94NNVj5%o}4dXm0)O(HKsF;Y;L@)$^|oOxCVlhvDH7?A^N; z-}vS?@a9`@;lR7^;{EpzOS-FCtx9@}GCEqDFU|V4R%Jr+5jjK z2_?8wF*a(JsO8P7!ofsg()zSk00qU0m~3lm?2E>j4axlc_Ctpc{bTR8-u>6#zdoJ{ ziaivQxz@uJv@C?da7bn&A=p}GyO|SS$4r9^bbu(AHJ)iw?x<&~cNKFcX6e6Gp2=C) z{jM|5h@q7gJ*AQ+6hij^S_1a^WST?;Ag`?uC<^X-MM8hR<6|NZ9JSQbHJztjF~DMC zQBHvc!&poqi3<0<`*$rRq?VTERn&zGSY`ZQkqoy>;{d#(zXLF(S%AgnETfBvHdu?_ zOKmy_q&EOQ76y1Ppy1?aj?SJ*lBu$_@gta{Ppej~D$e}+H{0iu_z$h+*DQF9!384L zlnIUcx*EyMoJ~24eL0K0d5aB!5;bcF$P2_c9}^DO6yB*aPNa|#2eDlq{A8`~40rpX z)LO#&_3I^NynFW^JoEI^QY3FFm858XzdiUMQN|~k1ErzCL2TZ%5%0e9HfCpMVQnfI zfa?Z0owCsITa-uzBhQRMZP6EaXUrZEg6u-_#?rCV)JcX;|9}*e(V~ z^+P8lD}}sXeW)qxS!Yapmz5nuY-zvIIqe* 
zT3TAr+SZDpp+W4~y-VsOM~)o9p+kpo^5jXWpQPFA$YxSXSSqn`hnNx>hSQ0{p-c+m z**wG(>{5)joY&+U4S@|c;O!cu;Jzs1IvLtmlXDCnPK|lR*3dt3d}2#?XX&S_dZNPz zPK+;pp6mbGP81J+s5cmwWOPO>DTe+$&{tq|gzf>90!Y72A@K7c+#?``K)~3_*FaKwD$7pYSrHK`s`V)t zD-!( zE0bLyX|^o$k>Fbo`k84}3VPl!uAc^WL9Atk(9Q#evpI2%J4O>8J7h^UveiXP?E`*hZ;ubar(h4DNjnKMc^oUCv#1ceemoZ(kp7 z+`Q=>Hv*=Nk`&gYvFKD*G`rlI@k6SzM&oSo9DzFs)ZgFToxgbN*6c%hlMmUN0P-M~ z+JLp`Wy@V6uA7|(ZaEGGR=T})wxDOM7i6oLY%sD5%X2yLe%vF9qDS%;mkO~NkRW54 ztX{oZ9y@mIz|NgJ@ys*N;N5rM#j#_@q-%C+Y8r91DvU3ZHdrMb`3d76=TpMz&_z_h z&XA-=G86)R0i*+dRA(AP1RPC(Icw`8jmVBr7rA`}z(}Q%^j?`To!{EF?%?W|u1x&L z|M*~?zfI%69IbwJS8bA#t?yuOUJ;nEW}dBa@_*Tl#N}~5V%MJ zXb&0PA*0s;Mjq(RS@h)@g9XAs0q6`F#Q=y@Hhscp@$r!j@Q@B+n+^PZ-RUb7jrb+;)>_Ei^y|maSzPB49hfGsJw}0>{fSJ~%ve`*=r7@sdeu6S*KayX}Q?_0x{h55rJJ z5%-nL`Za6q|HJ|w+*+4dN5-&nhfiC;SQ{{Q=YikLTOo9`C_hT`q)>8w)bVyr;X>>q zG(W6K$k4LsBCXNU*@X>bW3tX&)bZSN&k1v~k*reRBWS2C;yzLR=e72Z4y?**EQLZr z07QwHqT5%%%@o3TbP=m2uNKj(SekuPscvxN!8dxB8yPGZI z%ta(ywrmj^@z=lpb-e%n`}pl|e~WkCc}I%w)6-K(;y4q5^z%oYxv1n!0$hm|4r(Q{ zPF~Cd+jBP4{<7$90eS+URQ7!KNp~35l~MimYGz(jOVa03Vz^#6p-jTVSd?pwDF-Hr#*GT$T3O*rnQ*zxxKaVe zYlPWYqn29v9BdXbH_p`e1OO3_?J(8>3APgOO=Ii@W#c1C2M1N8j;!h_o-gUzTxv{O zeSj2KXV4k;bhM4$sEq$Xl9>O68Bb-P<%g+dskZ<|dXb*XGhWWdKKCgPsy9;LRHE>^ z8gQVhaUoGC%VZChC{NXaz72o-n|5BW!uCA^nCe_DTokfL503r@F z-1~A)V{2=xQ0*Jfvf{dI}G z6=Y2S`2>6U<(F~u%-L2+JB4-V7IA*+%gXj+p1`bsG0G&+%<5x8$b5G_iWV8FCGHGkkmNLI+VY;qP zRo7^(Uw@&k?cjmAxjX5;Yu4ajJ$tjb|EKY2t%hwsifKEOeTrGFWm5eF>LTKt-+D?u zi`3$dT7iqEiUFf1A`BH-Kw?8F1CD}=HW)3EmLjxDG-8GJa%Yf`>A6q?`zjkLSucoZ zPxQZ9dYZi_|4N=oX6d!>CT`JP4`fNUJAc4vD*!!tU^NV?LKU}IOdV(dlU3k)g>a=p zxVQjZS_CdvfZH*lZ0b`g$)~#fTUW}@(Fg%5TA*aXtH|>1`D%RO`#+`+!d!H?5YmUw zuB%>|y=oVa-$z4peV;1coLPL~%53?YO!xx^ws=1vLmu|C`#Ux8RK(bmv)B~@Ya-C7yUWSCLNy6w zAw2E~cn6C$l*jq!dfDWv2Z z3MXUWTB@Yi#d%WC=2(Vk?aV>sBw94%ts3EkJJ3=uEXjmS-;_$$Vn+hot(jX+6Lkz1 z3kS#Ym^kYGPL(@qH*XI7^N-CKn4YqP?F`z=j8PKeitkGHV@f@hnyn{QM;d4ifR2#R zn_~frwJnS_tsX$u6bb!#gVs<+H!RPOuiu_b5^2dVt^fQUViJv5y&Q4SQdGhBA@P2k 
z1K+3^e!r*Hvt~gSRkh?o(uUr?Ug`RN?z!ji;)^fhxt%-F*WZ8NHGj7??vqF5&>>49I{@kK?t)g@M?6F_ zkH^S+29*OtpfAv-i7N=b+omL%Zngi>u`sXavHTIG?w+0STA0Qm%a@4X*Ki?MEJ za2p6YSxF6!7Tm>=Xd`yan{nYj=BGZd0yVwFaZZ zqvFXOJa`bl`OR-|;J`tgK7AUKlea~G%Xxo+_Kj7}^PVt-H>J=HoJur)zrfg407fI= zkEOBF;8zNGr=oB@t*ec)=uSzjH(P6WSH&?&U+vyxKAk5qKZL1)uu8Na;g)tv?L&`AdKHXMeW%!|#8; zFG-SpCZ&JF#4iBURZq}~{0$R26=5v05_Pxh z*F(5fCY+uFPR;?x=NM-f88^!gOkA|6z=nL8TPVzu;>t0zbDo~Iz^F~lKC<>WrL)I> z_`~my$0pfenD;Pa8-Y5QK!&YEH+k}k&@bdI_T?E*M?eqxl<500;q4eQsqOD%+7P_=;)9_+UM#2cvN;bd#et1&T|?a9mV?f>x8xS{`>DsYHMn8 zDyxxL8Kh$sVU`sR*C4~MZX3qloRu`irU)n~i;suE7b3R7lzcvaSm(V1Fqw`@0N z0+q#jig3B{&NG#qckR5izC5tD#Msbcu&E8$)Jj-eB8=pLQkaQ4<7E@rLV1YqcJ3_Eqk?^c#3^HRB;GA#DWQkuov{yHHA;$Nm_YvapicQ4b}vZPq17*U*KxB zTc@gItPMB|uM0{^&N82MCZ`#NcY&81njmb9{1;OH_`G&^YO7EvVC&Yc!ew_G_jkYh z9e(%Q-{J7#_eF1OadANufHnKmrN$(O!gZMk4Vee+4vsYei{fZsKr+(zVAW59v*am2 z@cy$hY#UC6I^D+?#Wj&oZhIrfnj&LI7vqHW zCAQScJ#;>c$<q9Po9S)4>=GNTk%=8hNuu zI8Y^=O-a%?7QS=bg{RIbX(^SYcJTBw&j=+~^y9W{k($ENH8(guE7hc-u~d@CzO%Cv zqY%ia(x z3gymw3yM62cOfO|E=HD5hAf~~Sv-yXA1lpkx?3=={=HI4fN$f*jWP#e+qP|Z@8Em* z?eBhzg9i_YGH{ZlviY?RU}PQ%n@_0T))X$P~68XL%4H@_+&dvMcyI~fA!a&x% z{1xrB!m@Oi_5B?sDJasshJi#)MH)OqMsDdeXaZxE;o`J9kRD z>V@ZD5C_Jsy*hZTw7v65=B~-Te;0+EJ9`$jOp(Na^1LA|8iveS6j>7zVoubcTH{BQ z=$0r53WY*}rl+SLdYQ5pjd42 zlF^TI&~ViCvL+SFV$eldz$ap2sPG7 zVX?=0DUc}()NCU|iI@0FLc=xwyHI;3)nEbG(#m+M-9Of~0z(C$E3)DVi}VQB!6P&J z=qcUc&#f%??V%dq^Pjl0)nxg{OJL(2puCInB6#*%k+8l5Jkw4%Im=i30N_lgD|4*eLAbz#>j~l4RfV^!o^&x?-y<@Y!}vqCw+9=?#_+-myM&Uv zWBU$SV~usg#=Z?aVg}X2GOaZ_J37(V--n=H?9Zf?=PYCfJCm113W=pPAfGiJdB=*B zQmPn)Vcy2^io4(@fP8{gtJN?~)1tK|^q;CvffR|+MqYu}?S(1>6AV;%`7;i~0Ij7~ zIe0%w>2oDo9YpG1-DY05ZXHHONAS${?KpB|pQN_leA5@&=jP{Rye=t?GAkU53DZX7 zXbhPde<1;`r&*dqIQZ5B2mGdL4Osdcq$u9bc)G(1n`%vg(U~J??O9YV!n5~dP~8I| zzPzAuZ+nn?vh*wV(Q`e`r-g=1SldF9E_|+&@wKwW>3PD@S;CPS!ijmpjWS~{_32v4 zL^5S8I~FJ5bfWNsMMh6e%V_7rHDDrTBsNP<3ac+bZ*LzqZP|=nyLREl7x&`nr=F6+ zb7Q)y5jdU%&_MQ{>x|3JEDA}x^DSkshE+Es!ed95hV 
z#ynmM=RNGnJJr5G+p}j64j(=oS!41b4(oJDw}hoQtbJbH3UDxPyRWG8Agy>Dg+f7! ze)R(SlNN$ET74tuqMUe#ot>T1N3vnV27Lef-^YOi2W0S-B0p)#FpKv&WgxljimeyK zea}IX-HM0^J$Z{QZH(tTfoD5_u{M^D@wR|Pmeje)GS-3Z9qkz_E-Cy3nk4qfd&Qn` z;3?NaRSy`2R^Pq9wUzLE2jS2x;hjm~*bHI3>YXR~-<1DcQ}%N?=Br*S-ol%_P)3mUw!p!*!k>EaZFl@EqBmzpG5GeTRk0IDwWXH)!h(H`OX*@fw=op zPuaRXlOu={L69{Q%4fkgOl=xi<`s7XngH@KHhJz`kPpLBOw?wq@q_(^LqRB#&sQoL z^3Ouy9ha$n%SW0!XiF_#a{Wn+KkDer7 z8D)#a{mgmOi`FlO`eqEn0EJ>+!ts?z;r%#koU+#VGc8=CNGM$5|6V?qioOU80_z9`|X7*?poqGLZ7oM@Zfi7bYS*kD{)^3wd4` zH~`5D1M5i*#qVWxFig`F*REZYwAFXN`(2qevbeY?prU2ClpoKTW}-f8iojR<7_as- zc6KlZBFHc%l`&3C6DJzTv8FS_jBQ zkpRe(7=hevjd|U7HX)A(19S#!wO1*#^AS1MKNxY;N-d zn-u&wQ`;bD$anm`$8((z!yZM$JKgp5i#J}PE$|Wp+lsf`76Qe5qugY3`9C7kyke{gAosCyxlFaiMHhht0BP9)OU-Y-nM&95l-XiPnU+or z$~fR*7zwN7Nvna}ZS~FI!sw}~DG^oFsk;SnopJ)tD%>YGMo& z2t9d)jjfEHfMuA<2eZI@;%VDNN+fBLB$1To`1phjHFFNhYp?x21_uY^kiFM*<~VOqtRPIaa&E$Q*4#NYi=x+}z*0P-yo z#vct^ywq#3retNJpI0NTkJ(tBiE*#Pb|r6|2eF5K$ls?|@L|}4M0-n76)htf-O-@P zh)pUxq(21yWK^NQ$oTOM;O94it7R|kua$z1l#B|Po14YYe)comzI_{$lT-M^Km3jm zGMv-(DCc&3(dPDtq_A{76JeK}t@@A;YRFYlL$ufM>lJ5B0C`Yr=vq?^yO%Pb%y1)P zC3~LRC`!xYNwmhz8pp9T3;z7)KgU1)(?3bke0Fv=Q}YdANyAt|q#2u9ebn)4ALIEB zi@|~)0c#t(#_JB&<8-ON2zx>>@Ka%RV<=Sr5gNPQ{fzd`i1AXFLSINi&;I2&;m9;$ zBKA+wq{TI>RLVGf_%N!~3Z|we@y&03Q)Zgii|c;eGCgtN(U``PY(%u2iKrhZ?%YVM zPYLPE7F<(@)_;!gyg+ueY(X{uL(S9*7%<1=jSD7{evI;K+;ynjvbRt zEdUt=OLW_=9I&~S@r{1QHwGBbv;#TqWlCxWQdFS935YnSWO{m9)?1P!*uH(c&|e?*{PHit+;zIB zN@_d}taNJ|oKOGLrM{eo1+!u=xCtN+!8r4z?^HrvE;>Jn3m#d@s#2+lclh1!epkTb z%9Seu9+Ib4D(fs~bVUZQ^)bFP$Wkot&3h(EEv0NW3V&3hX{EYVkIFs^t@a7DI>0b% zVHjz}7%l?;Y8Y5wWUML@{&@m8v*6Z;_qBsS%e?nXmoDLZ-}@eJ+`NhZ`QQCJ?B2aw z21&^vv%9pgo-}jdq3BMbPX``mX)Ev@12VH8t!Qfk$j6z=y5MN(%bog5cjxSh?)g zOew`a@sgc@ncKjP;}#f=zxs%jxL3A)oLBMSwp1*4oA|~py#3bO!q@-h8^02^jQf2M zgwpin4u!m8j23`>J&ZpcvDnuQ6a&B*Mv_Uht9rS-e(zQ)p}lDjY!BX`Xl3_!iGELG zbnD}(cAz_C^hJ!eK;f+^;7SFk8Gp!{G{NnO3H?2F6m-w@GiN5Tj#-KIcf*)6a`pm)&!9ISQv&JYVDT4A?XjKx?~)(kso(SJJ%*{F3Bq>C&mw)8Y}?6-_PjJdnJN5 
zCV-O*%J+-}q0B>%;}~zg^(H26Pe@(lU;K-IAv0<2Zp?i$>LT@7%1fZ&>wui?4dyI^ z;d{yWPNh(mfh#L8^lAdgeN4t2T?0d8Z5yu1c1^bJ+H7xj?PlAyZEeP8W3z4BZJTX> z^M3zeo-^mUap}yuQH2>|Ok_KWS!bB;NYIT?yZ|Y8JIdW4bzkBk|yJ?g;v35v(W!f4~&eAqv7jXT(rHJ+uD*U zCNpfTxEhe|uJOxXXV`yOX*CMN6t?jC4)%wl`^Ub*zaCLM-g5rbx7NX8h2;LW{#a}a zBsggMzPkYtlim^dK$$k&J2;PyFSpOCgZ;M>Vzg_sGSB5xnH(Ho zZBVEtVg_m4NODDLAOPYTA7-)&(;6}!R1;+4NBm!AzM!Pyh_J3+>r%HS$0v*3Z%LoatnHv`1)O7q&t(vKG#)Z|d7YA>mXWOiTFj%4NlJiEwo zOh&aX>JT`%)3-5pr4$tnm_UX7j9*~4A*)|){z4i#-%_cD)vB7+CzfBEVu(o&1^;p& zh`LeOcfY3jCdU2tjm?wAO?F8e`SOcfd`&OOK zN0zjwOpz+a9G zFg`0pbDz~w){S9P>_>4mF192KazqvvFK`PzHsoy3s`=3sl=ps5?eXzIJSN^3I@GYp zCv8pZ(GJznJ;)|}%op72d=Yb*oeD*?(VqMQN!okVlzIwVrPo2>N|E1O`>d1=2KgU!Q%vQ3{+FqTocv#KbU2kkTtZ4ub!v*3p#} zQO5S{I3}maTXR@NXZQAedi^>sU58HrNHSu~ynU^D&s_1;(Eq&uGgE$Qm!C)uXU3|EPD!Vg#7M9E zIC`?a;%xr!`}6Jco6}dm_F4DCwoc=A754#dDQ-L?%vcMBTrrJuVFo)mbigR?v68xZ zkLzQ(6^F1AAVU6OnGQ`@Q$GjWLdVVAN{&26`p4 zVcER|!(1|!CB%Y9a8%~4SQ1c*CKqcj5k8iO~)VK-wDi3N*24{X>UPz!zF3cj6l43AI~+Tm#3 z-uObDmq%Z(5JKhT8?k|)6nGi&v9V>2(f@fcLeo=>uDE z)Tvy;RDm?92FsLLrZbJD$RxLxIsEs2ea*7%eI@YzI>qgHK41(a#7(8rmi30M2D#B8 zIAgAC&3wEu4ia^w#mXYg-^^|(O-d2mqkYyS&0c7ttSMCBS96-vy3Xd!GcV>04T_KT zMl_-j;}H*5>x(R`t;OWzqB%+{Dk|t$FPn=gEPrb;$QE%{N;hPd_cvXo1RW4M5mkYa z)(aqEjZ8VM61BYa48FF2QjN#SlIX=mVVFcpYgk4|4%Ri#KlV@6PefH5R!m@Ylz`q2l_97^Q%=uPLu&1;qdM! 
z6r$^!y3dVWzy7ND$}sFRs>fV4CxbcDNqGE;XC*?D_Exfig#LW$d1K7N`;{=YY_QYa zMJB#HpQB!5p><|#t$i z-aq`0-xK!b{k^OrN43D%2sNskY2cMT*Rw`PA1JItf}!vJHU>2X9hLq*e_`D#AZeNm zc}y6sIO3sFf$3t!@ADGXm?9)!)ET&cx@7&lRO=s_)sCnA7LH7$g98@RriXlBd|(vW z3XG`Kt?w_A?2u=8^MD!v9yLI#QuvBHm8)5O%aOrhWY)G$EAC#+eOyy zCC!X8J_ngiCG-zFD8HuAdZDvo6N5`|M*T!$EFG+@z&=*Tr-2sCYcG6MT=GcTD@62R zG1$>k^?4?})_}bCx1Nd5%F62G`yce;%PIj{!$RNKqC{(URnk)%nXEWx^Ys`g*qADv z`v#|%G{3qv1N&xCcz8pTV%<%jrscjd_-cV_iC78Pa6ZjiSpT_Ui$_xRvnxcpKta;# zKC1q;#-oIpubzFb*Z;=6^PQfbS-jO|W*VLl3~vM<5FhpEGE84lxUqv43rsguBohXh z&?X7BR-)Oh{W`6^SzJ(@YD(5WG$+Q#$0AZB$!lGqjHZLx7u-hyK_13}SzNl%U0dZp>p+zHW$@Lv{Mg6NpE^e3EOqS)8sPDZY~78Kv&q!&-NgGk zR5QVFFiJnw$7>O~q$fy|0>IA>#H`p4`ixif?`%3plLnXD#AC>>O4 z=lR*t#wY+0CJWB0Yn?aR@Jq8+!VTC@O$1Czt=S5kY}^Cm;|Kj z0(V-{q?$#d5Tn8*t{sE^4c&M%JkmfJSi+6aEwno$t6BUYOGg7nG#KO7^=aafzf15T zsEDrD7;w#3?&SJ(-L;>CdvA7nHd65D)#hgAX-1uQ;}a1L@6IMvipD=lM+JNvB7;Pt zAM3`8g;jMd562);Q_JN}88i9qX1~$$s?CFt9|hF__Ua$!uXe4nouC2&Ix62t3wT(`o7z3gV13;E5UZ*yKipt7#!qh(mZ`8E;c(}NUts{{b)Z!j% zNLYQo%Slw7^MLXP9+ik!_zxfC@_(kMXa^Co_7aWRB@+v9*~v+D0rYMuD+-@jQnI4ItBQuYi?n?#C?7G-V1 zhkQA0_7NGP*r79`S$_Yd=X88LUvq8yMh`FnFUAyAJR>zOR6c+=6_CEe$zMUG+UFQ( z0^p^H2%t$Gn7tSU#>69t{Q;Oz(yoC${*a0%8nXAh!w2~?DDuXsEUyEpPXkvrfZ=CA z?f4-moYd*pr1|#ET!d1j&*vwJ$Ub~L18?YONd%T3B=6%aHQzgEf{tF$JG-3@;Xgm9 zh0t)8pSqs}`lfEHRGNdJx|d(;`UiAK+Dl@hH3mBeBs$&|UG4P-`fxiAMHDc8(7 zYy$#)3hF*@-Hj_?6QB}G)l8tIQ4KJW#{^=5@6|h5xss%!6m^iX^_E#IMjgV`)YPHn zy0#{iO}7*W@Hd45{-2#Ln4J!LL7R%+-8?01>wQT+MOYrc`|>FdRb0v07t(`!CPj-y z@#6^SAZ7RtrK|#c_*e)~orXC}0JLzqx`$JA>JJYOfUnPoT7Uiaxb+zgI=+cZiOODW za21AO?bg5fXZV;(#4E81zIIeLREJvYm zjX%1GHij1sq$C+kpAC*!ml+yKR0_k74xVbT`mBg%agFtz^|iG~q0hZ|BVUj)a$tdo zOa*Y;+OfWysjk)MUdHW=Z@`1A`kQ$FH zMgrbb?Aex>1)D9uiK6ANGcN(37v|cjRlP9!W|gUbjmMtuL@B5Nq>SK#@1xu4U=^fg z?~bz@_O>!G7&633Vdk@(djgRF%}fWxWv~gM%2dgbY&s~11N*4B5bSF+I1jF(euZZfBa6*gZKTzgG^siq@&xq zD%0hSQ)AyHgB;06jcA|g?^sNL_BY@1A8uPp^WHlw(fBRn5Z&_@EVyWr5dCihAvXb# z%Y$^T5FZdmkv3cH>IvS}s4}zO`Wt3CEm{%k$LE3sgoKRFsfiwR>jG&E4D_PEWBMJ$ 
zf!xS=w8M@lK#qR-qV6lH(Ee8rQ95lcgnZ2|%_6a1@rV#WFhj(EIo1gvw{VYs72y4! zo>r`*D{V>?#7O}xs&@)UBIT@)f^(It^Zp>*`{SRUp>owylfI<+hH^aq;RDwgOd zqDpj=ElMGoP`aiT%y7hgKQbnEYt{ALwqa>DM3hA^%ZpQ~rpFSlGtf#MGwJVWC%=1D8{brW@D&OkVzPSz|H#`TS!l?jg5_& z-nCI7Y<+sa+?rYdQ^eM#C7E?>$3MuZsj!rv zYn_$8^6PvVkDCes4-$&_=3`cL_1T+zwV`njs~oF`nq6-8C6rsXz0MfxH$mY$BePCt zrdC$6kfVcx;+>jXJZ4yR+Ko;AqwIshtVXb)^^_kAl9;wu8i|XWTk-e8i7UM@9a9w) zdaQCp6_Hv9Um`0M!2H51B7@?237^-wDK5Zl2pu7lrN&X7OxWvSXmNeLouRxl^2GOG z7^(B^vSVInN~EG&EhG5K}5yfUaHrl3Xv`@6~RE37BS}jNilwS=V!cH zy)d@4{&7(4Vgi-#FRgtN>HZlAv;((MfQV9c0QOX&p$Vq!Nh3%HJ3ry+Vc{`W9xT%p zPHAoHbKM8?d_epa@r&DTw7L1wAR^%b^M{2|=i;OXjofS$h4XnI4iQ{+GgcGu%%VEa zl6I!8dsJtewBvCeWd%^n$)0O(w)g_cDwc1Gf`o{?KS-Sis}kHST?k}-^MjIVJDa$i z*xvr3eC+f>;hR_dW)Kan!hzOa2DIU>OYExL>j)TlR~bsz%A!&Yk2scgNMWnD4u)p=FsDz*_`Pk|_rzil z`^T`r7Be!^Q>MmkRB(2$4SO_AALdDgtyF$$6}ZaEbY9#Y0IcbcVNEZ4Thm+B=c1O; z!T0LFrEx*MHNbbzvnVkU^~Zq7!8IXb0)Z${gu&V`Ik;l#0}fWfKt2I?7B>Evd@ULQ zyJRz)g_v(|2pE!?vl!@3zY&K6X%ux(EM|y7;RE_Bx-DeF*do8ybtLC$doV&s1ER@@ zGbrObsLAdlh%qcwgu|tRqD?hpvHxzsLIgtqO6yXB`fo$G=oMVH2O}}^WHq_S4jC}& zLtvRX@d;{77rm#M8F)0!xKy=rA@Ki|&87!bb(@?UJ{4I@K8QnHce(8*esh;C&c-6f zylr)QK?iNj`UFCs%41tDLV#kuYYGTMSNu>fu$;t}KEsZR^PM1CWPLa`mM^KW_U}1)ZSd(5SNF{|f zQ(mG3IsC3!WU4sJS)oLppPz3iHm|3;1yw0Mst%4K=n(lMCP9hI|WJP~q@3Ba`_S6W@KhOV~ueO`ggQ`|I zNDW1nD$Dq7Nf3=_7+A`%-5NL+=}5assW{fPE-=iMSAxH%^%rv{aI^tVw#k!TRx)yS zOG}fRhl8u7M$={ppbgn|cQazv4!lPzXCmV)OgWoF)KH1_E0+ec!#67(&&EjF=3~n7 z16UV&{65;%elt-b8%>=zn$PTaJ#7z!-W{IU6}SAl7;)RO%-1+!FRp14SNg^r#gHgdtqma=uFJM74sYF z#Yi7Cz%wLKQBynXoLP%6qSQsx(X8yZ(6c1Pn&Nsknl;hqf2WLIxBT|UZY!yG{iCT> zfur!|#!>qi#9<9wk48*XBxRw9#>QQn_|ouRKj0a8N|XDa?BtSM)O2jWV^z%Fq7Oic zqU-gyA|4nZT~})u_;Zq%;F6;8U!*G$Z2P_0DoD?Igv{t;mzM+X5Zf=l^ zVgNTHis1zUM9s@)acS26NQ)CI_XVFm_6mFy30SWTy9BB^8ZsvN6dH}+)f^U<&G6VU zkdc8Me23+AqJ_QN-bc&AG~8sfQ%PDk$63nZbBA0P$ocMHkjekYV3z58p;R%t#Rb$w zPPmP#N#GFKt{tlT?i6Cgni(Wy5?Sj7|EUlhMA!67+uGA5T#&2n)D(d6ITp85}gxdn-twGO&iXSU@f@8U))) z>=zbik$K?d0h=qEcahU9h0*ZQOAGw(9OW(Lw1@+BFd)Htgar^4mch0;0uGG33JV>) 
zn#i~jM`m`CJpCIinYO`7t$y-(Ok7+XvtIULUVLoq-ee|w(4jF-`gU%U}!x`U*rg&)WkF0!4sa)3)-+ z?OX5(l2G!vyIyEO7TH4bD7l+>T0nbbnYV#~jS*Iu-xvsBc@ilqDk@NPQKk2lX@b_= zfH%hU5)(B)#+C^Nw0Ol}#}^pUVn{%9+i!`wyFW2-FY9IDztv?;tu>n8j8W%}0&w;U zn~Gt%JYvXQB@^6!3FKSQ#1SXmQKiCxITI6F9r=CT#oc zX@x_*Op_kfe54IN;o7}>)2^%;OW!mkt@X3&^drf07qME8`z>*q>djJIqGKUX!|Ryk zJjx^MO4sQ;p%XIzvYSI!JzwB)#6nD$`xMqrHM-7eZ&pt5f9$u??sA-tE2e(ItsXWC z6RZg#X(3RkWz>1+6`ItBiF53hs%Q%A^!zbvn=T_0zD(}E+xe={^v*-l3bEGAg=z0HSFMOIH!168lX?IO31>d-?IR0oVy^1ivsvbDy1#AiRw zX9IF=A>$SymcE@t8^}_<>CEe}Z2XQ92QN>|vzAE@?0S>#FIRt?x$B zT=XS$lwS)je5#azZZPJ7IVb+R~AC{zUb={SzGw{_}HIL z>PIF6Wf%U~X{)6|8XVdp1cQp(5UjH~%vN!s)=h2eR5*Kx&gBUQf%SqX{$D#^e0j3m zE>?wdka!~Jx=3L6Qt&tc$}Z9KLQm-+^8jiQTO61+(6&3sb_r#j1e;=1AMxvt-TrF$ zh{MCFI<CUY@;iH<;bTC5rZoQoJj57+f) zM%Rna1jg7*y~MAWm!r53vXD_xy-GRP9;%fHlW-QmfHZBo%dHFfwKO3w*y5@_mP8_r zE$CnDrm=Io+Cq@nfX~qVV;5;o8W>J3F`G0!vs$Dbpm8N0G9h6=;UpX7WDG=`t5aEg z8Emm#xuR*d^Uj|sk;Ps{AI+#oyC|Ot3P1^-qvOv@H89pki0#GHYIIFU0arRxsqu_q zl?=K8G1F+al)t8A8^2ygtweC9fA*H<`Owv)kjmhIbD0y&Uj|wQTOF>SQVp7Z_573# z0jBG7)F)%-ayQsh28B>Dz+NZXo%I7t~@%YC(y+(u)d) z6lgs)9DM6rm2zCci-bZ&un`GIX`-=C<=OnTH)^{|LKC&S+U`o<0_vNU+|F$ehEqrX z$rS)=f&u~;`8Zld0uY_6lg}a@eQ|VY&>Fu@kZKvK|3GdbHEv+baKyuRr5Q6H$g_Us zvLBz>DoiSJkb|+3M=)Si-2iEg?s2eQqbV32Q^`&YES_?^Vf;IB^~bk8{R)(Zl#8NM(1+?LSiWx0ZJ8XqGP6gSPAoBTU;2 zN`26{5!kSnC91Ce7^JbSEm4tpBY$S3)eeO4O(~~ZNfn09BWOP3tk+K^4yeV>b%YN z2dCdkT7R}+j*3A05daAh(LpdTPx@_}Qn7pjEY$5LqgDNI3hWp@03iJue0~E&(lH5O zXaHUIY(ZCe&^(A44S9QnJDP-m!@S!#BbXc)%yfK&x7SomDu7H>-i)yGAZ+lX3%dC% zR^zNo^KHwM^QR5?h!-|G0(OYc(-WDaIqjln2(?w<$XNj636M_B`}S62>}~g>XkV78 zb_aey$x3KCM<^VWuf@BPrUI~AFRY%2ZL&(@n#Td4GmDR!o<&;7(z;Hmp1v+VKd*+- z2r>V66f!6gR5O*+hNE(Jakb)A(YOpqoHHk|dmxOV!zfIPSAacb#8KP*=JaifB|>{%nY@>PevzMNn3((nJyq452? 
z)5gCz{Pl%WthqX4F1yeeJW+G$Rvk!wqx0|JZ@$u8VWru{xcK-3b44SA?+O5CP%c`< zbUdK|)iRt(0WVF;V@lrw60~P%~LUrF4=((Y|-$z%1Tqu}a?3(h^ejXFg*N9l!+FVx><^ zEO$P=Nd-rkg9(BBa$pXFPLBayGWIO>(*n;zqsA#GLWu$M=A@L`@o{oZg}Vuqo+Pr_ z<*^5#s&2s$3tqGu&ZQg1aE*yekt<*?8R`MJ^qg*aJ-Sw5=v0|aMZa%5Sw1-D843u} zi^E>`SGQ|p^f^{Sls<h%qDW|K^*zI$&K988keW{iWr7{d zpADc}{$y&SoNB0h(%|i^ESsccv#+G?9;r{jRyOrpA8OZM7KhjpZpsohV2oe#TEkro zfcoSvr9r#-#yu~ULxJ7EeYQC`MJv5*+)wxR_U5pk9M8%rKX=IfPvzHb`ZDmlhGA*XE<0CEncJ9LaY!7#Xjh zNJ`5)Kj=}mzN>^Zn`DG0xy+F!K(VIltX}z|;5KP#t$g`o;9}EQ8LKWI}u?HddJ~ zCoYI&b!O5V(K$>y!LFFzppnexA&^nfcvCA`!#Lz1)IO%Qv1wVll>PZG?<+>A=ZOeV z{_Sa(HujO@S_R|!K{7Zlro+6jq1BbQ$NcqjlVS4ik2gV6r{VD}-C?NLqB$$+kI10D z!-7=!cIGE`fe0zfYNo@81J%f(3yg$^0DRB>DSj49qm69xNPlUa%c1Cft4mPLaYV0H zE&?hR182-*C?@sb;I95*PGhUf1SH(o6)Ml%`OPiQzrxtt82K>3E^BeN34w5T(?XCW zSyfdPBa_93dr}wCnXhkXmn=+}qRG;r)Xb90m6YOMLb#VH+e|*>24n%ecL6=Nh1P`) z`cE~gP`dbQ>FT~SMrg%lfaS$|^H1@!(8Sb*e)q9=>ZoZ)I~}0 zZka4=i+R9^RbU)Peu?$^@DdZw(YQMJg{9sbnuUaw=Jhtg&`LQW zU&^P5Q;x0wO@f#vR8WFFm>(0oG%Dl_e3yQhH1EbHYOW03FLI_U7ErZ?NAk3posN+i?P>{0@oO^XgKvFEIv3_qB=jN?1=wp%|!bOJ(Ywq`Hs)q;-c z#k|}xUk->QpuPK6r-jFB+7k3JGB1dtq2{Y#v*7ee4OdZvAO!`Gm(?wI>0OZkrOJ%8JmHzBnisdd79ID5rTHL=Bj;Vo!8=ZO**f*zp+-BrTtQ=G?t)I$t?nEb&Yh zdOalH!_|-L&#}ypi@*rzK-egB(6jJD+7JtO0CKO9ke?+Rs%juZsI}Eymt#ovDz{CRu%~Q61(JmVa_6RV{ z6`w$6txXf@KKMHva+3{jzVyXlnKYQ}UpZ5wnE(ra_gSf@9H#N;`8;y>+Zj8if0vpc zndzMi3VRUF;4=;p!?uCO7QnLJo^vH`_qK<9XX@jq11|NWo?p5cJp+*Cda*>I&H2+J zZJ5c_ZL}?mFlILKTd;F-NR1`7&9}YA8S{i)GXklfY5ZiB zBF5wb`DaRK>7m9ln}Vp|14+R8$#@vXIe8qOX_Dq_X4KWZf|6(sTk(hw;~RJ$?@3CU znL^D{z*|R^7+6~6eW`OMOLC-krpjgEFN%ejc%n85pd}nI1IQr0E6p176VjOyRbB4h z1vSB^0mF62YYaEK8Lk}|ufq)uWSI&QswCrKd7<0M`%B0Qn8j%y(Rz$(rCb*89v=AI zhlzl;C1k=H5zsJb0(W{9Z~HI%mRvq`Ms_4h(OZ0J)VZ`YO*Fy!L==|wK)Ab1X#jdrEGF<5)u&ErXOySw4P!=fHzDnxQ7467nmSCpV`4v{enN}pJ zNwVHRhzwM{$>36I!{5Vw$>h6Z$_8{WOxTCc)D{+L>ivhSx{WP|q9fF9G7g)d;FBbj zPT@>6kX3jeFe0PQMft2U-0a(YC(f2`siZ(I{jY&YEhkh~Gq6gyKCQ_yDu5Qdcj15~ 
z*?9Mab#{?w(gj%$)`V$nntLQDga@0-8O&yxe`R%aLHvAB+&inDSLh{uqJE_zU2IRpfI`4+)IoiwXXH#Zcgm-{ed$GiqS}5D^H}=uWARP5pv3V z_il<)JX;+jVa5Fmr2i4Z=lDw(i+Rl}zOHgz`n+h_7!zX!r8-yv3+5S#w>ob%AO;-g zV4DAtvVvXM7@@2LqTLq1GtG8ZfiOP|yUZpB7DcwC?+0Cr@G90HNQsPh4jyiBBK#2C z*4X1|S8;(A126l4k>Oqm=$FS?7AJoOxou^@DEHl)s2(AHrrG({UgXvnRF71h$ed0?h`PY)6|~F7?tXTbv<<1_it-nXo}>;NFjn0zvVQ ztq5p)GQ3N0D(Zkv$MytHX!!iac1>E|=qzWVG48LskUv9_QV z4f&G~_=`QeAsxU4iV%IGfp?_?N z7ypF|PZ8#~OP0wajm`u#!>H)!X}K{CRj}00(8CAhgZ*_junv?*96;O>S@g$H{)r>M zgOt&2CqB^C`RlC(%)J3x>q$Q=V4=>bRDg_fC&&<(0hY7WY{%AOh zgLNODy$OBvQPuLocd=`(68fkB^so%sy7M-+XB|qcJ&rL3{*kh*1n=rz48ez4YABh= z7C^=$x|sswmsuLd2L)Hz66@3E$pd@12ynikC&&gjXdmai<)ipOZP!Q@{5cP`T0>Od z0Mn8zQU!2urzc_d4)*Rh1o6u)c0+iL4D6P(UK?-+O|$3Uji6?~vrk@pSJDD&a^!iD zD~8!vfA&c#F|U?BX+8}+#Sf_N*dz9V{tPVLyWlylinHk&6et#!@$%Xs_Y>T?8Wzrk z)f*l7g%^xV7;thS?s5m{uW5|1(+7Ln0x13qU_puFX#sdny+`UjLO&k$g1Hhou;3RA zQ^!D3)aZ2hU4>k-Ud$9M#z*L=sukB*mMC&HxBSRGz3|PiLQ#Vp9bW)trS6O#hN-1~ zMY65pv$_S(o@-F??vYB!#YJ*@hWIYU2rNl<@`=Dva=>NP7Su3)+VZSGqp3NQck0LY z^4%l8hxfqSpZVq2z*W#8Xvv8Uv7k`ijSY?$SN_KNX!uL#BxoG|odAKZMEC@A)IPM_ zHe{y-$?EA~b_0dfX$w$a61EijZm`P(D?m*p)fftst{2TJ6SD>3*$k?LWy2Z{BTUD>ioL`a!6)5u zyX%%AI6{)Eo_J;!vy6UmsY#NseR^w{Y~ILrl=}~$Zfmc1aBEC zQG)kP-}v}BIP5(8=sE{0R7w&Yu*yblxHvf#{@o4HnQiw8;|KE7A*|}qlS}jtaKi7{ znDJ|0W!Sb>^xg7i%8&hyHS%v=%JV_m>P9LL&yzS^>k&S!+zI=7mb+#!wqJu+*GD)k z$<{U2FiW8|RY05RPDR*G69^QyOFqX+Y9gKoeH08qeFbr87P`VZiG&}AsyF$dMwWf- z*s%Q!i!Md!92=z95???8vnmVsf3}wHc@`_qdt-Y4P!wyn4V^YIJ*OS3<}%Ybl=u9u zmqd_ZGCURZUYtn`>Dc#!JvGswv%(&0C1bs5piSr(cW_0ehzL>kucX#=7vy5^W}p_K z(cps?iOUh(ic?H^gxoiDvsi>sqH_A~tB?Mthsf@|xq5Z)S@mMjm7?_OEOGU+@LGqQ zQ~-MnDqr^9_j1z$S2=^IUKPQ81sklf)2n%sHh!Vzo!N zPi+CrY6sbB`)|n9U!aE-dPJetwGwTM3L42>;(>=s9t70VV$kB=QKH|4eWKSR-z*<; zBvv_kbyk}^jglpiI)A!2*i;3euz}YNU>uh1P2HXGmaDJ(=!ax`-EtZD#}vgyE_`qT zh{iK269h#mfG9mx6|bJVL!oFxrpCiQ1f&^`45T532_idvOnG%~&huhd)!gCb;=-=J zOs+u&p%!#=v2)5zW^6;!XCy{~le-Fb@5Zs20gUR<{%g}7Rl_9u|GIT#(5-td9Lfz{ z8urFsh9IP~#q76&?^k-_C(phD?(e>kyq*lp!mu@AgewNMF11Ot_W#1OZ`gE)X{tT) 
zUVe+Gtyy_m5LSgr!TO=jnRJMu5Cc90TRahR$;Lgj-YRu2^OwhzHv+6EU|7TC$157! z&bzlq_)kbYoqc&42z9Po8cndvxiUm&hM-$}>Z8iGyI-x$hvt+QztmfZ+HJ#I=_N9X zD-Eb%45+F((c9Wd3fV9lmnI7GfyDGI@a9yUb~u=1VkIi29s!~XE@T&HKke+Sg&f-& zosC>`?HCDfLXuy1v6aAC4@q`|CG4NA%4UfZG8ZxvuO1vbPsJD94{AghGJ zp-N#>#exufM^;jqxc&rL@#B@a>~s4oa#6DfA0f_-98XVt2~C)@fthA`6o|`nS@p#;8&p z%)~-Jg;zJ<-=1|2AFmK6?XqP?fmLJ*@(PQgn1Xg>pU4p{S`*xJ54knQopY0`S)`dM z4jPdUY_s~P#A>fw>V%ELD#ybb&*8Zk-0DlwLP1a0?1gX*EI@V)ampwZ_4TsD_+Oxp z7pbkaHU5k^c*Daux|`jaw0d-((8G41O_6a>FR2hgst!mG#61bm1SY-)&FZ?7g2?v1KiH;c8mN|sSxX#Q ztGn<9IDvmqPdrdGNrZ> z9K`Lnh8r`;zRg~2>22e zze?l4LxG%}Gk=1HpFUK?s{j71ZYGHE{p%EuUS@IvzPWRzTYw^j;%`OpJ44E2Pt4J? zj`o23Cl%ezVkTEwcDdx~*h<2;TecJ|B8^_%%HqZ7u>1*K=s`(ovg2glrtlm-@710S zPsM-Z)4FVVk548FN^pK=f^vqEtkVBvQj^$7)OXx&9Mw4;c4dlxZ4|UzN!)DZk#xEM z%sfb@x8;DoRW`G?_65=$_LA0Fb(K+dloGAE=B4-GM8C=gfalpvVbvJo#4+?h~9{!uIsae7(B^HfY7dRa}|9 zCVL4Fgm)riDM>zv!V##EA9)K3Vu*X593wT zBYmpYlHs*Mu=4^IJg4_8w3k?hJpgHI?Yg8oQ3zCMO$k}}5b(Ga6?mVY)O-Giy0f}& zay!206Ga>yW3lF@-`9Aj4xNBNU;=Na^HJQp&5^y4ktimBk6T?`y{x=kL`)1~eSO{J z62!tWIaf@;%V112z7x{cyyglK3$-Ad}>k^hm zR3{+ttl7F|)pteJmuAhXl9UU>h~Z0G10_1B;M;5ob=_TJ^|C=yTlg)ZE(@6UuCB|4 z?(Q5GtN6h3!dAAzEZBGxBsa}As?TN2xbZ)CPLgxUDr-Qa93PZ+PlWeiMe>Jp{OkBm zhNYe2(bU(}zbY>f3?ynq?4dq_@gNoo4VcdSa~NdsbOb3aK+{8UmsqCljZKO9Eors<_(BO%R7ihQ$q5 zJsX`pKvcAgyZc0ETuh9Cl~o0Wa8&{`4#|I%V^O?d)K-VPF(~$otyW`n)wz1!!9C{e zZkzLyAxtK5aVHJ=`9UE+>zl|IlHfbiq%gBVRLzq}%%%^w$NN^z;!s>;SC(RL8s>p8 z(!l~;zX|3Jwlw~~3z0$ZR~VS@;2AoLL)@tU_4L_r%j4I;z$xWHl==c>R8K0m>6O=* z5i8gY_Lxz+XGz`!i>LxoY$Y#0#R-G&gg8Us4vvi8v>t>V3107}#XnxqRJ`MampB4a zjp>*m8|x6$op3@07@X^3hIH^p;)zGRue!o9sJ};$)MJM?OaC(_Nn=nwJvl)$Jn$qs zJv{{l)Qa#wZ902=z8u|L51<&|{hQh+GGwPl2p}I}NtFbR)A@;{*ALDt>C}Pckvs!O z2tUpB=Yk;S8oMz&a`t7_8zd!-*%!A*b_qR2=4(qt956*_qx6Gh;Q#!xr^l>H5b}NH z5PaT+AmDRD1f{K@%dutly!>N{^rQp~!GDrV#f@$XJS?iNwmFx=;wl5i84`yQ&ZB4? 
zF2LRqJ0WJsFd!;1h5Yar-8cG1jeqw{Z$WSzF(Y7-ManUXu-M(;zEuXMrHF#~pPl%t z5GQ=$x8VHQ^~Jv^$S5!<#aJpJtP8;TyTV7~)L||Ihx{7N+9O1gv;_98G3M$I&3U=( zy#-w$#5^01Jq9&!NZ=aL>%(ANN{g(6E;+8R;;Hs7^&m1?>sOP;8*O%5RSsW#;vt!vT<{5Z6S}l7ruz)=G~$97`3nj zd{x)dR?Pe0GXF@@C-y)t^5^)kXO#E*QlTN@uV*Q}HYX^Mh=z^Y4f$&5Ir*jTX!WIK zr_J}R#i!@bA59~wWzqUHvj#J4MN{Xgno;$YKM{Nge(Q#%3Nc_Jm?Ia0J1uQ_8**~x zgY$8~vdxiCFt7kH8F3M{^xtq@;BZcEo821VRyy9VdcOMY?k-hITGLWm%pjb&QrKHZ zw{hU*eMMA!2LD z>kl|9(R=*G(V*^ZZFf=|75Py;c9aR6>Mz3DdAwSI2IP52T*&>p;R=3uF z{bee-Y*n8Y+2!W%-{Z5rz0I^7`GK4cKQ}(Uk&^ZeTWyKt^dtzkmGVPU$%^e2Y);^Z z6a$Kc5>oYbo)6ZE`6s|{M9j`6NmRs%Az#l!tMVYd=biHNtmhK}q{1v{tqjr%WSXg< z%j>x1l|z=(Fj#%*0aabCw1b0#+o$z)oy;DJ(e73El7?KXLY8OCvSBwR{YF*HTV4aF zMN^ARohg`tjp+Ce7kC6JD{Mf}&zP^9bV#KIp!~FZ2b& zEDb&%|2`q{SgU)90@pzjV@)>s(X|OnP!5;4WCo?eNfWjd#~76b{fr_Md@m;?{Q7d$ zceYBt1#XUc2Mhy`mRpn8(*hCs=(Z1eUoUw#pcKsL!vrn76BnI*oMo@Nuh;IMBQKwr$(a#I|kQnAo<>iEZ0_=f0nC_Sw7F>guYmCvG(DGDxS! zVNhrUs>rofRmo~-C&NX;Z=+$wa!2)~y$}Rl+Xza3$P4#;(?WHO$XWHD*Qxq zw?Rv27Ks$KP-6N0rNq)P8LE>&({lyukIN|}PXu7OW%NYaN2Pb-WM+EO7(}9Lu^y3A zR0<7t3LEROL=VE5z@s8F;a3`WZG+j!H<}oe)m^b(_`NugYU&J#M5|<6yqmqscZWVs z@{63G{ec1t?~BJ03k+q0t+WeBw)q6C!R>lG&U$V2`(XFJAE&tY5&}RS+&}L|@7Aqr z_d+oB`cUP0h=7+fGjZl^=!_+4lN~R>9xS?`NpSG->2Mgi$@RO+06;}W zA~U$VuF7C?sGX#@D{mUQSMkYeGo4-vM@zX^;j=5M%&yxzN_P?%y=xthpG%?8nN1?? 
zZ>u>&am1xKuFQIPoB3w+-i?F-M6yCEikdqBK+sXZGc_I2WAvt>Jm=ZX@Z3!i!@xrA zm~?0c!=p+7StyRk*W%jcf96?W_##sh*9%_hfUjzTN-MTK`Fbs!>@^Ml+@7?DjUi@e*p-HMWQs{yy&vpa< z`AM2%gSS(imMmy?281#`-&cA^{!1oH8q_%%Ru6rByhQE(Y`^qS4E)^g4RYA;`WK%r z|NA$-DJv@*VbGR_MA=GSJy(CCb-@ZJjEo<_?Rd>X_o-@brFM8Rfw?CLPgi5>NCe5U z4v7X!PhX5#LAI5oGhYjcpoZEv=G(JD9;PdtFVjdX=!&{L9Z{xD@>-CEq|jEOmzL+_ z3hdcIZ~+zf)3bJutFPw%wz$`9!cu-8VWzdowVeNZp}oid>{6`vm0+S%@ov_-RcJqh zc|BXx`cjY4|8djH@a_2FG(gFm5pA;lc`D97a8cV!FGF0)F3p(oY=bUDeZkI ziS(=&O=>9e=8aDwpNj_kZ*?ggkkKnTR1RZ8kwyXjn+&G@G!rzZ{X&F;7nqUhoa1~H z!XB$0fVrqm1*lOibk|!KnFojyA0*rjfva=)utk8KAirRx{+^72s&3Dah#@EX=lJw4w&BV@qeDQ{@U zSI4@|wxY*xKL}E)0fQigjTFT!M$oUe*woUdahlSrkS!)ySC#|rV+c?wyDY}Ipq=n@ z*DFgJvS$GkBY?JuMqhneQc6SGnpZV7)Ija5!rV@2#&|wL(KIG9 zpcRaGV%6LU_ z#FW4wdes&b5Y_Vl0^@`=B_i&GvGRcJts24NLQd9@I5;@Pzx$ZuNI5=4n}Lrqblk2t zk+0r@5c!Z`O94|JpP*v;jS#&ZIKLg^z$a0w-&K?Mq%yT+DYAO^a>Y47YxnU(U>47# zC7exgpMYkISOgGvF#l&yDw-W}lu)craYy~}((UzP1c2q{~ItPeYw! zZsy2(&f(P*U@N0$9zJp0+ZMveXUvGkDEA6@K|th9NJUpycClTjh=|FsH84Ix`Z4|v zi@UN)_TqPh=3kPMAyzmJ7~O5ZkKd4?7Ibm%7;cy!ns5sou&392DSs-f^}0P(fBOOM zFrU?3<#SfmF)=Ylri5k!=@fIBQYD9T50A3Fm}Hi{YC$~?2cd0q0LMA$HQI|u_Cgk% zjV$ld4CowPmH^d}lAm;@Ij6*dSPAJhI)%J@&rna-G$QmPWby0F)T0++Fwl>W*1-D;R0qMV$nH@kAW(u!KQ);h&gwlv!u+V{l zjWT?d#!mF~`I6vy2C49YIQXh6FfSl_XRvwlINA<#3(V?;R8qGlewj*7rIjUYjDVqh zHp2dwfHwC-pnJ-*kh)qGNt8|y%@#Js($p-^_;E1-D!2YWg&g z8&I|+4>`WSEmc0*u!JQhvS!1z^@{{rL_-O?3*l4B3Yo6@pY`MQL>zkCvZIAYx}rL@ z(G(hWQddR2$po8kKrihL*p9Sc3UnOyf{%xXemuXpNU}U=^tJP>r-(NGa^+&@yp@hj zIH74OS(+hb{tLubH!4Vl?XKL$`JgEKmr-d`_`m6_UA8|Ur1tQT`U9m+XI@@jWbw8? 
zbm9;Kzcw0{OYl}q_)ajO*Bx%IuQAvQRWUH4s8sc6t-ycI7RyAb-U$%41KNXjz(=D2M7fBQO$W6S)YhO<;c!Juwn5P_v2ncYDM5t)Y)33b{oqxY>xZ4M?kCa@FtcwF zvsNE2g$E>qXt5eTWT0e(^glJJkSLuCjQ6Hsi_MWBumarm&%Zd zRkOY*puKKMGjSXEk?VBkM-QKl8f(rr85mIWN8tGGL=m7|_W3{* zx(q`~a>9OKt_rC>u(G8EAs5ePfyJl2dPI8%ZfSW|Xm#TjLZXYlI9_gh5j9u~Fki~4 zaV_x20+q2(jiDmCT=%F8?AEcTsAze9(Eq+qyMOra4g>roLr(JWXY}pAOkJ(-6OH6o z3|P7%-TG7VR5QAf8qyk@DZuG!Ozo!IN-LU$)cPY+WxpeCHEB2Jv;%R!IM)4j!zc`j zLXs#nv`|N{)7}U&L_$~@Ku487OyNG!ULMmp^*6nK<;8jO=Uu=0b=2>d#==K6=6_a^ z{Q7=Dh7uRN=ryooa>CF;Vxj7?Ix15I*@!sj&@><9jp(&T4il50U@?ThQiI&DqzgN* zxWx!O&?)0wCRzriN0&0CpNZql*np<0#iBagagw+o7&<088mg#1x?c-N@-E->UTu)q z?K+#W{*e1cd(I+ne7mxxN|E z0V$5Y7>^weFe4ivFM`fujyF@rj$_2*+^f!D3H4ydY%k)tbv_sE0^Ci$ckH$lME$FP zO#epurpvSA9RD_}G4JmQ>`P(w5~v(qknW=mESc1FkV^2T2xNr@@wmn33I@zrpnqs~ zkgAi4(?HV}oPiCV)wFQPK!)txD>=#6&CfI^t$1lQ_YA9p;`rVrm>a9|$4(vwx-Q*N zt13%gyRKN)E;Iwou(goDjNXQxGrj0PwEx0Lm>0i_ft*!RaPOb!n25d!)nrg^kk9Za zZYT8t-Un|?gNAqrBm~hS!%e{@r^q<+SC%p50mYy z#>eO9iALM4j;Eelb|=HB%uUCz#B#&tnA;cLkjE8IfZwHLmJ7^#UShNSkX^4Fi75Qv zd?v`ca?PF!y^{SlWO4K!9mM?d#ZAs%0-i5>9h{y&y&%)B-Io(Jj8)$ZXtOXa7*xb)>dG9BGw@#2Wd7^EmYHPTCXsr zhemD3Boj4%=2o*H%}~=>9&v4te7$_CHfY3o@K#HJ%S&?NFIVRL*X=cuKC-2v;Ifb1yjHuOcG3KgeDCbB<4Eurg7(y$1$}L0NX-} zi0kahdtr+snaIF3JnzYzRjL5&>fxMC`tjXvRwk^??j&%4=&Z)ac>OAc%~M${YxY3O zi3iVkJPwio01Zme8FSwevWLJ-tYDsFV2jBH(lr#!Q)z!eIjmK9RY;wbvx7}=Fv=(l zf2rLOkGl(($oOU8mk|yq4H{ZVrA@i|GD*{gCb2GBlCeDm*OsA94rXB| z_RHv{({ZkISo;#b1AI9%);tJS+Y9O20Q)6M5}Tl`Zhx8{{q#)AfObc`30&vO`~R!b6oAX@CoaJvFs zKO}ozq)$P0mY~(2om8Wgis$h>$+pGf$t2QjfwCBJqqkjW0~))_*qg&>6h@z%e-I2jApq>xp>EoD0o_0i8a{btNj~K4rV!Wo; zxI@$M9n>8t$n*+^%&iECi4q$N47)6l<*vbtV|(5t-Z$*~g!p7MbabW_GF;RpRgS^V zWwVw5U*UfKI@mxa6wmI!&bvx11|<*)gSum;Pw;F;9Pg$_w|@$6j#&y61VGMb>BRn*;nMe$6~uJ z5YJ{{kv0m?qh-DHud#E)G5$fV&_{ni+?fknGljstPIV^ zyt6EVzrYn~{zN%MSm_d%N=iMvk_nC?lW|Vg_?j2e^Z?%WQ6^)qK{a3#hZwS|2qbgL zz33E{5h33eRbYvVfZsM7{Nd!FXVynhLka{#tAwO(!)(Ru z6vXC1W5hsp*)If*zTfc%!yvZXk7}u)oSZOBd|(tgj(#IwMDj0sio3Ph&GOFeiN~Pf zAk8BqVjaGd=y-355YF6CZt)8Gz3>Hpd$ 
z5_+ZthU!H#lA4WhYoGa_$%0jZG)<)8H#~n^hu{$r#}D|xq)R&?9r+8P<;uUI4Q#jf zxc$+IyVhklEXVblbz)rp10MG}H8&|QitoH^^#FY6TQ-I{7?=*d!!@*Tz-lL8 zvQr5tT>%+}jN^MO_S+W`sP=5Wg*wCoKsn%k!&kFoc!A2Q`S(4%B9T zJEq!lDLLi|id0JsY##L{!j>=~=b8m*q5oqgZaTnD-*^^epmt@*P6lVnHf24sUlP{S z*O%8q>Zyz05p2vR+%JP$ic}3)TW_|87ASv8rrhXmijCwGgET~R9mv3W7@fP(<4kk2 zbF&ZvvoM1~DlQAA)9m>@%v0~ty8(-jT6d#zFOr6sFHU5BnYAH86yl#G(}#Z<$~yGo+A=79 za~fpx$aVasPZCo=C{v43Dou=xKY6T=1JN=RV}&K-4blZ4RU15Bw8fcx&@pxzkEpGe z(j-^Ue|)p06}(;jtifXelz<_q{Y?^+2S=L<2Usl?NTC|*pOwSc`_o}Cl7uL2*;`&k zb}R@BI3dj3sT+(=FuQk7X5f}j3Ncc~40uY|^~J;CWmOUbL}}lm1N%Z|D|j&(l(79sj8#GelG6rsd5|8EsY^-Q4Fj&Z(a% z06!A|M{*HWE~I@GaX`@169qD|;|5YgbF-Gjf0^qR$Qe8uQ4$z1cL2zpaCWEj&Z~Oh z@v2#tR2ycoNP0b>Zxbc`9B?z6D$ab}Osz+qum7z0Yy|#Th@Dno8dPmS948!9s|*mG znk)_W0L0`8ZQRg=GG~1`@xuoWDn@Gn$WvK4iTh0sk(&gESL;zemeA|Un!{MwE;isU z1XURvjRZO&`1aTVW~XzDix`oVmH5~;u}A7%O0h&?J~LRs??B?_CPX~?H%*rZP*IVi z%~(G^M{BJRxo(Sz*ia%pcwmPdHOyMG6Q-^>SRp5^vUUT1dO4r8Xd{e(x9%hQ)#XEx zS`Big%WDOuA!`k2bi5bc(Pj(-^Q!+|02GBR4q9-HPW(dtS?HV;zSqykDnVb7nTIEe z=2)QecCyFgzCaK9+z*#mlHO=oJX}>F!h0!&f!z+Go~HV1KDuXHpf1bN(Gehq3)^C| zCX~a{ze5-HmRCZI0B!=1D9b2=Auzs(1mhOQu+PP>_btOKm~b%lZV%fZ1q4M=v4JxZ zuTx#w6F^Lsv!0loZzZsJa6scQg=y#m)Rf?=NfGGq!}L_Z=upUFsj0VV1jajVC@fHs zBww5iJa(OXDfi2!3HM}T`>uN+I#0MvxUhH2T&fEjKt2#J%P2*1Sg1e;!=%0asLaoU z!Xo*Wo~&(crzcPNU`TQRi<%&cT)2h2dO_9n4uHqDW@GYFi!ppTg)`!PC$Yg$?+ZdC z2*ABAqd4L+GnPX3)MU4`M~5w3SU7vU-=dNC23qYpCROK(2(TcUN@W>aZa{0Tffr-K zvRpe6lYHHh4!2>f$lKWz7cT$-mXDVk3eBbY`FU@Vx+NM7zo)-^l?4UB08;@8{7s;5 z>m<4u>W}h49anANSsRQ=qVozQWE89%s$TG(E+YY~YF(kFK>}2L8w!+2;gIX3uwk7& z7?gOwFnKz1AsWd+N9AOH_Y6f`pcvQh;1rh=#RjTy83^X25E(K;)I|{ndM8UL?t_y4 zuHQP>=$>DF5#S$=U)BR%9_DH5x}~LtFFRt_?<>Vh<0N#CeAMrX=H&QM#vid)tMyJ4 z&1205ERL)kdB9EPh9<2aa2N0~T>eE9kFXwiqbhFXjgJnL+#u zNLuM(q<}j?!ZuPv{69Hls=H?k?H{#|dL+NZ51S4jC(v>+<^cy|!nFNG5z?C)JbI*) zmclXRVn2wK<@F8r8}+qpLZ|Y=Qj(q;w9_OilI-t6`dl-wAm46*} z`+RkJebI%5gT{`k$l~~xK0a8f(b3TXgCZc;t8LL3Y&%o|fml~?nfIR?wu)PcXp5jJ z-5mhZiMaiD=ou>`PqmVmW_Zmk6|}6p@jl^B=8qdac=V#q 
zu7na8_HPGkNRuPp@P%nfAsXOreqRnIg%qeOppU+)MwUg;(}C%302X^N!=lT||5rt1 zsiCJNDI_Z+iWe#w-KxVL@vaTb7Xq1mR0Mk5yo>%>KY1a)23woDkFiWA0ZzV#CQ8Q4 zBv?H%a|$#hfSH}UQTJgthbH@R~E`HOLsTHmMe|>F<+68^d`ixi)-Gimez4RE>bv zG3`cox-$I5ic-Pazv8bEm=@2%oK(H^fn=G?dA3t=Lh0;b#O;*?@C-i0x<*3d9B{&A zXi8pQ+!35xZ4NYAFG-!He0m&{pW*XQbEIo!}? zALP}$DcVLVUC(->*cxr&eS(!WA!-{?MK$1_vMkXfY8FqjyCo^R^G5cA*`n$kELj{o zs)mg{_kCVqy({-Su%~ORvWb&-hT(g9tHQiXsn$0m+Otwrsk~|gNHhYXRPS-;yuTL?v}*@u01Xc z_ynd3 z_K)&@2OD-65GVD`&6_r(CB6^9kUk-G5cf}403e*pQ}S*EhxNW&ksSw8y1cCRxeLkP z_aExhiY#=L5f;(^-$nIy)|j_9)N_hc{Gy}<&O=8nkfoyqbs%MR@OB-7=f!UrgqqmW&0SW8_*es=Ixo|g`LPlr?VEXPyH1pj8;=X7hnrdLJ z7A4uy@nXZc$NnA{_74Nk2r$$r6D_?b)Kfg7LN0b=ADMExN=CfDW-mE|4O>X=EtZfhZPYkxO)xAN0 zhjBwcr5_}NpEH%i7knS&AGp5oZ_4Zrk*PDt|&7@PD9yIo{RaAN#> zE{Tw8*+KDZzgn~eGFK0Vc8p>8o-PxXxAJ!A($oN(D8gCZ6WqQP3D@|Wa*qG$5eXSkta0zREs{HGu z0E$DO>SgF&nkUVBr8;W9P;$jU9*)HA8m!H9(K9#`w<|!;9Jo)fwgf#{M*xY_Xa*g4Bl?xe=<;mkJkl+Z>3)ypZP{7-rf>S1d;?^d;uSr zO#~2IXt7QOLE)kkEm)g@K>-x8S8vei1AnOAPPV=*XwQKb`8*7NNFN*Z>w1u`1c4uF z`z(Jj%N%@l8(de@03EAWi2UH3qMBOl`T5^ZdoAh|;sio|7i62KZkoZW6Z^nMkcobP zKnl+PHZFg=nD-55Yg_Fx%;T7rnFZrK5*E+-hQBbnE%1D=uEJt~95i+pJ>6MB#&Tc1 zQr^qo^ks=9i%WHWK+gg9lwIAV^F-Ts|(jwEAce~g6S$*Q!hRkrfkGN*ZHj&y6(O5YGDZ`9UTAtI5R{0! zUXb`hBKVc%?jfr^$#exmAyX~$ut!{tHAR;8vY+eI`P0-r{`mgJfBzTn(*V9l#g~>U z{fSMAP)&+J!fdXxt%Oaq1ZxNuYc*2pqOqm4MJ7UH5BU6vU-;M}LqOQp42^nHetu~y z&&UJfUzW|V%Zy&)uj3z~Nl&PYryU+ zi*Fl9^GfV{pG0ubS=$x3o~iY&qs?JI5|+U2^sB<{2lPySCveLJo;eFV3L`-d za*C1j%=^zWGY=?V6J|}4_9&r&@vj*J_Nr|JjzvlPt#1791>z$h{StrFMYwLxRv&%Q zQy2l(ksA9EzIM-}bq{TxZ?^dV#FS3HgcNUTRr+7^M&STW07Ae75wS|kAS1L43! zlLYGvk*z|Q{A1r__%C;GvxE1*o>9le)Yr^5tKPEFgb%5J(OzP=Bc34@6lNB1rgg{! z=^*r!K=d?4T$-cDlBTilr$a4gw$kRfeUD6R_48Mp&%n%;INIrUGw$7 zr;R#8&|I%;&d+_!-oF2QVwda9uy-2KKmmQO-dGt)H-s*aabvmMz%sHz(J_N(-Qb)H zvbLjLRX99aVB=eAFGrpjtG`yZrwtv?vnRK@fSA^AW;PiSa>&5~wyTCJEO)1xv?bE? 
ztfHLA;dFl*IDT=G+kCi&S{28&R%6U#uA#K9Oia952rV}IjwVx?FRiB(qWuc}YY zL8kO%!nlA;98r@GfxBc1cfiGTKyq4I(nCd6RWVU-7W<#Vq9RZqA0HKhjNIIri@zT; zDxA6QJaP}fE-o$>mX<^dy-yd*1r-(81@>b(v|(vpY6z@Zg=iATk6~lEJRlzvnsxh* z>A0?hO&^-Uzxlz7JV>Vh5#IQRsE0vCqtN%!OfQG1_vQN+mX(F|2?d0KAcK=zJ>f5P zK!pt^uPdL3;N@{KoMTyASy8$a3#58*;+)OyY)%)?u3wHzQW3(4=7$g)kR8oYUl_95 z3bcH&He8t-Y}tfiorWy(hcAtWRY=n%W{1{Wrg0w(u=QFZl^PZn6yS@BDyH3=tN^C2 zjv-u4OHU6`SIOIszdH2)G5tLNi2WsR*-sZM%2`jPvRS7%%PQ(Y*7(iQkuo24o%8$i z{3LFBXpFp~0+hCp&$mDe=6nH3(*u$+#2r8A7VsC6zRU8R)yee|$S&eI0CU3QW?*`D zK4gZqj3KM16Wo@j!N47fDxw=!qjStlvN$m>_M~IH$0dtKB~Zhw>#sctmw|D!_jI&H z>W*EjrxE{R17|1M4S^JL7#&}X{FIbFjb8APku3w$e>9^GyPOgUlM2*CKA()tWw8Uf znZ&LfW{y(#6IC&r)s;vQ&|`jjDic>k}zO z)7(Tv_mP2QWMqf_OFQMKWEfFLkfDtdi$}>weV<>wC(m5A+wTKHZVu%DhDv$a82LR43Yk&2xN{~^2+1a}(L9Xm!&@s^p z?)tAzK)7~Q4e71|a;jq!!1q-2&r?&AYG7|Hx@mPl#&g2cPMDEhX!nV3&U#tFQ<_eS z`GhaT<&4#FrFNjh0iKx+oV1Jx*1mD!D(dQZ*L{E8h+kZ6LV>mQ!l|kmrZ_8ylb=@+#>$0rob7U3qxt#7unf1McFW*oPorSV;MP4&jkk*}$#B^EUzuIR; ztoz|r3|;bKbGEvHt zfwF%^p9z4<`(JB+JA^8a5>F|9S@s!t)#R3$XE5(gjLMbkty6L)VvIsn>0fUL%S^Y? z7uzlPL{#Pn6k+^_n0qNgVFzC1*&A1E(ahqG`8*mW04Kh~l^RDSza~~0*n~Qx7%I^} zktw|ZDY8(}v)VG#*+$TiAreDb^3bl8EOB<$&QtJ5RN%3 zz~>4jvE8pMnbhlg6sp?N>BuaN7v*jh8>{GJ!)7a1XVFq)h2CGd&(WVL>cCaaXbu8? 
z-72KRG?c|xS?Gq#gC(iW+jzhJq<;YLU&d&T%h01OgxQjR;E_|Li+~e)h)KX5`Xfj6 zLj>)e#K^*XEkb6tMf`IcW~=iGw_Hg7A;{1Vw%$oiEls!d>vwRCD|0A;s_zYhd5!^Y z1j5fHH-MB1ThtMt78$bd9&ZEk_N|>ck#Rr@sovTHm@YJQcjJl2{vx_n*g=GcSNsdp z`0LXf)V*ITFtQ8nwme+4`q<0U)BbLsY#+l5;J+F-q=CF-`87K#Kinnh{QDsY1|BF= zu9N>ZThaL%M~HGt+%B1k+!@WhXl@UfOS`FL<6bXqS22 zo(GV{9Xh8$2pha%>7Yxq=5SYp^*11@Ng-@KQIt~!;)We)iu_5({6w1@;7%?Z%<{`R z3i^Az4@Mj>uM#(`kbR)5N_+52OS3X`T)UGCH3d>DMXP2L+Nz-}VwgpOK_TI^ROt$5 zyCP3p^-5|8&p4xxvfuJ;xXg?3658!SDvbTao~Im$Ka9zpOx+dBcaOCd+i{!2MKTL9 z%*!F4Pl4tg!fir1vcW{LZaYfc{h6Mx#rzA1(P&^PQu3cSbw*81 zCnv-yFS~f3MIbr7ZPq&i8Ky!lYFPkq@cu0-&#R;y!t}nk?fD1*ubqqrVY~;6%C??6 zD4F|<+ZQDjUZq@GO1R_zhtPL+6O46T1nrg~3FJC+wR_K9wZ!f5RBs?0;AbP~v~*ET zd+8i_4SO--!EWlZuWKSt5VYk2Dq#*B#xTL>VQhz}9gAB!Sti0mg zV!ffP{6by9mQRosrP>W0M^R962cbC_T30r0{5*7TlDPZKM7`xd>JjnNZpSNq!lz+K z37_ZR%z0g~7x(J+4bZ_Yd?BlDKfrsKejtseQ8Z5%WtD+6&yY4-cRxLyowxuBE1&N} z*3Rzz(IlG7YyMq+kZb=8i@utgD5)=`Iw{KdtSQg0l5jtSE|q+2a$kmOGdg$0UlQ_y zGO>bkWV_Mu182RW3B`*(LD!H>Y!Dff?;>{KmO7kXsp4(6gY9tHXV9xY>8Y{EyqzU-E;fVM>z9Z4!?EI{c*v<_Y@#%rQya;MqXCQ`@m${=yN&_Y# z^YYZEWr;{dpkuCEBS-bDe3DMt(hgQfrkmAHRys~_gKao5p+CQ(F!gcq@FwZMV(H_m zOY2hm{wwK`k&(%zxOgo{IdEeEOv+n3sU~d+hFIh4<%_^YZCHD{JAv(XEV7Ey?nxWp zS#)kSP;>3GGu$6eSVAG1zoD_#GFYd7Y3QyP zHv55hcK}zF0(*Mk!4q2Dp%e<9n2pJb+8@KvYGPEJMJzR4w2g!kiM<=bzx3gph43V3)!7Mj7j(L9rH|;x$=9RT{}QsCuKywi_;RGrz_z56DC0mmEIYc~T2zI^NTVTsk#cm6D`#2@#o ztc0Hb&YPT!QcLdGMeDU?y1hC@=!KP)r5HEv$+?7Ni;6kLe{%q{3F_S6mAM@z#W{G5 zOz@QKx9m4L1yKUsm3Y1lt2(ng` z;q;oeePhdH4;+r#Do_{{xxR#6751@L!Y)j78xb>b*#bip?~jbMRFl-@#)t^W5H;`| z>pr&HGte$>Ev(5=NEkSTI($TPA=7pPKh;mZON1F~#+_5253j7k#XF;=b+=ivn^3XH-eHFO zn$qIsdW<9RzFnR51p}zOsbiPlzM9RhR}y|)KFapK8!b7GGf3(ei3!<5!enZ}_ zqdb|TK1bSqVWzq+phR%k2j%eLHLz+wV0!0D&Ehi}+&sc9iluU+&hn_9Tj*6A=(a;k zk+{jk3OEMO<4pvY6ouW{M2>JjMwDR&CqDQ6dRNARr-KrYs?c3SUp8c+GQ{}6zPDl{ zh0g!RizHOl3dKzVfD`3Jsc+^U<8teXwrN31>H$WR5{zE)HbGwyd9kKUw?OTA zOGOnC{<4-ab|$u}*=885Hh z64Ger!%M`~hXxUUqQF3Hu3qETS`z~v3c&nVdJO19YXEpgcJukTGh~jngE6N+M}Rfc 
z4_6{5k=7rBwdbj+OZWIXYjMY5ZsYx+dAr?4OV`K@N~w3EFHZA_V+9(8z6mj19ge*- zUXo-GTwi31`S~H#R)pA))tzO*u18DSC-0lui0ypJCU`t}&qbFP?xp*^9-TQjO}f)@Ci-R~ z9o0%rYZLMP<%P&oA-cLdjozq}+-GdB9vrIBVHjSTwLRTxgEba}ZB)rE2RE1T!lHKo z1TO~UPCbDP9li*{cyABv{EA?HKbssp{!$d%tY!M$j9aJQO@d+ejT`Ojwoamo!}Yy7 zy{$CBVHa}&Lo}e2<>sxtxY#zo79x$eg_WJ1Dd;8vLD448YOb`tP+H4YrN*luju%r>K?gxSFvh~g9S()ZwdCSHl)su; zqyMNIxo4i`mAn_$42^3uWsCQo{d6 zA)^1zshG+lFI~h4ie )zsMUls9=)PIC_M5G6}#!3C7ORpqx?DBXPmf@DP9FB-IB zz50=qV{Vi5pn8I8!$ zG#Q=)iT^A}1HlID6&U`vVCuibxBMd_!om&QpWReztyarmzQ_@yKi@Au2S0uvUoSg9 zBccQv;PIv`hMA?;9C9gWM%^Zq`_c8;rbH=nL?$@pmDQpLIJKxzyt{J)yfOfUU2<6UZjMzTx^U(BP1)2oJ7zP2bEmJhnpL(7j zgi4wqTt3sDnST@&#|FTAu73tO4^$Ajo!(iudGG1@e)d^yMgxzhr5$7&on^W4U_uaE zTbg~}vUWbn+tE7jDgW~b2bnTsx3||b0d~g>|Dw_&RT$)4OnDeTc1-_8AZ%(1xI~H& zBQvNrpqV0Ns)Oo)^uaF?p+tbezW_8nMc$#Eu;EAyA3a7>fM_BT@Et_(>91k` z^{lI}VSN`fjnsBe!UcXvjWRDKMWmdBgi2yhKB5D3vRRoz=j4tP&HQ!TO*8FAvPpG>U2|({ljbK_QU2eV7*$h8ZNQ5H_3uHD6?Ay|ke;4io@jOwAhh z-CbB^=Ev8N5R@s#3UWH4km|L1C6ppQ{Mwj@g0$sNHQ9Qmret>pUtc>vyZ}8*+y&|9 zMWYab@pq@dbb4B8LVTti3qee6^&!5-%uEQsm;Q4tQnPrBJ2jtDfwOJ%eLBznu-Z&<2ou2uLar8C>f0u z3E!7KCbhbRGb9Vk8Ut^kotMdJ0WV6*R_HZO!LFC00PJ$Y43rTW(m()t5DEF@NyCy$ z_^^?Pbgo>PCBBKXvva7jU%WNC3WLh7BEW3O7T5dH*;~{h9@2pbV$-FukFn4FEZ^Zo zsKZ_{jj0oqlo`shgqR@L8RUyUx&~UpiD~?>-Y2?D4&&AB3Lo9kynDAG1i4F`@uLS= zu>5-E_X+v^em9Dhm!6NnmdJXnW0CEaRr(fNagi4r7x(gac!YfoVD7<|mCA?_D>~Dm zUm*{+07nW?XOJUJBE{2ESB_d==R)(cDeI}53*v=hW{le#Z;=A_s)Ib6jYSHlH1Bgh zTvVqqwp%5^!_Bs2T(%XNf2bvT-$cw;Uuw%y6M9eToQT_$>1 z5unF3aL^gq$~4p)GU;^dz;G)UttnM3YL)w2KC+ME&KFxmI*Fs=?$<~m8zr!h!k*{G*?sDMh8ZM4ivjR z@zs*)UOAr5`ad(^Ke`0K1;rag{iXNOlzhy<-{wqp5b;Be;Z|WP21z(721B&>KLBDu zoxTb!KHSH`ovfF_qHoQ0TcE2bBoy)@+hB_B{_b)as0k_S>FLJrfA>2ae*0}$whdi3 z{A__R4C87f9R8rJtn8SSa&UNfcnOa9(Xv$lS!Vm*_r53M@pyW0a8N}d^+Jf4e@6@3 z!N}++!l5vAWmQ#IlRoua+7t^h865X!ZnqK%MV8Oqt~x}Mkh@PTBuUbz2W z3WU_(G&VL;f&8VHUc!%l^dkb0=H})Z1#{p|{q=dD^1j}z}-khHz3AGVGnju zDvJc5w3SGI`X(W{gtTn1Toy*-dYE-hly>tw0*UawAT+YDJIHI<^(CIusWd+Q^i%xn 
zzx^Bf`up?r{0vNJx-k+8nSU)UDSayvjo!F;@!}#<2RypAI!L(8cIeO{+1A$99S(;- zavZ1Hah&G?8CExl9&kKwbECs0>kM*y<6xtVU%{0jEI;siF655l&n2%5cq5%$Oc zwaOBucE0U)nTw$e-pbJvB&u#eWY04&+O`u(-RzgHRGK81hJs6!NzPsYfdY}8J9m<- zZD(gE)kmhLrtd6G&k z97U*YC+t0)aQZrtzVJC@I_Ywj@#Synh5t-2mpQ?iO2WEv5xLEKNEC}1y3eBZK;|Ej zm9uc}>#yUXf=&AtKxX+lZ6UUEv3z#LRVXP zc{y!cwrn9WsGgo4YV{o%86gny=}#1ZsOwlbXh8i8R2sBD?O*5s^;5jqI&FMrBB=U)ra(yi$iwjOS z$pW>Uur1^Ux{$qj30Bt)nkr_#%3z*vWG-67#kc(46YaZ%WID{|O$hHi0JCiy$um?Ml>$nPgte;wYNpnU#IE(t|@Ce?2 z_Xs|C{{!5(b#q2Kk#jy_==%AhX!Q7=AOHA9$N%^r4}C0;ldaZ3R>*R>oNQ}rOW3w; zSy^k1kg^6+n$%I^4;W5NP(7rwqKb;GnywdkMi=zZd7_#ORQSwX`~s=Zk0N)y-DNHH z=029Yi^dQ9eT6%%xD?T+UqE#K%LF7O427T^?K1A$0-f3HXh8wL0)=omOn_2SQbHYD zYAY=*rRYQbT>XBosN+E_m{xnx<9Hxrt{NXRLlB8+I77W~M*2NUl)xH)p#T#2{|DNg z8Q(~Vlm%W|2D4=o^eR`i(C6>Ew@kEuUES-Gn4ZST6DROr|M^>d`Sn*wq>=>{O3rOl z*Uv}8;WsKPD?j<`p+ln&d=QVRtpdmj+Th?|X8rp0Pr(@+1T(lR?h|}~m%~%bLLn2D17?I^ zu5ClnbFU$?b02uItGvhjiSuRHrP)R5g*e~2irNeLyP(5=n3PrUZ|9;?SrP!rqq%U0 z2o;DJCIN`m-$85!R8VI;@F#csZ|1#s_t)O#zkC4gMTluhN{dGQ8kkKR!DGb*CC0ln zVfg2id$pWwHiNIue2F&?9m0DbypOT*3AY=~1%F4^v>wwi-m5Dw|C5}U=ov~RzOfVM z^VuqZtgxA;nF)u(!`W;$CZ$|YU0Fnf>#i;ni3HN=6pG`;sHv+Z5V=!Sh|QVRlKv_; zn}OYV3#pIafpz^NL^?&SyNnfDckgE%{GhHWKP(6@9b!xHvIBP-*+kMDS53LZ+H-zPE;rM^aSjIE;yq@ z)TqI@EBTt|#K}|Q%(Vdz%Y7hcy`OX>70+2oIl`NELocs(Ijrt-PX(hnbv~_J4js2| z;Saz61KxS(ZS)TeKnUsbUAd7~(lCto%~0r#zTx4o`X?v+bV#puuV=*%19(K z#Tc`slo26B3xFuCmNbq`HcK5n>2w-prKPB?uR|mfb)AHe`4rTg0OD(%Pfj3v^(>N~ zybZhWHnr{s2M6yriSp3N2I(pzDo10kZAJ9JcM;zCG_!Q=)pwN{?;AMCW(urwU9oK>6w0Ft)0;Agb zqf9x^c2A>o;(&+42yK4~M$H=Ue!u%X6?2IY>9?I79eDH2LwM)iw{hj_RXC2w3$OvO zG)=z{iA4Uhwzl?@fq{WpDciQ20Mvl75G~k+An+I+ z9fihSjfmR1T9{_Y1(Cpj-E5;(q7<R^nR{>xw7Rzgv1Yf_{maoA{m;PNvv05ZfPCIw=BILvfK$Q2|f_WC__33k> z#q6dEHsgzHg1WJo#t`261dRF?Xl5kv(%BzYh&G|h{bFDud4*#*PuI{b&_V&Jo9pyLERsrM@v7VluoM9M~kw_%#I8Kct#99c+)T&24 zi0Mok6BFa~psropL=H|j46mdxUj!l{Y#X`j=aD)2A#ztd*4>c4B7=i|SCyM5*PyRi zhse|4M$v(np_i1q`=nNS2uv6+5r71p>&VCmjvYIOLx&FG)TvVxfiyHUP@C15DslXd#?On|MmwvaNEbI@pikl{pF6R0=U=Q`d 
znVQHC>+!oJukst4=jU?ngx9^|5PGfU0*%?a1x8~_eyGrGU1u|oiXu4I6{gZD{Pm-c z@TWiik!aed6N!Af&^zX=X_}ukHa7lWEiEk_*RNlH@b<26fIVh?gx_qdt*sq34C82T zPfwK;qRf(V3uDY+!ku#N?CQa>@ED?7bU}ZW9yQc%$ zo0lQ7X?SG_U{*eU!D+0Angn;#Q&i6kgVi(mHeSt1Lmnr&GGM9ujGt$u~^DNuBVfRX3d6m zfEQuRmR0uCe~IZSd~xa|4!`{tPMkb}iOET7oA*sH%L~K z`-M{!=oiQw2{AbatLHZC-rIna12pzIhwV>_tH?v0jtd;ZZ|aw_vg}E z#1x_thHNH-_Dh#==+GPZ_~TE}+dn{T#GDs)P}B7h(=a|=vu4fF!NI|AJW=98Y!yHr zG3)Q|mwOK!xEv`edehLf4;kl_V4MHwUNLt0r4kBrcqNW7>B z)zvjLv0hj$xviZdmqF^>38c?_3hVZ52$KQ2wt|Y*?&LX-#t>@ViparN5o+D$ibbt} z1y|`)Q&ZH1r8c#yPbQO8Oy9O`8>O*)09jT-L+`U}+a}OBeE2Yq963UrVrpO1e!c$s z>-gahe~3++HswhjXCK?ro5zm8IK=cA?13KGT^)eqcu~p%794X$t9SXhyDnDpp|}Kc zX_#x)BeG*Zc(lliY<)&#PQZNY`ZXN-%U^Kl%|jI7Tb2!7GiXu9+-E6i7{@w zEh?H(@UzniBu{>T%=y!BhDQLCxx!KRNy+%YHY1pjdFm3qyb7V6Pt)|dsaC6afL`Lo z+Qq;SVSj%=bv8{;Ptyutoy+FtW~wdtQp3yPn1i3Az@w|Hi_%u_yz>s}va6qk!(n3c z{cr#6zu}o@o}n%*%t5oVEGFHGw?7&vCdZI%{~98ba__eX-H&-Xvljer>Re<5jD|G` zZP^38q=IHw)H`UL(L$lCql4x=4!!v%zB+r3z=LxgT=SX$IJ0zJJ0A*#{%gaA4ey;l ze}3%24(6L_s{rzdS}K)7Mu=RbC_3smc3MhNC!{QwkYWFU%x1Ib?&(E3nL<1kL*tr8 zO2<&Qj*3*KCy+e$7P6Pm!%0qq>2r5$Er%6y;v9Ni6C(S*jp*jx(Bq{mvqPRyYEaYY zys2|OG&DprAwHj#VHm`gTUJ(dmfF$NhOfJaEdh4>AKIFQCaXF&Go|jAxw~fumG*P2BFr? 
zFlyJ5)GeoOHi^ETE*v}d5q|T(|2NK_yMRnK2SYdM_mptuuuapr5)Osl+_rt&@294w z2L=ZRS1y2m1Z@>S9$|CU?qo6&wx=L8TL@Vuff~-Z;ipto2tF_{fJ9;%#ql_CW~oR- zJp#GmK}>z{N91l^b|-{2=231hzvV?p4HL%tO^6-*9zsoR;Gu~3ij{dTeFv-SIst@o z!fM6uM|Ub3QLl-|p@CQm^B@zjG&AN5# zNV)K5Kl>T!lK5Rhvr}x#D?a76jdRU_%%qUL{1tM2Jw)P3g=;_GG*@r#eHPc}@B0n@ zty|!C6o8bMBiz(V+*mwPMAvv|pdatQcLcxx!yoYF*JqJPB&n5GqX?ducpY8WJ3~h3 zkI`7s8@;{#w>mmH{-#@Y=dx7*dBjZtM1#erbK&rqBc%bbQYmFQW6We8h-5Muj0_JE z<%x21Ex%3ii(OzvqOQ#`1m-1$mHZCeXqb=IcoL$a5$W&^A2tc0g$;~6HGfQ z=cYdQ;>C-2_uY4?uzTjrnLK#Z)zy*slmd`lyLROPZDqkj<^h7_0kUaWo!5}LaTzj` z4m57~&2yfDBk$X8~EhY&oDWa zp#9bj-JeSl3Oo$md@mdh|7Fj2zx(yM6DR&AsY>o;s{ryC*q9J9YuOX!71ceCV@(0F zL<(6V-I@p`Ym=!oZr;3w{@z{!g39tzM92D&I`$Uq!68at5wYTmIBI_Yjl-yGMC6Hs zDEijR&?B*eSP|LEw#%w0q^zur^g0wk^!4>otL)Iw5UyUmid(mCQL){&ZTh{>%IZe| zvk4i4d{wi57yP|4wFpq3)!yEYk3RYc|Mg%0MRgDLZv|e4VW6$8jnoT&^{Zdu$tRzr zw3|<5vXZUFl6ldkm&THI4))|2GUva5n4a`HqFA8AZkhH=lAZyykO8Bj8lmlb(KVIC z`)?n{uYdh(oH%(3lZga0&2TA9TxI8+Vdz&wA@gu?Ny)#Z(y0sQPoDgnpZA#0RsrNO zF)5`?B&H@_d+oKZ&aT_zQph4mS;-iSFwYNJmW{6N9&~nhBQ-UJ>eLt_mrj9AO+XL{ zoCc3oE)eVc`Dx}i`QL;5?tZqw95NBOYA{>3BKpL02yfi!@+iq*ujk4Fh(AwJU0qFu zbLHR_h^W=Sib{HVdhpd(UlDzX0+n<+O%7bWZZ;Ryd4)hA7v^sQqjgwfBI7bk1boaP!tr@OkYU=VfhISpV-kQ@JydR276>E zKe1l8uNE|)ejholJFP(aEMaO4G8{o)NiE*|_$2=I-~JtE&tE_)ogqzg?ma-9^R#Ii zS0jT@b42}?(oSH^o zZ!g%$AZo^YP~v1@@*u}8%S&<=QVMfSe6WYz3h8P}Zx{x`n|C9+Yd=D3TOqx+)4LJe zEvE(9SW8MuXf{MS>TEVksjO5gMWCU;_)Unms*-717%GoQRQGgIsF7!<~d$n1X zMYWZYkrDda^XJdwlTSXOXye^?-=%*K4h~Y3qpYH$0{iyuqk6|Hue?Ij@jge_qoiP% z-v{Q_5Rl6vdE$MVO3%0XN|qNcd+?gq{Kv|IZ!6t9I;>0BjH4qh@lNLm-adI67cX8y zmb$Gx;HSugWer`w95F+GE{a6oe(`(XYX9re6ZQk{zRJI zl1SjMh3DvW16oNrZ3w6#N}{}YG|YGWDiYbaaU=b#yu6&!T4&FmMR#{M0fhn(6`fqW zc8z$mRK%jTy1F_79$!Ez6bey_OTDiG7X?65Q&R*W3V77}DIif`q=3l>g?KzpvbTHo z?7<5!yg;qGEiEm1s**=VH{CTh^NVJH$4Y2p8YlLFhr)~nnv~4Dn?U!v28op5xS108 zY-|J{T|0>@6G=)Ja!vQB*#V8SjIQbJrfD82iA3JnQdV~P*ZCKg?GV z!WF;%j<=xg^C%4R#Z%2BJJ$VDD$BsPXDe6%)I2aMi*|mtqVXLlRL< zA|oYij~jvr4squ*y^u$IevI+{I-o$vj1X~P@n|um2Owa9RL>*SVed2ADyOa@il?7` 
znp$R6L~!ohIifnbc<~|uh_8RHq6-ySC`Ye0^|wApm0$Bvn|iIk`KG{1MHpMRZY97_ z+onyMDDo-t21FkxSOei|Je zrHH~GLcE(P!0>fPs;a68Ja+8ZL6JvWTN|atN=iy*0LMyjaXqNfP@x$GxJG>vQo7NB zf6W)u)O#yOW`NN>e$xTIOah-w17}mfpye)>H15Snj0CvShOVCths+}-rKNALud2HI z>k}td2@>zqRsrNOx8ui;t1lzJc~2uuh@ zdEwnrj#6Dxp7^1sqokY%L|EY0o`!ZfLP~Me?n%&9AZ>k=EI8K@?6mUJQwl^>RMOkq zOZ-_^u3Vw+EWe=c)1jy+BUo37L?Q%43LF%0G&eU>twjNndQSyZz8a#h{}J4$$HvtD z>!zEk^au_MY0-y0jzG#&x;ym$H1Q2t-nAE`(P>V?c;8WF>Gu3ml!6IF$rCa}w!7_S9XfsB6cy>5!(K zi9{m5DJw2Mo|w#a{qsK`UhTGeKwAZnZ-@VJ)>oyhFgWXXNJK!ht@8qTq7@%pcwzRiul-_8(uiySC+w20i za`wvUtF5xKl7PbRw900)9u-Yt(9hQ$Q9$Bv^VK%|R$x4i-~o5VAf(72gLFMUW{Wx9 zpRs&ORGkt7u3kq<0AHsVj!ZF}PD%7x5*c~t1c|0;W2UZu5;crN6=h`~CdS8mI);ZI zpS<8BXsZD74YS{jjV0IAm!FDQn3znaN^`dSJ{YUem?XEYpk=}VK2HN+T!NG?vD%ltuLaIhL|a%MmXKDeGv+3$7@k zBRQ_;B&HmJJ_mR?4(yFElrcuB25Sa*JmhmG{l;Oh)l;&?c-Ke9=Ash0ALsu#6~`BB z3OI6ENZ}N^ckib0u|Q)g>hRithiwgKlN=u<8BQdDQAc>~xrKIYxVj(Rrlub*j+(!3 zS|2-i?d;I>DtIi8tpdn5)OseSGp&u0b7oHejgZzD1YVMm>i}jj$tcpWCD1R`!^be~ zNDNrOfhf>u01+>xwOI6{L~kM7Rxl<6<;Up><}SAUf9$2xEol7u$qq)`2%7g#}}kOz<|m zYBS=9J>YntyuYp#3x9R@!O_=#^7n5KJFRXYh6HjO)6qwKb@bw+~~w{J7NBZ z0vv)!i696FU?D7~gr{19xm3^)!e@%Wp%AEe8>xbKi$~tmt?zb>CY=!{kW?F4lis&) zZSEMN2k9weNz?_mnub6WYSmBh}*f8=cOwK<1hG5dn4rV)}qgT?D{lLU>x+Zmn~v;FD#+ zkq{VBgn$Wbx@*JZi>NFa9zE~Qsetm5>i4;*NooS$FrLbo*Yrv%ss zP*J%uM0WEorGn?RU@m1WXu%_8!hHc@(zCS@&)u;7dRo6;&x%a_L!vHpa}C^TfF`CP z@VDF}Vu+gn7d`Zp&7v;itCx|kx7_ui>>%mpU+EDEo7x4OpNOrrvk+PW)8KJ3QuyV% z;Q0=4KCwjQlil=)>iwaHNA!9j@P1nN!@sH*3a=bLf4Q^e=i&~-kU;J*ym9W_JnHqw zE1tK|iQ`Kt^Ope}1ymIR4THc#9Wpd#RbaOqAC$6jZjGm8(HsqmP|5Rj5 z)U0~Kvu%avItphaM$3dV*|x(D6PjyBh?P*=C-r>6SO4GLwaVisTiw}%^YcSZt1ZBg zK<+TCW_N9)v9@b`{3XTdTEu2k_calEh(&e_v!8Q;hC(wHoM6FP%6K~kK2|0?P$KN} zfx62~XxNUeca1@ES&%eAy0Q$tvTO?KHgFLqFL7&$1rD(R=_&ErcVBngNY0)D+Uq?J zDPfyx>YoK6SXKYPHBux3GnaHWAsp`zUg|L3j2Krm5NqY?9kwsfL@S`whq|IXycl@i z_ji@ck1wn=uB=88KQYiVa_3-3Aa^KcmzP^Rt9Wg1y|{R(O{+SU1F9p8CwUGpV~ zG?*k1pbmv1oT;F4DXPdxpLlq@2Wxx!JY#sJ@7Cn 
zEcDuOau>uQZaqjIQDjlX(gIp9Jcae2{s6kU3e#WPOHNs?$%w;u?9mnAatgc>!L;W7 zv<1A_$*wUn3erm17G3J)$S6^SWsm4ZPpKak3*q+|cy^(+x}tB?(9#g^8AAfO!?Bn` zE;Sml=le^ZQnNmAnG`k2j(}7_RO}!1%I?v&5SCJfE2;HeXlfWAhPp>6Dce?w+_cL5 zp11+8Y)9%(?Lg_+$DwK?Ao83d){sDMO}Gt!Ql>kvd*UUup7;BI&u{{Ojvg9zKfcS z)0@p?SVuVDYR}s@yU91WKjfzu+D_Jgm9t@(^d6!oA9DbS1qPM;;sabkeA-G3n*1YS3QLnLxrD& zp8C;f&3m>mGIeQoes(CsK0pi!WQZQxmPq2%53cU1m(NC`&x=rl>?W{cWKf_;QT9JO zI@U3h>n^rqMaitlpudc2R`o-Vz7!t@3o6yZjBuVr-W4(~1i%{N0rRbbb zYEBWwq$mp+Du8(8$bkX0n6PAu?7*cIxRMH*VrNFmo-#_mAt@-jG}nt7{aNY zc7iyUF2_*YWcz}gsz9HwzSE~l*PB@X#p7r{^%L;g{CiV!s#g5Cg@q3u}n$ zqTOyw8)#G!H}}@-vrXSW-AVPOSnGtrlh6oAIM+fn-CD~jp`jJ7q>QsM;jIWb8YwV1pQ3x0EJt#X{QO~vSX1}wh z3A2g9S=ZM;mkQPyNW|(E1yBfg?SsE#54_rlwLo>xIgCQKC#3H`n`TnxJ?Ht$Xg~i1 z8jt-s)-PQ&ZQjQd!sF|NzirwY$C-q%s>RF;xvt+$vm+ax{GPd(6mjY)y6lm9yz1AU z|4HM@uuQ%UF(i;7ej^GfP!sW39Cg2;IsFgulZV{l+{O^fOsJ<2bf8Ub zBqNd6x-epRziKm1tVg(%A1!zAR~;XZH9frCA(P5%i6BzZ{V5uQz0mNfH_M79}*1Tw^LgkSJSi!2W(I{jBH@(2q;76LP2 z#3wvD!T8iTQ1S?CF`=ui=%Ud_GR1b!4vUFki3yhy!dq>|E0-7Xt5a{|3w-y_zK`!c_YxjoZsS*x!nxSP ziWYQ5pviV$WlO)X9R;lVz>zxRYcq_0x=ZlcaYD&cxDo+f&Crl+^FRU=M5LZ_ii6YT zR~H&lYk+5Ci0j~%tbu%b$ByEeB&m0zsFtQ_)l(>XJ_Tk1AxU)tL`2jsc;5O*t=9al z`@huu4?p>n;qK)FqU8HK64sv=;BEmGtlu?I^nimU##g2o_tk;(ZNZBxgyRi`^Buxs z0>s)$c-w}4vzR0xbXc&mvWhFO9>>|aIlS@uN!<6sFLCd^_u|l9hp=ncE=){JVr+aI z#Zt)}$91h>^H114N8as4POeWj(qZHjgzpqw*Hbd=saRpTN0`=drZ5j@Ss~`nCv8Y$H;(sj0FOOcezC%7P;m!M%0C zN9&Bc%J%pw9&ot>ywU*HHT9;{1vsD?pIKb({`~V30iD$){Nr4-89{wFz;fKX{BgPd8!O3#mq5_G=+J|_q?k`-@g=k-uZGR znZLXouX$dfTUuXF76$#IZV~LLO_e&j@wJ%czfT1p7eR%MtUIu`B>3tS<6rF)e0)Sq zedAmkcyZOkv#TCnSpzP$v}x^)L8j2&n>jZg4h&`uW-)1zVxfqc=_%ZE&s{in-+efG z^gi5u&)wLydpGJMqX@$gK@cDaL-_tRd!K$i6wVul7~)JK`n9(16cTwSfmr1Lko05> zi8h})t+k0Tk|afv#Avr#SeT#3nKNf_^5jXp@%jn#@#4ixSZy?sq^ad-&x2=DR313{ zqbfCC9wkK>3j_y?z{f^_hem<>>w?`yjhZi3Ro5fDyr%Gr6qF%|R2Xdr7GlELw!*8cgqIt@>+526m~BnQk6p~bp)IHz zt6Q?35{k$WNwrkLNWG5f>1piUy9b939>n3php~76ejGS>06TZ>LZMi+t)Fl$pzi{9 
zU0voUSf+X^qNDzw-Vof{_XDv{R$U;R&?+q?gJKR$J8sIkiI*n*W)4525?mool~AVNy4uv;I(48`u1;s?^`i$1=+1Ni;jjC(4K02z4@JS7N}W#;vi zaJ5S~(;=K{iFurBD_rU*T#X3Lgb{O|PRYohvn}u@9W9Rt^;!*+latsnvjfvJ)7Za% zANFS?vSSA(C#Nw!K7p~ZQH+d?pj@e#DCL88api5K+Zi9W9u&th)|*W<*4D7PvV!H6 z6)Y_-;Odntn7uNG*~@d7yE2FQtMj-zKaZ8=WptDNf$uCl@Sv2}i)OO-yygQ_px%tU` zu@}7CZ?aO<1@M+4JX=t~Q>Aj{)%)(h@8T=3KGj%Q);xfhx-IdQAn;!Zg2UY;`5y)J z9U;_2Kw0!r^%cG_!T7!1jDIl2D0(*6t?g8q*@=3tz!oZNHQLbG0s)+A5#DGhywOxR z*V34e#kkA0wTF}~iI^R+*&Jklls6NKQYaM*CNkNxXAk!6-G{w<_nOal?b?m$nJLsW zA}N0gS^hf_JS4C; zbU?ttvcezk5qxV0<7kbMFp#piqNhzyQSZO(+`HCHkXcC9HE^{9oa+cqtrJeJE4=bN@+l|7UZX>rrg+w!*PLD-%LV=c^V9y-;g*qtN^qBugQ z-8K=%^70ZE7Z?{C>%7P;``#4k*>@0}s%nOvbl%Z{$NXd%n%BJm~{a*Iff>U$86aoM7g2MlC z!NYuH+roWCR@MZ#>?`$ygZt|L{=uu!`Tuiub$f@3cL26s7uF-wBjvyT!TPZ{Ro`Zk zuY>5IVijbUnuhe^)<9VwiWrB`*#*9n>4#!-k@v>e?o&tMeA`E zKJezH*_~lA{OP^-+;irQH{ZPD+Xagy?2cppk_heXNgaxy9sozHf{%{?dyD-ZAePa| zO@g>_Ge`Jha^5}xp(m(%FuCx3W#GOVVYUNY>;e}%gi9U5Y!{g83Kn9k^xI)V!phW6 zY?#T=*`nF)ntVK(z#^7#{sK@chNx9*sMqVL)$6F%Y8W3IHRO>!Mn*?XK|cGLYPEuD zwQ3&aY6azT1?5u7L@uUS9|Z6-l5x9l<#ym%^rBSmf#Fi6MDLq;@`%}OYHaktE{fpn z-`dnBvPeR6iZsgpQSR#&mMwraZF zn$4zpG#U-_y>@oqdgvz$Wo-rBYYP-m@&t7sm<$Co1;MVOU~h@A-+Tmi6d4nt{h5NY z*Fwrx-zuN`xL)N+djC)6=d;t7k<$Hj;O;80k`g+)XP&NvFwKx-^9$Vvo*4~Rk^MKs zK(Va|KXC_I zoBhGJYqh`lyY4Yze3A&``K_;ru(Jp}I4ZcSBB%y}4sR?DLj9#wIGp}m^4?tZi&WxK2o=&Q>&qp5lXpiqMCBCY`ed* zhabQTeA5Bu`JRbhJmndJQ9Y6&Qya+Z6ur`%>xwcDwghO?OnC&6+9)E;h$1y9q$G)L z3Mz`w%^uw@+MPC9&6XpC23FS^h7_{TvyTn)wKX)>8t8N*^SAW7X`LG~^sUJgxkz2v zD6>R_5g!;W2qp`H-9^HIB502`+<&q@XB9)OygB2Du;X(?WPGO`JE~RA1 zBxReWBG0gsf!L0%IbW&g0^k2xsZjVuWsJ^z{I^y6Pyg42J0?^v0IF0gJ)ETS2dS1n z6r!3*BSE9A6uvwK{8#%^d}`DdO*=X#koR|rdV5|c+ij-K#kgZ5W|%bBO2W7r2`+aC z7upKvx>s}VkdqCL}Q?D2eGh!g@3O&;(v9~PPQxfbd0Fwcs=GmXOx1^Kk2SYy4Rj_dp zWSfla9xf^mE2-e;%YuJwXV3WTg$v(GwEm}BhU{vB_w z5}JszL?Xad4V+5|ZzT$+65wniSkypd<=f0nWc%a6im<3W?+s6R&j*2erc_j~7Rn1hO+?9zR%$2Q7h=L5gE_SlWL}4)kmSbQwwT1e2_P%Nl 
z5xe$amcD1?>pc7Qm%NP7!XPvxVIvM7eim`$kw+eV`29pgz0Xami)fQ=uVe&bNFu-PaSJu*=vQ7)co`x|-$Lh_DhfA64P$daA+Kmn&(z`sAO z@Yn+3N>|`}IsG9~q|O%t^}o%G1mA6POUxaR7Tf;II$e6EIH3z{?)+ zx{FAfY(sD7ewN*7)51K;B73#afh47UUU%`?dcAgLtXf++KRbKdN=YBxF&P|RTRI@X zCs^cY#v67R%}TOAPzOFr!i%C80C=jL!Fl5pD7y_fAm?)#< zvH4v^_FQBevPgQT${k7HfZ~U0eU@HFW%eEyO)Z4*$SC2|y5LIJrrTg5N(2I#Of?@} zYNdB2k$)9P?}*LacErF5a;(lnI9Bj%FYnmZmJOle5oY(T8VM58A5_7 zruTNfvT;RA-+qtWE(y+?bwmlO0b!3vI1&(!$G{7b;FXw6grc3ItSkD3BC?N!Cz)}# zW_5q)dr#S$tOVRo$B=|Hlj*6fRC!iOQ?eG8ODC7PbBS03t!D$xZ zL(fgvca8g9)NLMJfYf+Kpx4eBc)+wLm<#}25yWg8Cf2oJH3n7^g{7EXv?3-tSxm`% zY$W#4(t>W9MK=U(>{DImSU!A*4HXo7QC#1v#B5tcHZ7haa?R_y?u&Jw=K_VUi?HwS zi}V#wt~K0}T(+gZ&DBQZWyPqPbd)9Du@EMbm<+|dhgq~SlRXNIx-TdPFw^3paSr6( zTt~L4N=?U?xN{->sxTWc=jR3l0IqW{S9i`fe-Di?o?WqnsHq{Zd>9lZ);N;Ha+k38 z!jM1)hk+Bw-o1NEufP84ei8WuAiMMXBn6@F3GNRCzf}?($;d-VZwHwdB{yHsvgxzj zTS7b4NG~H2k8n2;CR9cy3I_wiE3x3!2xw#rir7WbGn+T6L(F129wXhioHp+DNS>{Y zRbLq?MVHUdCb!ivQ^7CIG&<3zS!7Q>v7JQ**{=2Oir~JwV5XRnw=J69sQl-<&4%|D zE%z_7-b1dP6eg_^dKuHfRQh)pfQZ=?UR#>Xqn!%Y5<$a!R5sO>+J84wg=Ru%rDB>a zvS_8Fvxlw2bVU%conc_6jbg6NtGa{i%l`MD5Xno&eXp%wHza4M{6qiBoU?+79&AxN z`xqKu;Vc>?Q?sxSLd6FvKBML_>H!<#s5=6w`@pFAx1iz)NI<+-1V{Gos7;1V)qZKY zGfZO*00SkEsi`S?>(t3no#?|XxG!7Zdp8pUdwjtsiom@AOjxa5x(R(cIR@{tlJ(!d zdEP1e(NklBY&UWD8QnWNKnIL67{XE8&@H18f>*@N*fV`-QajG?G$=I zAzSDjQIt$OEo}WFAQL@g8?1VEZ}@^*_I;lXnN&U7dRs7&1&ljjK)Il$Eqt>hi!?72 z$!1Ng_xk?a4CI}=BZ1)VT1Fyb=7x0NJgESa3_i5D9v_^pUg_wQflYuU{bxnga>V;t}V6CPnoWrPB} z5D{imGHH|44L4a4Iw=XVAOGw? 
zoam2gmOUcI&fJ`Hj|6*)jE6_rq(8Vnx*O2(zJpZU8@)LylHNRAwipN5E*;sbv(adg zx|s`YA`LN7N9<;YGQw%6FyzuT1SEz)5++2NO=QwV_It{Fp0a{wyGzzA8xz;PqqJ!% z&kfj_?k55Qr4V?8&|{PooBV&a@CVA0VNuzlcgaN!1=j@?ng$X}GQN$N&1?54_o1gv zT~R4)C!=V*Yq!+4=&0#?PqX*#P+9P?5y1;93fUdj*9K7&!QLoJ?wRY-DPTw-gTO!u zWPN?z50%=<%=<*7Xj?pNEIsZ4hkU^<&vr@44bo60$#c2{QKm|aQ4%~55~@mYBqZZ$ z@JcM0)iC-_oZaX)O&bU>%7hXiia?3SoDp5 zg;>2NGZCM69hqT2G6Z331ne}O?DtKVm79xU$-C3~j>@(5DxlXSkVPTBd9X=S`EJ@< zp)XbE+HHOJ?_B3C*N-$AsEx|DeMadgij8~cRw&%LjGDRkb{Ap979H3mi?kK$2u_g^Wzap}@72KF%6&vR4R^|8Yf?D5#7 zBgB^mo#VX(iY!)7#7pF7z1-89Q)W9bvvEWW~co*4f%`~ zJt-!)Y3Bis6$GCvGIslRCnoRO|Ng;1*l3d%vNv6JBUebU(-R!@6{b8gyS%1LX_#qQ zXSM2X#%wcG1+Y&uAB)rEt|*R+frpmz!e+Hv0H;7$ zzs1~GwCDj}nPGf&8rWa7x&GYK`c^=%cZpnk@O5fuxiZ1#2f2^p^tfz`v33*Jn|_vD zblkV&e=l$N#?R(f!0vf_8en9ftM`23j=??WH4(2n1l3&+A<=6DX{PNY`0cGK-&!_nj)VlBhDjAvJU~W<7Z53kwbAR zcX>)pdP*g=YIUvEZYMY4RooDe=hZauXv+L3iyX|plF#4lDiD5Wr{J^WV)E^&D;VD* z{ILnw8?(Fid++xB_lx&JBG-PuM<{fi-@nd5-HPwQdqqDHdv2$fGU3g2!kISulzgF- zB6vz&43(T{bkpSlANn1Kfu*pbMLOInk!jKvfdCss76=IAlLB-8*Y-Ld)f3EDp zog~{wzuOb+a@|_LiU{Yl75UTRr1WeDP8O_;iewMomuh{N);c4SUyO{6y;?1oFWq(5 zUF%P5GjXl@*w|>Z(YTM9cS6L=c6O`hf;1*LGPO4wf&R9EvZKod?X0gbR=Uqr0#zV03^nRn7YB92WJD6Xk+w-+;|oQN-!BXPMOpBff?#(brX$9bH;qc{rdiw( zW`0m>{YR9mvb-1IO`nH;Nukwf-uA-9+Tu z<^sauD)3+I6dbFHk;_lDtNYM3h2bHFxWVYn_duCu0!3`vpXVdNg%)tJZBwb9XNy>* z&?SWv3-g!XxVqXL7Sgv615078kBs=C)|IYKi_CeCmG#6fn1{HIr@=yUy)D?mtZ5R| z31)KnZKD@N*O13GzYi24vR6c=iDjSEB#!-8kS&n2R z2Otzzye;@_lvwYn2&%rFJeOOni)V=20pdiBvV}^pr$9JV0iIa_V(p?3A!5da)_V8K z>Uu3(T5h$Tfm;~^BapOORbI1MRHS?&wk@@8z%Nu}9RvrOZ{M&yHG6L&qyN;trq}vk|fzOp~hmds1l)_ zEPNOu_3T#F?zKz>wtaV3Q4l(FpkCkZF!~r`t3c@KjAi%IjsoF8nNSXaX4kg)dN$G@ zVWwRv)OfL2yxQ${Z!3wR?To<<5;Bvg>?~ET!;|;rdu~A<tfxUWj6`)muDeK+zDLu@(Rr8gQ1_LK!XLWSi>8C3w^ zE*hagc59X$HQ=o-FeH#I!oUb5FV?=Jqt>a94tk?YeH{j?47`~D&qajiyVh4^%@t-; z{}9-3K;JA&K}2>E(WKV;2s1y*S|3k*|5xQOd`)S6hC~*cK@g#R<6GO??RM~k$9}vo zvCq95f`DDVIrOxVj(BjyPDS&OH-;D#WMkeXyT_`YU|$iqw?de06BZIc{N9|)P8NOm 
z^9K(6>hamxsL^N)`snWn42(cLzr!g@EFxyNurV!bC&UM-2jV&MhyfF2yb>!s)dgOO zfw|O1BSv%ReeTMQWOv^{q=*DWv`a*GY32t2e1=3$L8+IC=#_Gza7xuHbIHoWS`^*5 zT~!>%VH^6O)_hPPW7$h)CS!=SqX-{Dw|Y=xC9of01GkVpxGYd9P!=Hw1P} z*>lIz1>u4wJlj=xqNDIyOlYvR*7Oy-7^zR5`#l!?a}w#Vk2q2(12_uca7HGi)W=j5 zy(Cfhl|msrCBm~|SZw?2>*Ic}WZ~|seDc5Q5e`)?k&qIjhzw@1LM@BxW1Wr%Bhh873<+c_FfanScu}(!D!_|i((2ubz*n@l$p zuuqsLMdTwyc$*V?HT1$?hC(O4R;!)+&&_7zgHr3Gk3Nd?$6p&+C)zE>MngQ;P;cbI z4_9mv-G>+g8DfY*;T@NsSXJU(1;$K)jLmLG_r6zRmYF2w$q6%8hR)U>@tudk)j*P1 zYaO;lr0vwE1BguzSu-CX*+SiLWkv$U)HY7Uz*8NCXCmvL-VyEz?QKzKSoXfuMWil* zNdWtW^u3z-^&b$9SIUJqiecg6)OvgIWcvQx*hXJo4PWPEj9GSya7_d~+op;yI8+r= ztdu_ezF`V$i0uMjS#^l%A}|^P-A+CL0FNQrhG?dpR!Ts$I_P7)6EL_MND56Mx*BMM zCE_;CkV{{!vzS!X&pHZEb`)l{AQ3il+a^ko>nJMS{MEIVMeZ!J z7m)i!@DQ@Y>_jgwgyHcq;H>t%^+ZM8PUknDtY6|xG3IoFS*EgG8~P0PMgo{=->T>Q zBi^U~$`C``OynLRWlu0u5M$H3(6wTV&O@ZCnWtl&RvyKpcmmr?Ht2T5;0OeelJ6%X z+|eu%vy{mArP=yJOB7E{!Nz<-F!!O}s%3Sf z;BFFpMF6{4WI_aY3*-?k@}?IAFO@v+SNBcO8?RsKE;OQFpZV*0UP-i`0?|0kq&!^r zmG#Qr8IqA`(J~wy9AbMxoReco0Xss$bSQY!s1*0~`AdnGne}#Bc>=H8aw5dHA_hhv zg{dj=L^^)A(|~Y`q3cRu%F_`G*0mVtK)$v({ULKI)F+r|WOPYPs_aw({8gLqY($vT zcJcI_jET{@qCqXEgxtWw)cA?Y5R?Tno$_R_6DQ{Bg!qNycgo23T1cUdA^W10#^tmtPhd8;f+3F0;rA ztaGgOc*_VxfR5M;L*+JwHxQ(3O=D35&ql!CbOvp3jTez=-z0Mk&CzSQWlw| zM)5Zv-{;(+(G!sx0fzwDrL{byIk|LhdFhRkr+#*&+j@FptUBB7u6IYhYDNrIflM&C zVhG!j)Ko#Rrwo)lajnMWER2Rl^dYtpoH1A}$VddJ_`q6{->Os~Qq$tmL{l%7Og4O& z#=5B(s3&@>)k;I9)(EmflC6;&cAjgZ&=f`rF(Iq;xv+guJ5_OxZh`yvG7BE-DEw`Q za3Qe;`+SH{wt;vDEC{MxgZR%4NgM9bb(%i&X3Rz4Yq z!HKE*#Ou39CoW!ES>6-3yOTOee9Z!3yRVxH1$&BOjI+>-`Gy!`dqJG~L)EkC)=`hK zKxA~WvZX>ZPq60kwY7FeAgjX*`Q~Dv1mbyK>QS%;PFC31<>#pyg(d^50y9)tG9w~4 z$`9RI<}Pc&3lZ>GhwxIQkbn`W{(?PS6NYRtockXOq3DTG$30ROBPo9+0xon33yCs) z@Y$Km^$wl4cTohpSa^q|a!dqgSj%hY^x`QN*+~lfGqN!g|6)lai$G=yV(o+Nc-Bx* zXo&3xw+xh&U@Tya1!S(@)L8)&qbw{F6nphzv3RB19k^@4ZH<8vNZ1LrU-nm0(n}Cd z;S_nZ2qYtrh7l_y>(|sTm;M3P^t=FeuSLM0uLH+pg;>bwKAA#!{$yrw@W#L9 z_kO$rWeRv6?kq{aJ)fSjDSR2=@CWLYfH8z4;f`nKay&` 
zCr)*X6p@Hj014UYj+K0v2xK&Llk!%?WEh7GvF(swj|ERK5&{!Nptw%xq`mL@%2U%` z;7t??1vOOWy}1}DfoNzQD6A>QB8b`~G-?w`j208tSkPgyrZMDZLf*%Umj9?Yqbi@2 znJsxC68xx5csU`gvz@wD&X8I*{pQv&2)V^vvfY$x->nD0BV&TEO*6hUX=gM-W!o1I zls$ZMgm8Qvcx6rF^(JtxLs&~(I1f8DPlg95)Iy=8Ttkm2?ek_4$9Msl2!Wc5z*wA! z;;{HJ#I}QV$MhAM`(ru)ifXe@x-d_~Niq(k*=?T;`iO6543t3Du`c6^)2fU>Sguh3-lD}* z`ku^~R|ya(G9>ZPDB&N@Fuph;*i&*f5dmsGFzz!B7J>U}gpbq&uQq|#8o=8v#%xz% zIf1E_*kZVC+hS8@5|`?-Uk@_^34yVYQ1mt{3lBMDi0y`rPAq#LWD$AM+nCK_W~DWc z#&J5Drh~KWzEdzz0?9B|DYx6qiz%lI0@4v^u zcVljqNJe}bJ41!(3E-hIV6H28V@>eORp6Hmg;OoTT1-Y)OdC@ydB>~s6)+wMY9S%; ztoEr5h6o!XCqoReU0??X3j|aY*^Q5HFH&|%?j+H55vu>|fBn1uzx#LpY0w9JTVvn^ z63*=D^6JTDVSR69Wx-t);GR0+)Vh6~XcFFT5tiePR%jzm z7g&9v9-lfg9g@co+Yx!aFjQ8*t?rSzj#JZyMMRz&@s%3a>y7eM5PRo_4TU!i11FFR z7yg*{jg78q-@DAIZomx>W?0d{tOhzl2n}&ue}R8z$=iPBll|S3iH|3{8Hre-Mh|R? z$GzXp+k|g2u6+h}`Kv4xzrQB<#ty+hnE?)$ZH=O<#ne135lU{>Xnsfah}6E2EJ_g( zM2VPmT}CMTE5N?8;0xn01oDHcz^TyY}6+HkS&XiWnzig(j2n^ibGHalP}mZzl2@1ev~dEmk_@;W348&H!JX z0``{dPsE%ZALH7-u>M0O*KL*m1iL9+TPv~MDgl5IkFcvqm!yp=H^q4NA0Q`c=Z9s-{l1HQA9 z@!QjkyDC<|G49(k{ko&vFl>~b#6u2-m_~$zZFG`7XBN?<-1mpc2x=K2#iDY}2Kiqd z=C6j>hRCDvd?LN%0cGE&V)NP^YxdGqm!m|NJ1K38HSTuBwm7*ykW>U$72!OA=CeS< zBPVIXgmWp-;&)%usBfXoyRNz-cs(XO-66c45ZWMDG~c{H($-FUn6M`!hgv{*XoT=P zI|bjIW*jctbX98n;H~@NhdDquk#5dK_BYp4!BPygQ`xu+m2KUvct8j%WZ5I7VfZn` zHUzo<&)e=p5>ynN1kc8%iUlPWRBEz4vMjeNwgrLIqA1;SSS=H&3jpSFVa04W++pB! 
zqOi(#ZsB@mkp3?PMi)on?L^^)Na3~EM~m&tq+A(&03+u4fL;QaZmZ9a3;xMY;470> zRg@ihkdWy(yAkVmey&&;F^ieNkIke#^;(>>$fK;pbgJbo!hNC55JPM)$VM2FU8)5o zsCaDVnfk9;MHGfg)k>s-0Uqsbk8MF9=Kvf(owf;dNfBLCpeU~oceC#&gax+q44ZJx zLhF$wnN?PJB_<4KMQU=4X=h;0sc`LYYmLuJ!gq1t@EDMbM(RjZ3Bq7QMC zuq_BA!&kIIn;eB4|=TOoU6CObw$cb|BDhlJnMEauSKk6xBLHIIjsW#e}md z5V5U$r~$R5_UayNb*grT3g4Kbo-_#)8zIL$dBl_t{N0!;jV74xl+<|5OHU>RF5Q>i zc!9F2zI~g!VGU%6Z3`jkbqNM!_2R-!D_Is{Qy>YK5-r0N)=k8=ArJ%KUlOIx3(-7; zVkI_ZwWbN@65y)t&px?D)}?*O<);=ka555{jBOh*k@Qq*w;bf=HP}wY?+h7Vo!0p6 zDKTzpY!-Iiv#xbw4E?3?5B>ZmN^|M4yuOm>wme530a4$DFpNQl*uLl&+Q~MCzOtJj zPxWtljGmq!CrLSG-d=0mZHDc1a^=fk{_>*ldv5_aEuxwV)=(*8!E1@an~5^s7(Tnn zbY(+Xo48`EYsT?d@N^frl#;183^INd*KsAJaJUNmqaA|Z-=lF)O%M=+%@pCyeGBq= z4_jkFZ^j0B1mf8tO>a0jIK(XlJFyg!7-{!_dOa$JJ~XFZJBbTd(sVnmakm||)ye(d z_r4bw!tjCuUWUSKzPx6;__q?mYq7$-_6%uwl&4$7q+hbdF(ZwaV}(}}Wl~w>TKxvG zFlHCANn3%iw=DSf4#Ds4VmvfrB^XQ)PPS;BC*y7g@>)`M&a|W2{5#pX@hRKINpG-n zh#|H!^5&X6-D(TzWQfES{meH~W{T3tYqB0***6i}ia;`~H`Z2zK)tR!IuDXgZjiU8 zfinr=tyG~y*4CBW?3-!ZWy0B1;Y_NqqOC5ux8;{kZptgj$1Qe*z~@H=U!7z;P#06M zX6c(Mr<+&0roOhvjHo!5N^~hKZ7gkO7NzyaGD5BB4XuyIxeNai;-ZJ#Te`R#$C|rK{Z^v?`*1&x*fgR zlD?vzLRYT^*J|35(dl_`MDUMyi0Qnl__iBLtTrwn3=+O+M@IdP20~7cvDYbYZZprB zYz;BQtp`gL9@!LZ7Dneyh6<_@c)`x;v3mK@M+beJw=cFGfxN!3kWPdna|E1Jpg90( zF4?iD1+N)3T`P!~T{zhS8G$g*g3~Em_1@kK+scJejNgWdE?CTj_ebi$mnRvY8r2vN zZRBy|n{}^uB#t7S?}sOOq#>JJdR5WddIc1Y+H5zg6B5WDbo1URb+r(?pBwy777to`jJ4KuOcV1Yva zuKs2(jhp{|#$O=;dker9Cm3It6igTF@uzZg>mBcaBZ@=_Z4EaEvT@APS9lyU$Pl+8 zHWU}6Z!XMi(hAISPs04WjadHiyzk%jxg9&gK_Bn!hHXh8+2%R${4)@o5@02}B0Si9 zX9EhyQ-$-|7Ra;53KP!83iGKlgL2AkMYbZRZ(4{&^-d*&FjfE_nb3H2QgEaSB=C@k z=e!ZO;F#RQXF?2xxzxj{4q+}LGufZlK*XimWH>=M#4QY>yjf!snIdtgIq2XZR4a9aO5UF%JSA3;T5^yow*ehkF3t|x*j~Q>qgf=T97I-BE-b@K? 
zXSUH31l$sdcBfkLBflXa~a|XD#3Zs6jxqq@_Yqt7U!&>lqL-1EufbY*LoNp_0 zEXv6di{xpwp?&QTw-);EaOE`L@(84_M=N8OK%njs;M;4R<=<`2Up@BquYY}bsox-M zR|3I>G);@1cZ!H!CqhJS|E>kNkN_uR!uePk)2Yjv@U|wSVoOdaam%tYWUC)ya_rZ?<4WPIOM* zpVQ+ILu@lxA!>Su#2vXBDG~Vs+>^5WPA5&j@#@JF2aX&$GK@rS0JbfGWYA%F&JX+- zL}W#5P1s15bi^}rAn^CR+EeFsh0(ynXU7R&niNb0mMo0M z6|oaozgZnumKZEK*mT-j>fx~k58s^)@amc}(MX`!Ozes)o3oRZ*Fyps;yqxKQnS0} zWlOBs-U;gaD4U#ilvSm0*q8%TK0_U2;2*aZm%cSWH#;?e!@ljXtqCMalB-1OSDxpc z1h7s{z973KX0^i85#ia0a5lE>vHg}9TW1j^&`cXR@fYi)*BKf{Spi} z3mJv@im(t9e!i&iy(&-40Z;M}$xb%-wgncF8tCJcZt26385VKVQ-jd@neeeuLg2fnkZk6yCR|qe! znu%O9X>N77WRqB8#&3yx45N@CeiJBf`6lY=TMxQ{Puxa``|X&sdy>R>+<1AD`bOhO zO*4NyN%c2YG<|G(Vq(~t^&zmW3BBFSuG|tv(+{q z?9xC}f#9JL##g2_b`}{CEASR-sFd7H|Y0CusIDYWF>nF0A_r_qyow&B4K-{*lT?x)%>MI z@LNHo@0#%ap=Zbk#I`38$<{%l*9iC(S=(E2ayLi;(|HvSVA09LW)Fgoj0k>fim|W6 z@QD%kd3)Sw^uNYDK~^9_ODnv%27G@`;i)Bs_0;Bn*-Xb1YDzHW0af${t;~%Uvz2*g zH^Q5zCAO}QZR2$@16@rJ?wge{6ZC`0v)}Q!#=I%p;b-s9QmoKOl-Yb>P-Nftq&b1u zW43x&BZ}~`g5XOfjk`lOUrWt!tyw92A?(qRFRXF&r6b3VjSk`^+z#0G1kz|U5XJGb zuhcIT@CrmC`<+}0Wb2NlzeP~Hp}?5}@W?3RlOtl>{Y>5!H;+IB8w~;;$VjorqVVQA z;jiZvo?213+*QVV1nv#q?E?>mz_9=rR~w6QcGE>n#xo_&7YYnwQ6S^xlAY7lL}4~k zSdDXudDYuF4t~FTXCW5^%0BwpCM6|YYy;<7z-nSsSzA0BYtV~)#M)k`&huYnLusL+ zfFl9$#j?gDCB_clCfzvu8EgFzvdB@*{OH+pXOBHJIa%4#7jc_l+Y^XGGF7dzOWX6^Tfn3<(QRR)mk&2@j6|`-;}IDUpqzzVXk$VKZe01&K`S@P!WW#4_P; z7c%l7Lms{(nJHiJNJ;RIDhdyW3KQOjB5!uQUA82mUGH;$T4Jk~Y(g%4Nob@BuQffq z+49iQIfY1{G|iAiwgzNu2(raiIh*K<9m22H2xnV@mKNhBL+0Jt;`by^LF6{kOuhR~ z+=AefV8R3LF94q_2|iI2REgc(lkL_5Xq;I-+R^EwXVT=r*1m|_1GfZ$wA*c-Z8WZc z=y@V~3WR3=_i2lYACyZ)i%q!B6bN6L0*=-MzSnOBb<>W(9YaPO$`tP#3Gl=s;qiIG z&Sh#HJ41dkR4UoQzB4jH=wF@EfDvoa8?VjQu8W|x!8-5$3%vP-gP$^*)t;H?($ z?P-#S>Ijcm(txh;5p1$cf9&?Kn&K-d%M+&TggJG^8&F2&w+ zao@&GoZ57e2sP)KemDTWR0IzCR<&2V$OS^4fOaODKcA-QgZEVqj8%s_l=p>OfG23p$g;&Nf3K}fqB4KW3b2Vy*~ z&vXbsnpZg4By_Yjz8Xr-Zw>M&+YX!p%L(w*65*K@GM`Tu1QTIyEj>6L98*ANTgS`m zF$T_Ls^)ysM+tZ!AUs?k?DhcREE3`TZ&aB17;F8hd2jhXRExu|toMOihCniuJa2KL zRC?MY^-F-}x0XQc&cyk4zMR-| 
zAdz-5(#2Q$0-ti;~N<5pA7{ESEISKra)YzB={cOp@ zWAh59TdG&c?TJ4QlUX+vh4;uWxHIi>UPN*ELoBz zN+LB-5-ExVNf5-0%mi}IjPc%k&hB;gIXB{FG64c05D`hlvlz@oWDIvWXV3fD1jUSb zw)nU)=GnuK&dnS@K8j{tFZPuJ*=V=zQlqg@ilQG7@S3%jWw1sLrq15=c*Q;sTfR=?ao3k!JAgS1Ab<^6Gl%?TKwLWK?KG(mK6SeLF078SM--v=ziufaDL>H zd?3({xiMZ*t5BffJlbDx5dQX}!mAq!6EX124C8Pa(pzM@)rNh%357au%MkCQX;~@> zbHpHwgHR*S%Efgq3#6nW~$_C+EOC&iqxAq?$XMA+h zVm#`Ra1Kg?Q=n0Co^z{PMd+WeLSYP$MndYc&_O)$C(0I2S0Kz0z2PBZfX-RVPh<%{ z)jE%Z139UEh<&F(+U>U8Xf)16TK^PG}@pbv?3S#B17P}GFQqcs*w-_ING2kWn& zGvd6Rg%;r#>%ea|8Ff>bgFptyB7;wqEuJe|300W_({34zit|Lsf>f^?eJf?jGnfL` zQ6C&Se_q`CX@e4RPdzxs_~Utld&hv2P2g`Yk%Y4@HYq65FfgC;ikcB_7Tm3?@a4LR zC7f5?iU&-q2C>4G^6nB73_P zh_4%F)U*}OEAc+Wa-qKD4DwRfja1>Kb%kGUC@J5~n0=+bRVcB-vsJ<;%fPG(o$~qn zDR>}?Q6>h3Y9^;ZHjVdUOsL@M*DMtj;@QR9=d8+!7;JNWvEWgnF~3 z`-%K3^QOl667VnQEk1p~N)^`Yb%noQ()iA@lH_iQNGi8R`s3!%oti;}Ncs6OzugA@ z;j+Rv7B$W^2o=qEVv=P%!F|=diFU9@ZHc>0flXtDbs=B7D)&G%LZ3uL0vxaDP$Y{LT0k56U7G4^{iEFQ9D=5uwK zWl#13JU~ zf_ZX;0|9iNs@Z@BKxssN}>8TTX%%ry(&a+XW zX^^eP`KH3}wiM2`fVT0Q-x5h#@4d0Xr)w5-+DfCB<@XZEYw|gqYUnP8@*8R+@xr)m zAtZT19x`CF-h|$eT15Ed0pQP%GVU9L7}Hi+CDK=mPO`O!q_0X1)^E6(5{WDS{G7#H+4?FV zR=#nMa$ek9hx`*G@=4{>O~QAsD7?7la~3(=#(u8Mc)rSbEb__4EOerCfu$&?#ap+8 z3mM^z^Eg7T!JWswV~mH#Esm6d5-CxdgPa&dLC_ia{LZunj#T~3lXES?Vw-Tb$yjUy z4MV7C!dOJo#ZHB4vLP^-hXPy(hq^VBFv*PkAFUAn<0ZmBUe>tK)S`S3R~Vm}0sigX z7Qa6WjBDyK-VXSBh7q-LMwdH;m)8}3wW+X{c=usKGd@#cJXvB)?vAfe_YMr#tl6_X@<}!i5Xw@ZrOoX_{_XYn}4A2NLSP zSr>%5p($&9EcJmg!slig&rSoim_po?#ge}rd#acFAlV!*Z7BTH6~c*afF*A0H`1IU z{7DV?RK;y{(=ErOLJB>P13O=Q+X9y|;8dni4}C2Ta}}T4d2kFUYZ?kG-W{9O`0Xa)9~Kq<>Y|pC z@Qp-4YmFmS#vjaE{Kegj&m3UPlz^tSfq<~rKQt&%2)j99f2Qd@(GzurdgcoOjyMHU zws^S9`?l97D2e2i!u*iS23R%-`yoF^B1M#faEjui=hxB=)Bm#_e(dWUND)g*OU;7^ z5B^4L{e7j>FNvsG{QHgOMv6X#Qy@pm7KbVpwb(+lr;=yl+fqi_75sr7emNnWs535h z{9vdk=>3{vJXnIb;xpO{OR`Gx7kf2nyG>J5z?3G*gXq9zk6v^*jY=Z0GB%!r|L?Q-6(MWOew|_CB|K`mF7U{DWsjA2MU6P zLodjbf^W1kzMkK`ZX+XHX%pJU!bYK)aKKx}mq_={M>f6kG%|m7#7b4xPgfx`DmGfc 
z&#kW}bJy!%%mPnN0*6YFIW5j&O}m8=ROn?Mw4Zegz=2kstoNGpzM-&oW95r3E(smG z=e7Frio!z6=Q~sc+&^aV`B}!74+7830At!Grt^LeD&zzQia>_dvcOsbtaJ#Su(#dX zp4B8Jyqa(oAG(+ObE05EG46{Ljz)xcQ_tEE-V+;Zk7%XuOS9}y6vvxL(}A_W3@bh~ z3Irjke&2ofonBg6`Y|)#Ym6BK@Gvvqu&0$LaUIHHodMGkBs8)^K;H%-O9hUiZ38T{ zA+s17nUC|Da0GLTWZK$HVAz#T@;o22$4+PX9uTK^CW(bJs2$w+tqi!5_@*oq!eBm= ze(TTgrVs>z6KazY;nOn^PtaMBKVEghQDLh?__guuu~!ntO5(exk5)(oVo7>Td|sv7 z?=tA~`heiB2Sp$o#>|p0D3DRc>=Vsr8iZeO5`MIz@Z&YYm6o?CYBA&faf{E+T72aY z&^^_!}>Hg>}#W~GVBobZAGUz!~qI8Z%z&(-kUVzIs zO!0I|#-ifnNS0;CQa@6Aq(FAzL#03*PMkQA&dkibTPl_QF-ejG#+Vsqo>xlUkP>0` zR;ycO6Pl%|Z6fkki|vD4%_-O22D#8yxYAKRyc9f)ngR}M2;bEhuJeG{)0s>v7B8es zy}>Hm#tYo71TbYnNi&9^=ti(ZM~73LiAWN>6>TvY0pDFFoNE#~8R7J{4{w}lGEO%b zpK;#EI51xd^DWpbVVNpPdBMT{1yf$~tz>;D=UtL#sddK3Z?}LKR|&sZC!B0(Y=v^F z@fgxw{>4L9@*a;*dZNu_m+}~<*o`~S3@irEa;mYJQD|&r>2aFZj4|?E_3x(uZRjC7 zpcuzA;ZUTok_0W`Ka<$nyHb0C zYYudlj1y3^CC0e!&+N7QJKO?yF8al`PZIjvgpV!FMvS{7mc<`mXR=s2)9==I6)al>b_yq?xvv5%PG{n6i@7s9k4a ziw*rwV+uGFSux1oAteeVkczqsanM>m_Q%JLRsZwJll9wtiu(>9N(Hj9vElaB`oV(- zUu+svv&{3%{4{`x>-`K_fXqVOJ`ocpozT&|gInjeZY}~+;}htMZTEX%$qf_Y=C#Gq zm@z?q@Xz~s3wb|_yWwL5DT}R9VJMJ+45(U4AW78&L#R3gL0KB9%+fdz10S6PYLS&$ zB2zKpl?{a}ZD6~@cr$Z?#)?2a(;_@Gqwv@`aLgG5v7cCHL(^jJSr*THr}xvp(&pWH ztm2a)`X={MleeNJtYd+VlyI)acy*KT@&@6xE#OQOsHb6)rDoh+0lqNL`1~B>lQ}&* z`87s)P;3a3L;1Bx3}HEe+~X|VG^Kz^MKZ>=Ot`MxR{r-AF=my;At^2an%$ldCd>9f zW^m~B%a_Og^mCtE`T7el4B1r~EIyP9BuA1Y3$rsb|Gc`sKASR+G4t=`rOOlu)UK9X z&-(~U1dK(DTI6Ry_ap0?hsjz75>|2#W4T)`gISsxJ1%mJz3RDi$&Qepn zBhlW-7?;{W-FTriYyt#@_D%}1@Qfjz)T85mn&G`w-^=ooRmQmnq+5A4WxTuroNfTW z*&>Pae`-qOV^aoqRs6I=6_kkS`FT5Q@d_nKx214G!cW}QPk8|Y=d)Y7?6>~^I?RUA zp}}u0VZ7BOQ5?VCAiS~VOPse%XaHoASt@r`Exvk~@zo<1503*=p>HN{5@dG_avZ~# zAEMxFv<+~%?dg0>P*!GETSSIfX(&Q&fMqwRVCTwL27uzh3h zgse_C2#YNrj&ue{J}u`!8F*^a;0Ef$r zS3($PBY_kNY^Nk8tf#gKzuP7}xeYuy$u(~S zjTx#+-aXN+rS=``$f@mXETz|vO#(k#WBh0p(s8{i)oe=6^x`U* zj>1w$Nduw3c8qClr3<}Eq1}+Lug539mnA?-+b79aCpLoluHzG~JPLB-YWC z1|}u{!J!-e`aj!S>ONjP7Xed|l0>~B76H8MHfNi~TAwNPCFS#h(bXaBGvUkxtq2nl 
z;lUat(e4E4C#DJCz6|_s%jX6xtXSZkRLQ>=+Q4g@gzuLb_tY4VOt1*uqZNxop%17U zn$#$G2|ywFp3jviSg#Dcr=qDxE7BXJ6$k)epwO}k>j|)&Sebo)p-DK`QaImISZFh@ zbevb}r%|?TZv?TUu9NU8MgWX;6pK++7a3CfL zag9e7H4PzgERh#LDp05FP!I)0V!5YeY`Dh>2Ub}_*iL}$Okp)4iSIk_Vxh&j)N%h- z60&Jz1ZzStm8^V`R=zyj28x}C2y-#x*=fcf&RHBR`R??#EmVgT*YXe}Kf1ig!Fl8- z8xRF6Ao5jI5s6V0htTVdLWi|dR-WIkTe9GrO|>zmCS+kF?7$tRKpfQhwPfb5`4?+b zmBz}JjXFv8Czf${7$V_?HRshZc@mXn?nbYHHLuR0;4a_#0nyF_sfR=;3Tgewx!KxE zBjL@gW*ev$D+-ubz#;M@Zrjw0v!7`~XuY49@O)Wb-V9$pBu8E`#rcNMOOx4q817y= z&*4}Vcxap?GRUIg z56d`dru|&_SQDnpEV-Ii=DZS%H1i>txf0{1KqhHpFarR3Hl?39%Bc zq$JgJTNwqqP2YJwry(3zR0uI%FF1IU?w*wthq2DoBXD8X#@aHnY@|ST;f_-vXWjo6 zmb3rif&1QgVKV-|S2j1(B+I^REFLJnE;rPiZjf=d?5Q)P6P!Y+CzHgiaf|sBIaHgp zjP;{Bt?I@)oItM$lc=w#y3rhk%~^iInAH|XVqh!*>VZ1lGGVdhybu;b!m(iNO1L0g zl%WlG(0fa4yB8{0u2A&#cJbS>8Mtyb5ceF1{(;XqTB>gzf6I@Lg z=UXh@+9gsVz{}E*>G`%|os$+SALh!-q=N!6qC}Jk^VC>TBF-yt9!MuVoWe+R7KH91 z^go0YQ8Q*@i@R&UW8;iZOj}VxFRT(?TGwb>LPS;+-BcVZ2Q5PKp9jvSdp%$j24k{U zmQIw{ja!?93oV7FEM(*JALP7g5+&c=n}sOB`+XPkJ;6ZCp6%De-vhVwEpx1RXjlHg zaK|Z-9RJt5XIc-=&%ZWaiT`t>gRGsJ|GzbSba!Qz6Cz6;!lkywJyjC6&nSdQn2O%W ztyLm!YqU9^4<4xh*mH4J11X~35zW3nP}o>3NqCEck^8BzVp+PfonT!~fQv1X#O;`3 zL_whKDl{Ex>^{ysdNZyq#x!|{lYIhtIyU(Htd%CjXX?IR0(!AE%J2+80Yq$G}_+9IOC$R}3B) z^Sv*Rjr&sNpRMOTO`ZqqP#Ktt@Cp_-0E%u=!MX_{3m zd!nazuYX78w^E1ZOx3-0T+Fbkp^xMa0C%hcambdJ)4LzM_ZMqPyLoxDIoEC{e+HIr zb@D>MH`SeLD7>@|OvZ$#CJ19>ee9fe5i8z8u&~Ecs7LR8x{~md682q1KEZw?IglP5 zgfCs3(=6nQF%de-hws)iB@KnE9j9PCf0PO2io+VldOvW;NsPsw14=`QH_*x$n}$VD ze{2%?$OK8~>QaaCcHQ?_Tx^qEkCl!epV}~<<~+43colx|mOC*Q2Cas=p>8U^W7dR#-}J@2?S#S0T-Sb1@4+B$PsP%^e>h2?_C%CQ%@h zQK3R)U(SuEP&-26885XVv@{(*eo&r|m@uQQj5CM>|CRTaKj=#3!cQ3i%^z&S>4y_{ zv;sNn9;eP_|LfC_y!pQy`&TQ>lS!I=)>=C&@n+>aT`#nO|98;~meyf~XC|FR7!t7j z-ni=rU1nV&&kB}9`+I1^CE>I3<(?$Dnd^6bxbU0vD8>jl7f7+%p!Ozk~3j^YR00TS*)kP zYU)EjrHGvxszu6`^NB5cvfd5#^PLq@O=xhJhxJXXQl%UM(7GQT&TC9 zVu!%Zfr5gdt{WZ7jD$TlPxSFk1p$`F$@@s9QVG%0&wmLYdXJY#?+pEoNhMp^r?i-f5($#+dig%`vF8P~pB 
zyrig0<6+qfrM){wcTCRna0bp+<}Gh`-0uBO`P8D^A|LeC{{RRIuxXqjM>yXmA8HLL z_K860Gf7^yGvkJPv65TMTyolQXso9QXm19+4arp_S8SE zkWMVymbLr`%s8xxOL)qPDa+f)#&PN*)ny|?53SXGSK*hmX59k{q3C9KWO>ya<@BYTX{8#aI2!R_ z?ZQZb6i}~&>-CMTXP%t;)xwfWxsA3m{0G+ZU==rBGz4}5G2 zp3IL}Xd?2_m^Z6LoPen0lhtyt50_<#M3|t^j6OvdG(#nm*TkLz0F7uA+AP?I0Q(fixod?pp zjzz^_x%Yb!yFWu_#0TNpHY8!4<)qinJ{~a+S9~7FS?J0B)E)ue-)b7dTMfeHPQj}u zKlkH^_H)fkq2cw$K0UwJEQF#BxBi&|xhNN3*aF)PrL*K#p52E5BL%Vte)an1*1!9A zAN%DC-#y!|x3gwyOqOQ$QAL~LV_30VlSj7)&iix@vQa6)a z*FcDW1I0Ezu~dVnV1XM-q8C9SRHMTbx5LgVk9=%jx_&LN(or%;V>NY#5`T28f=)+Rlvck*vx>p>Iy5J-XEM|nR4E9#NCn3 zH|riK^4xD*7T69U$)1ZT<@5E_+I3p1j}*u*j1Pli6lRQUh2n@TCz$l3 z5;{b*4nrFaT}pw@Z9C`vh#@927%X0R0l2ienSA#CM@~jM{%)+*Uq_Mtj#hMD5v;Zn<& zN#~wHP5Bb^yCOfw-4?>R9`C;=@>jU)w9SMSOW1;+xgG?gC8D|_TB+*l=JU@#pWXVi z-QO5_An${J|C66&HQ>x$_ujv?vAMF9>a39__Ibm6k7kr*ct3lpixYKUCE~o1%N>JH zA0Rw3M$$79D<9JG&1Ua!d5Q-T`Dus|ad6K)r;&5BTfqa-Ax?Zf!lEdsb_y?&dCS!=(cD=gSoVjZn8YJsEQfKazA1p^zYugHn??aMxY z5RX7Ala$X(oog~S6N0tWWsROMG43i`xleZg>ibag46G!6NY+N6WXLNJr^IUpjz}L3 zg3yokpj66Gy%S_ zbZPaopDBHR;q8f&OWTcCJ1Kw3So?`A%VsoClJF8Mp*>kkSv-&z*MQ$Y!1xyj2oH^U zItmpMx7S*U96XT82$(4`YO$5x6SjnufK}s^2uZ1FiFQ_d?X_u&N#5e~!tW(<5skx< z#Un9GN#uqlwAfo=OKpXB>Wm95gNJIK>&fin(1<(s@dz?6WG*r|?L@6rXrxN$(`8I* zXs8@U=#WTXNr1Pvm8=V29mdi)cvr>Zu5u<}o@}3TZ%`f@A;!5j;dk4Nb|{Tfq5OPO z`Ek5%KXpQ%P0n=4g)lMi;pOGOM?oPugX}`8a4zv>yxf}x$y%vPQKZjw4oo+3y5Cn~ z_%ULayiOQnaQbwT&82pItWtZcRMBq(SY@`;3`_z_%J}nXXj?|%d>eS9PV(4FJ@MXw zlAm^m!pwr*n>?R0k+8zs4TVz;=gQ~v4}1^9Lowl=h^1_EuT4TB>!QQMDWsewR)?Wd zIpw%}VTFJW2&YqpCCg4}O8-YAsw;|w1THQVIsV;sqn4K8s}Ril~$4V)boEh%lO0$ciDgUBSe9aXW#w7 z3h<9tfYpxYUQmhx<1z`k;w3{klMIkw}fr$K3|EHr!Zk64x7S<_WiRd11(EZOW&|8o<-9@{@r4dWNZ?4*HT8J zJhDRKm)ZA6oN1DDV9(NiZYS)Gz8%wq!xi8C-7-ELxS6rglr%ENOw8R`eZv4oPZ+Kz zzOdnPf^N@(yYELzBsmb$&Rezn<|Y-3p7Q(Su-5UUo|ikyPtwe@hzx8R;7#W_WC}I1 znA94x63%3viJ@283)Tu-3V2r=bwNu9 z$_TqLQXn^puP}zkFRhmI{hX%FT)3a|e z$wpf@gmnvfY*`jX(g;eW!%92~1J4V!9{M7Bt|>HoA#rc=U_%f$~RSB 
zXhEhiHZtGbsVE;Q>}Qq>kD0uaKpCX5a10jXnnaNt(S*A+;qJ)dXl#Y@eOi%>uB`7cr<$SU)Q-8M|BL(sSuvoA23ujMk*sSt{gVoBZ zy3XG081tu@vCkUj@!&~^YAwIvU*1q+RlQkvN(6XrTH#2=2s5rF1z)6SrMfCsEMrus zqhOT@k)9XyYpk##Ls@O$baG09u#gchW{e94IFpg|s7QS~E3`mJEK9=CD*!6Qi>Z)5 z@f6xNP^WAg#<@0QwF9JMURd!y?Qo_LLoX|f!&Psw=Y&i3l!fv2vXDsnT*)xy5mX@h zyVzz?ATsVa{LOhFa}|rpI1EE$Rp1*KZuE}S)0ar-)BqBmokNuslsX&%636{DR3R2 zH(2ZQ?@ZQOSlSx&Wg1G16vziez3!HCyM5W1QwPhXhK=-jQu^~`%omKckAT@3JK`zW zJm%W5_|7uP2KmK0;S*CDADyx|KE@&dRH*D?VhLXz6fY$Rc_v{DT{pl&rg2s=R;{z5 z7^hQ~cyg&4Rcp1ytw$a)SXvs?6v!2&?55VWmSj)9^DVo^B96HcfbV z%nBunFa3p#COuN|$!oXRbAMsc0Ke=I&KbgD#yFh-7Y(o}iNw52%67YU^C^T>&>KCr zQ7S~Dx!X#ecdlfHu$~akwUl(F&y)e$XAXTXWJvjjJF#ksP}0ug^^&J!32Pa|3)#*{ zC_#n=x!1RyD#?E=r#-58VQ3CBGn`Vf=nWgYkv@~b!mmRqS+|6xMC0wcuV{3x7c=kC z;_tzs55HSwNa1di_r@eV8|L1xgK#djIFTxuq3&?m_|o(|_d=kKQtEshMJJ|ewUwn8 zUg+`t2YleEv)&=SS zMYI-aeLeyf7gtx?eY+0(5Ti2Q+kkevZMU{N?YN|_=%{=qiqt6rwus#};0Z8FGVqXC z#+{225G&u0Qhl=loNRgu2WJVdC4_ey8tZ8<(dQIKCyc$dIlpU&39Ihbp&tJQc1Nsy z$bhhOR9$x)_N*}po4J3FDZ<@VCG5kRdepGg$3Ram3*W22@^htp2#{EmQBDcYe#Xfg z+ral%6#jlu^zGbR-D4zdUqL}ILYQ@%hQwR_=T*i#P3LWB z>4R_&rx+JyVjqcz(y=TFxQgNg}FMrd!u@tB5Kzb&{}6&sTC$VQ>s)}8jVKtHl6ss$4G(P zMzlLw+G@8~|J(oW|99b?H%~8F%WFdV3DzL0%G@FXY4YMeH7pe;OC3mm!a|_xzSPn( zILKMY8KYn=QMY4PQ?F5QPl2%Kz6-Cs%FBof3C^&|j#YriCm`cg^RNl}PH{r-Ia3WK z!e4boA|Wv(o#p3Sgx9v6KvMYrGU10SgkNlEoNYP_*DDQC=AnmRrUX1V<{nmr??4m? 
zQ#T;*6$oKH176uAX;8h`Qi1!)pRe;yHamX6)WyVmR?b`5GM+3_dK*;eSSNq){Ix=l z1foC+Q z_riQsPf4TZ169W36Mi$R!*`BP2P zIYUYdpgcgP#6q8mf#X${GS^zq44S^;e{38J6A{^KU-5cxvj90C_lrV@`S1C3DGdW zoZ&-X_tk(WCmFM)!BDsKN~#G_==C|3+}!dxxuQ_enV?YXsNN}}qq6zt~c+{+8Qnd`CF=eXx8A#66YZ*{CG`C_zEa` z-2FpX5@i5Rf!xi^Gf9#(o_OMk)!EtE&hqkd{|{;iF-oHM5mL+1Y_@U!{P|CR`suZI zPM$i|Y2r15^f9H>BTA{qK=cu^cATv78(DU;-I3_-Pf*2whYdoSq{7Ear}n8P;Zufj zPy;mtZZGaJXy0h%&kpa03gwi0*oh`kZ_6WtgmtcDO2U4ZS`hEzyfmOec*$0i5N2aP z$@jJhQTeV1Vc$4*J_k5_<;Z9*%{Lyv_~iQya+<>ZlF>jo+i_p{jZlQTGj_vV?} z>okhL7biLv{}_kn!P5;ti+^JWEAn6M-uA_&MGxNIEXWsJlXiv2*@O9+}zoR8eB)|Mp7BhtD#ZSIxpgyDD=qAR&&69TLJl zr!<@bSxyMe)W`jOL`p#`>y<|CM-;fcn0wNU6o=UmjFk>!H3U& zCY}k3^ z9EccWCC|QG4r7&dXu!+`I#Z9yxp!T-cVCN%weBl>tCzcT-+k$Ehy1 zUt+8fan37R2-r3x1dlHT;-I4?prly(DJt4m1;o?~JrnOdZl+>5h`JW76C$bg;i!cD zAfbi<>In;#%6jI!2_+uT#g?aqxsv!?$9C$w^E0*(!i4sP1G8lk1#+;=I8-Lgmn;s%kcLY4n21S;Kua=llSM&ADsXglsaAR` zNAhMtdF9Qla(}JoA9J3b^YS)RPm^=LO?YwH)0Ygy>$?Vs++Cn~fIpk2wyw2agE4QI zJAA^_NP!F<6cf#EwUeh=@zVgtoYn0ar3b%-iWZ|LU)gk_Mflzo!uf`?Vu6WBVY1}u z#A^!D6uG~ zbQZZJhV!|;#N7}{USYHMnOyzZuFzsgXGpEwc;#zAMRTD{twSfW_BC+ zJIo5&^O^3mD!2RtZ<_Vf7#BJ~r3ER~o^T2yCQL_4c-VwaC-&n7#$!J&x2hOb?U|)Z zp|VS=;Bt>Nc#TO=7#-_Ni`{(|!=S4;eEIr%!L!`XI21+kA9=1Cp4( z4tbjEJdf-7yHj3^9YP}oCUi~-(m#cs2Lj#C%qmHSJB8LXUZJSnn(sP34TVAk9iHm4 zp)e%$*JNNhl+?OMNkNo^Qy3K;l!vCS0;y`>XQHL=27&M}cpyyOd)#5Cj7dBXcMh#A zutZz`@6>oNMM8FJ_=qfXcn6Q}Vc=g|tvAY*N=LQZr;;SOBTIZo3S@vda^y(0)oMIu zP4;JO`8gu0@Q}@**b7eRG>sQ#s}vZ4m6gRL!bt%!yg-o>QOY}wsXGJ3<{k*=^x9hf zorRtT8zwgvd6IILV)yyoyh+H<+)~j_o`U~!+fOJwSn(EdKW~l9z6jojyVjT6gyoJG z$h4=2b#usP!t|O)J#7ew$&*bPE(UC{r^KN^cBq3#8d0viL!koEj{KP@D9c@*DjPyt zHWV?~LP(RkCAU78GORm+nIjNAz|4PU)3klxefO$*=!E$A>THP6vzP4Zg1a{ zX66r>@hlM?9Q5&AKe+qW@_!o8gLi}Wk|0XaJjUqO~vg{^0K812^jLR3&9`R~5>r5A3@DK;fZ zZ+5?Xx3{osE$m`Iq#du};Ozu@9m4K1jG=>P1r|p5k1+G{!otGh^z`&UH=E7PK_6Sc zF;XA{!XpnpSiZ1u=_A(Kf9af&{P|(Xdt4iV8X|OW=awQDcrT)j{4 zoqw@>mUn&jPTXqmLkaJc=fiS>yTgRBpuD(`Wz;IrlW$}!CA?0NT+clHoBKBGP|3;* 
z$uL@WXJ1OMrm9dye6`$A_x|=YKkc}q1;sqA4L&QjJ5Vt<#KsFP1AFOJF@;`Kar|_XwU)TPMSVvnV$hLwZCky_XoY- zgLXEQ(XL9NK_j=Vn-az%sT|s^6#H=u?i!`mSDkG+PiP2j+xNTIcK~Fx41iBF^J5be z6Y-!AsGk@qkO5$NdV1Ob&oJ;1i1dE(KJFCc!p|v=D+x(8n6Wp=#dHgfL9s8JUQ3je zGpA19*stW_^Rw2*ZRt$=kL{ zdJ~-WpV}hz`RxZ2NzY8nl61sxA!G!01?eltv?q%Z@2gu%l% z{CdJ1Ywff#<{@Lu6O~G3e9*_%FN_pOpJ9xNthL8O?qh_Z1xqE?R$`_5JJ~Ty{`P{x z#&im#VOhHG+qPK7Jj^NPI~L9`yvthq2ocSF=}TX_Bh>szf%Fmc^YfUPn3xC(WTIff zj4))7jUhA0t%Q)8#`9s(jZzKY1Ss^rq!!pr7|W>@8j!q>`HqlG`vjq!H~)Q_h#n-O z<8QwC=H#G{tsfXEkUpZ*>BK~IFp8oh%v>Jy@r-bFs2~_xmZTD^p7zKMZ>I$BHuJG3 za@;f|qj-fVgA0SEMi@FUbCsD712~o>$sM6UMtvZCgi|2K80Uf1M#_7CAhk+DNG)^Y zb53p=fw`q)A-rJCjC{v`H-t9wT89xt}`bK7m;{o=mQpC93RK}3p(s@B@^EX(eQ2*bz&=_8UP zQBf4d%&fH5H2kzBcQSmk!7yYVeJc<=-fnb>Tms$tcE)HK7EY|uav5$gGpx07FcR+w z**da<`wPmljGO|=ljox(`mn-V{1%M5@m=XD@8ku%jo&POPTPHN=7(k7E)J{_ZY`Ku z8)I~qWp{*Zy(3O?h`>Z74~&$@(1QY{c|8N_hNPJgH<7}=(Yu^i>dNH4Flo^{M0{I# zutvDGC{|{D*fCNdeT3GUqbSOV$OdI{$1Jq{1YZT^nT8EJ<>7~1C05;Z8SRaVefRn# z3}H$`XCvM8r;X4r5Rr-FIIC1D{p&g%Anu4b-LODwZR0pjt+lB!hI0>Oq(p`bzFe0> z(ppI<$$0@8?_%SnU2n>Te3#mO4?(|++Pv`Oj+Z#;lR5<=Qy3{U7w#A+Kf(|~DV0W1 z)Gn9H>7b9L9~gNceS}jUl}e>fM6JH^VvKOZ$mP6cmM-v4om;IWt#y0Q2h$IXJdi$uh?23f zvGsPly`hxKthMD)9%NWSAz|*7hp_;$f+p@#(dQX;OWaMa6Zgm%=_OAS zgfmUXi|fE$O`xm^kpfD@h{y?f$jo#V9(f>3W_`;i_y)kDDzQN^_0lT6zj_`l!sRwe z@}5IUad!mdXsHYp@+g8*s;QJ(DwoSkU;XM=8(;g{*9LuPw;dw|(q|A+rj&XYz#4#K z{;82f7*@DjlQHn?4aP6TPUxTO5EfFOpU}=Xq2Of%k0Bz0CgnX6 z=ZPr)T16^ExI#EAq{F*Pqxd3Gg_^deel5t&%gWbyRQ<_{mlGOu^MiqJcb8U_@!e>y3xD$hr6}9g%@{e zy?q#qiX&))g4q3b?mZXY-2K3!f}+tR!l6T<4BsfFmX%T`s@3Z2TU%RqME(QtM$!m< z#`^kt7C5mEF|$)5CG7G(Mq#Z1gL>TU?p~;n=yo274k0S#ybcw@Rf-CEkev;pdr>e_ zc%AcR{O7p$htKPJG(Dw3)O(CDcoYpirBpkP<5x?i(syTOW`1`6{r7jyojW(sn73tg8#%htL=pL%7VQ-J z`6CZ$<=#n7vRlZcOB+b2>*Daf(SjHv?8t@WjHx%>~cTJ1-Ri;D}VPMsR)@%0m< z5Y~Y3$}6v|DW!fErW5`-fcr<)SVM^xDLT)FH=OQQKusyCfGAc#WSJw&&|r>a-V;l` zmHVmCEz0divB21=EJ!If?FIB76 zAI;6pow{)0!W|WZ9x0Fk;q2M7n4h0N*=n`EZmo3!!rhCbxmP$+CWA$&mbHp#QAg@~ 
zDf1~~Y!)dsskI(gN>!CoCCj#CtSwphFg7wynSgarAWHlR5*Cusb;yCKgrs?$2v%#A zMOr7MsL9qgt)XpW?G_Qm7I>JMKf}y!ZN!7VE+hE8sgyEAyl*{FBOifMw_LWy& zX$|_=`i+qS86cLImz$MJ<+V7D|1wFECNqDTh>lomyZ0qmBBR{MAV5t;^y);V_IHz2 zdgJZo<&2oszdU|Co;`dherIthYNm}i<23H1nc8gHsGX)N%bMgQ3|Ld{l^}u=HIjPe z;gl0p$}wp>QLdQUc$Dc{sdHE9KvIq4_V-?UEj>0p9j$d*pG?wh48UaGH#9hmkZI0@P}?ab|v&ng11l=dHC5G4n*RL1d&r z`VU2PK91x6V{UHl-=8^qcHwHT`{57&0`v1PtN-nbTli0%HMP7Pif}5^gwh7ViI(wD zhLwynQq91o+|c>?8vf_<0ekq7NBB>F@Pqe+Kjx;V?rpU@f1X+UZ<+B?&?s-&FXT~x zU3-s-=wcK_KZv5}8xs=~FI~KNacR)UIw*{!(U9Qa!GqN-%O2}=I$yNbe##j0D1bu% zs?Jl~FA8M8hTeAf-w?n$(m$zIOaDt0#Xq@xd1ZSL$5ktr$8;2ZEX~aSXs!KYkgSP) zNrHN#CF*-v{@3-M-LHMW_4s~kHhf;Hlxh;uQWQl$EtkvxMJe^t^78T>>3QTBSu;a| zMx&8wtyia~r{Br4>>?3ug+=XzMMOk8w}8T1hE7WF7bUkBb|14+Y9)^JcPA&y-^jA9 z)keb%BsqH2n6z0g*8v`5;9y`cxFbT(yTi9P@}BE`epf|R_vym$x0HzLTI)+h^s3hS zyW`{If7@&}f4Q--(YTTG`7q*+Scv-t^?E&tqG+Mh>D0^R@;Rl{tIiX#);`G0_W+m! zFwM-h;F*-1VnNslSJ@2L#ML(E`@Mhn-*;JlHn)=ZOJn4YBu}UhyV5QmpHeD+j<0dNRP+dLgqh)L3g=oLhNP-` zuQ$x>?tPnxoVT&*JPvmolu~aq^P5o=y>s~R;j_Q}?Qic0y7u?OaIU%$Fh4)<9+QiU zi+6Q8oqL=DA);f8T)xE1-2 z9l5}(Yo74>!uKloU4{1$7s{*M`c+EVuoqHhZdq%$o#N11uPLP#l~U)GQfIW*PM0sN ztgPHIUFO$=;ahbh6d_X}DwRr9tJR#6aNfssmSr=>m>I40wAOmc7*or#teRz6#d#%K z>q-(M! 
zINj-V?n{#7kD2*FBB~9ekjqhER9%ju=$l&Wztvj*XnlQsXh`uU#>S>N%brb{|9i{) zyt6ZMPj48mZEor3UPIxUu9ixrUzSRxpE-*@%d*n7NJPWuQ1Duc3WSQv;2OM=lq(Bs ztqMQoy_}a<7Zixr+8I0@V@%x`vra_oN~w*>$w{X~)>l_oTUnN6?RI-$p6IY*6v7&A zxD8_5!fm(PopQOn<*XoMjI-_|B8p0-lJh*G#~*(@diB*;od*(CtJP?1Y%Eer>3q|L zP08(cJGZ#p#xY5f*hZsK^f0=fhP9TTdg>|p_v^2}E}n{W`Q5{Pmev{tg~GYgiK57D zOit5umWbZ4*8Ur2{)|#;G7q^819pXfj8f`s6h;43tyceTe0=i-<~tKDHvP?GD>?YhfXd;~$KTjr~ozTz+F~YpbE%!nuCql?tDS z>v>;(U&Ff3*M3*=JpTCO^81MsCuD8Ab?)|pQYuT+R36R{isRU&z{ZydAk!76vy#25q*M){uO{Pg^v4f-Ub7*o_BE> zbx5gKBc;AktyI1pY5n%%l`BIVs+iEaoDw~5mHHwP{Tl!u8Dyt=@je!RkD};I9LIkh z$MLtzqRTrU6a;>C-dejnEew;w|jP8i|Ci%zH0$+B#@TrQts=4PlM zn+YLQm4_FuX32g~?C}{EK%0mbBBg#`x}~)~rM3RAv$M1Rq19@=e&x!Qp$Y|#aO*HqAS3hz%$!+k zx60-6B>-occ@;njz%&s__~lxq!8f8*iWc!6O=VVVy`Yr(Nfbr@NooCGwJ|TRZEvq` zZf@?YB&oI58e3jxWQ4xK$-5*;Hlrw70N@^LM6^vrSrB${PHVp#MFF;#@VlCT z3_r6_S#wrv{p%=-zNfYRW*o;q8Xq5jb9sHenPu7jXjs_=V+hSq_r+ZGc$8htMw6U?K8%h-wQKma#|c6LWeqB z*K0QFg}-qq<+g=BpQcjklGgf!yQZa5=~vABR;Sb1+TPy&uwB;?u7Z&Q8DXFpA0IEJ zX*vmDK8m7yl~RvpStgzL%zPNYEHh6qbCsFn-pnRn%gW3)^rW=4)-C7J09YrYD@v)0 zM05(ky8zC$+wIF*>n$hzTCLWHR=`!ORWinm2SUz6aU4HDL=RbOA27xo18|SK-poAa zuBr3-_Uil&*VcJ&9i^1>6jzy9n(F|Z1n_pLR63)zzLI6xRvgEz<>lo8pZy5^#7KdR zFf=%J>{!`ad#GNoA4${n5E0E0{eSG;&uaQW5C`zz?#yQMTd22UPa=6I!3Xu?(Yw!) 
z*0x|P8#UQKQ?e|@wwF>`qBx&7Py&pPEhq~e_O^ZC46E|(nuxTa~Mwe~ck<=VXu(v@Xdc<=j;?!S~$ zmt~m>h*C=C!95Bgcp~aT2;JduIL>CX<6^OBt+nn`Bo4b`TtL{R_^=+<81rwxPcOe; z_L8%2OV@q1) "agent-provider" + */ +const fs = require('fs'); +const path = require('path'); + +const openapiPath = path.join(__dirname, '..', 'openapi.json'); +const json = JSON.parse(fs.readFileSync(openapiPath, 'utf8')); + +let fixed = 0; + +// Fix operation tags +for (const p in json.paths) { + for (const m in json.paths[p]) { + const op = json.paths[p][m]; + if (op.tags) { + op.tags = op.tags.map(t => { + const fixed = t.replace(/^crate::/, '').replace(/::/g, '-'); + return fixed; + }); + fixed++; + } + } +} + +fs.writeFileSync(openapiPath, JSON.stringify(json, null, 2)); +console.log(`Fixed tags in ${fixed} operations`); diff --git a/scripts/gen-client.js b/scripts/gen-client.js new file mode 100644 index 0000000..a242088 --- /dev/null +++ b/scripts/gen-client.js @@ -0,0 +1,55 @@ +/** + * Generate TypeScript axios client from openapi.json using @hey-api/openapi-ts. + * Generates into src/client. + * Post-processes: injects withCredentials: true and baseURL into the client config. 
+ */ +const { execSync } = require('child_process'); +const fs = require('fs'); +const path = require('path'); + +const ROOT = path.join(__dirname, '..'); +const CLIENT_DIR = path.join(ROOT, 'src', 'client'); +const CLIENT_GEN = path.join(CLIENT_DIR, 'client.gen.ts'); + +const openapiTsBin = path.join(ROOT, 'node_modules/@hey-api/openapi-ts/bin/run.js'); +const openapiJson = path.join(ROOT, 'openapi.json'); + +console.log('Running @hey-api/openapi-ts...'); +try { + execSync(`node "${openapiTsBin}" -c @hey-api/client-axios -i "${openapiJson}" -o "${CLIENT_DIR}"`, { + cwd: ROOT, + stdio: 'inherit', + }); +} catch (e) { + console.error('Generator exited with code:', e.status); + process.exit(1); +} + +// Post-process: inject withCredentials and baseURL into client config +if (fs.existsSync(CLIENT_GEN)) { + let content = fs.readFileSync(CLIENT_GEN, 'utf8'); + + // Remove unused createConfig import + content = content.replace( + "import { type ClientOptions, type Config, createClient, createConfig } from './client';", + "import { type ClientOptions, type Config, createClient } from './client';" + ); + + // Replace the client creation to include withCredentials and baseURL + content = content.replace( + 'export const client = createClient(createConfig());', + `export const createClientConfig = (override?: Config): Config => { + return { + withCredentials: true, + baseURL: import.meta.env.VITE_API_BASE_URL ?? 
'', + ...override, + }; + }; +export const client = createClient(createClientConfig());` + ); + + fs.writeFileSync(CLIENT_GEN, content); + console.log('Updated client.gen.ts with withCredentials and baseURL'); +} + +console.log('Done.'); diff --git a/src/App.css b/src/App.css new file mode 100644 index 0000000..f90339d --- /dev/null +++ b/src/App.css @@ -0,0 +1,184 @@ +.counter { + font-size: 16px; + padding: 5px 10px; + border-radius: 5px; + color: var(--accent); + background: var(--accent-bg); + border: 2px solid transparent; + transition: border-color 0.3s; + margin-bottom: 24px; + + &:hover { + border-color: var(--accent-border); + } + &:focus-visible { + outline: 2px solid var(--accent); + outline-offset: 2px; + } +} + +.hero { + position: relative; + + .base, + .framework, + .vite { + inset-inline: 0; + margin: 0 auto; + } + + .base { + width: 170px; + position: relative; + z-index: 0; + } + + .framework, + .vite { + position: absolute; + } + + .framework { + z-index: 1; + top: 34px; + height: 28px; + transform: perspective(2000px) rotateZ(300deg) rotateX(44deg) rotateY(39deg) + scale(1.4); + } + + .vite { + z-index: 0; + top: 107px; + height: 26px; + width: auto; + transform: perspective(2000px) rotateZ(300deg) rotateX(40deg) rotateY(39deg) + scale(0.8); + } +} + +#center { + display: flex; + flex-direction: column; + gap: 25px; + place-content: center; + place-items: center; + flex-grow: 1; + + @media (max-width: 1024px) { + padding: 32px 20px 24px; + gap: 18px; + } +} + +#next-steps { + display: flex; + border-top: 1px solid var(--border); + text-align: left; + + & > div { + flex: 1 1 0; + padding: 32px; + @media (max-width: 1024px) { + padding: 24px 20px; + } + } + + .icon { + margin-bottom: 16px; + width: 22px; + height: 22px; + } + + @media (max-width: 1024px) { + flex-direction: column; + text-align: center; + } +} + +#docs { + border-right: 1px solid var(--border); + + @media (max-width: 1024px) { + border-right: none; + border-bottom: 1px solid 
var(--border); + } +} + +#next-steps ul { + list-style: none; + padding: 0; + display: flex; + gap: 8px; + margin: 32px 0 0; + + .logo { + height: 18px; + } + + a { + color: var(--text-h); + font-size: 16px; + border-radius: 6px; + background: var(--social-bg); + display: flex; + padding: 6px 12px; + align-items: center; + gap: 8px; + text-decoration: none; + transition: box-shadow 0.3s; + + &:hover { + box-shadow: var(--shadow); + } + .button-icon { + height: 18px; + width: 18px; + } + } + + @media (max-width: 1024px) { + margin-top: 20px; + flex-wrap: wrap; + justify-content: center; + + li { + flex: 1 1 calc(50% - 8px); + } + + a { + width: 100%; + justify-content: center; + box-sizing: border-box; + } + } +} + +#spacer { + height: 88px; + border-top: 1px solid var(--border); + @media (max-width: 1024px) { + height: 48px; + } +} + +.ticks { + position: relative; + width: 100%; + + &::before, + &::after { + content: ''; + position: absolute; + top: -4.5px; + border: 5px solid transparent; + } + + &::before { + left: 0; + border-left-color: var(--border); + } + &::after { + right: 0; + border-right-color: var(--border); + } +} diff --git a/src/App.tsx b/src/App.tsx new file mode 100644 index 0000000..45ef7d8 --- /dev/null +++ b/src/App.tsx @@ -0,0 +1,176 @@ +import {Route, Routes} from 'react-router-dom'; +import {LoginPage} from '@/app/auth/login-page'; +import {RegisterPage} from '@/app/auth/register-page'; +import {VerifyEmailPage} from '@/app/auth/verify-email-page'; +import {PasswordResetPage} from '@/app/auth/password-reset-page'; +import {InitProject} from '@/app/init/project'; +import {InitRepository} from '@/app/init/repository'; +import {UserProfile} from '@/app/user/user'; +import {ProjectLayout} from '@/app/project/layout'; +import {ProjectOverview} from '@/app/project/overview'; +import {ProjectActivity} from '@/app/project/activity'; +import {ProjectRepositories} from '@/app/project/repositories'; +import {ProjectIssues} from '@/app/project/issues'; 
+import {ProjectBoards} from '@/app/project/boards'; +import {ProjectBoardDetail} from '@/app/project/boards/[boardId]'; +import {IssueNew} from '@/app/project/issue-new'; +import {IssueDetail} from '@/app/project/issue-detail'; +import {IssueEdit} from '@/app/project/issue-edit'; +import {ProjectMembers} from '@/app/project/member'; +import {ProjectRoom} from '@/app/project/room'; +import {ProjectArticles} from '@/app/project/articles'; +import {ProjectResources} from '@/app/project/resources'; +import {ProjectSettings} from '@/app/project/settings'; +import {RepoLayout} from '@/app/repository/layout'; +import {RepoOverview} from '@/app/repository/overview'; +import {RepoBranches} from '@/app/repository/branches'; +import {RepoCommits} from '@/app/repository/commits'; +import {RepoContributors} from '@/app/repository/contributors'; +import {RepoFiles} from '@/app/repository/files'; +import {RepoTags} from '@/app/repository/tags'; +import {RepoPullRequests} from '@/app/repository/pull-requests'; +import {RepoPullRequestNew} from '@/app/repository/pull-request-new'; +import {RepoPullRequestDetail} from '@/app/repository/pull-request-detail'; +import {RepoSettingsLayout} from '@/app/repository/settings/layout'; +import {RepoSettingsGeneral} from '@/app/repository/settings/general'; +import {RepoSettingsTags} from '@/app/repository/settings/tags'; +import {RepoSettingsBranches} from '@/app/repository/settings/branches'; +import {RepoSettingsArchive} from '@/app/repository/settings/archive'; +import {RepoSettingsMembers} from '@/app/repository/settings/members'; +import {RepoSettingsWebhooks} from '@/app/repository/settings/webhooks'; +import {RepoCommitDetail} from '@/app/repository/commit-diff'; +import {SettingsGeneral} from '@/app/project/settings/general'; +import {SettingsLabels} from '@/app/project/settings/labels'; +import {SettingsSkills} from '@/app/project/settings/skills'; +import {SkillsInit} from '@/app/project/skills-init'; +import {SettingsBilling} from 
'@/app/project/settings/billing'; +import {SettingsMembers} from '@/app/project/settings/members'; +import {SettingsOAuth} from '@/app/project/settings/oauth'; +import {SettingsWebhook} from '@/app/project/settings/webhook'; +import {SettingsLayout} from '@/app/settings/layout'; +import {SettingsProfile} from '@/app/settings/profile'; +import {SettingsAccount} from '@/app/settings/account'; +import {SettingsSecurity} from '@/app/settings/security'; +import {SettingsTokens} from '@/app/settings/tokens'; +import {SettingsSshKeys} from '@/app/settings/ssh-keys'; +import {SettingsPreferences} from '@/app/settings/preferences'; +import {SettingsActivity} from '@/app/settings/activity'; +import NotifyLayout from '@/app/notify/layout'; +import NotifyPage from '@/app/notify/page'; +import LandingPage from '@/app/page'; +import SearchPage from '@/app/search/page'; +import {ProtectedRoute} from '@/components/auth/protected-route'; +import {WorkspaceLayout} from '@/app/workspace/layout'; +import {WorkspaceOverview} from '@/app/workspace/overview'; +import {WorkspaceProjects} from '@/app/workspace/projects'; +import {WorkspaceMembers} from '@/app/workspace/members'; +import {WorkspaceSettings} from '@/app/workspace/settings'; +import {WorkspaceBilling} from '@/app/workspace/billing'; +import {AcceptWorkspaceInvitePage} from '@/app/auth/accept-workspace-invite-page'; +import {WorkspaceRedirect} from '@/app/workspace/redirect'; +import {InitWorkspace} from '@/app/init/workspace'; + +function App() { + return ( + + }/> + + }/> + }/> + }/> + }/> + }/> + + + }/> + + + }> + }/> + + + }/> + }/> + }/> + + + }> + }/> + }/> + }/> + }/> + }/> + }/> + }/> + }/> + + + }> + }/> + + + }> + }/> + }/> + }/> + }/> + }/> + }/> + }/> + }/> + }/> + }/> + }> + }/> + }/> + }/> + }/> + }/> + }/> + }/> + + + + }> + }/> + }/> + }/> + }/> + }/> + }/> + }/> + }/> + }/> + }/> + }/> + }/> + }/> + }/> + }/> + }/> + }> + }/> + }/> + }/> + }/> + }/> + }/> + }/> + + + + }/> + + }> + }/> + }/> + }/> + }/> + }/> 
+ + + + ); +} + +export default App; diff --git a/src/app/auth/accept-workspace-invite-page.tsx b/src/app/auth/accept-workspace-invite-page.tsx new file mode 100644 index 0000000..a1cebef --- /dev/null +++ b/src/app/auth/accept-workspace-invite-page.tsx @@ -0,0 +1,182 @@ +import {useEffect, useState} from 'react'; +import {useNavigate, useSearchParams} from 'react-router-dom'; +import {AlertTriangle, ArrowRight, Command, Loader2, ShieldCheck, Sparkles, Users} from 'lucide-react'; +import {toast} from 'sonner'; +import {workspaceAcceptInvitation} from '@/client'; +import {AuthLayout} from '@/components/auth/auth-layout'; +import {Button} from '@/components/ui/button'; +import {AnimatePresence, motion} from 'framer-motion'; +import {getApiErrorMessage} from '@/lib/api-error'; + +type Status = 'idle' | 'loading' | 'success' | 'error'; + +export function AcceptWorkspaceInvitePage() { + const [searchParams] = useSearchParams(); + const navigate = useNavigate(); + const token = searchParams.get('token'); + const [status, setStatus] = useState('idle'); + const [errorMsg, setErrorMsg] = useState(''); + const [workspaceSlug, setWorkspaceSlug] = useState(''); + + useEffect(() => { + if (!token) { + setStatus('error'); + setErrorMsg('The invitation link is missing a valid security token.'); + return; + } + + setStatus('loading'); + + // 稍微增加一点处理感延迟,提升 UI 的稳重感 + const timer = setTimeout(() => { + workspaceAcceptInvitation({body: {token}}) + .then((resp) => { + setStatus('success'); + const slug = resp.data?.data?.slug; + if (slug) { + setWorkspaceSlug(slug); + toast.success('Successfully joined the workspace'); + // 1.5秒后自动跳转,给用户一点时间看成功状态 + setTimeout(() => navigate(`/w/${slug}`), 2000); + } + }) + .catch((err: unknown) => { + setStatus('error'); + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const msg = getApiErrorMessage(err, 'The invitation link has expired or already been used.'); + setErrorMsg(msg); + }); + }, 1000); + + return () => 
clearTimeout(timer); + }, [token, navigate]); + + return ( + + + {/* 统一的品牌头部 */} +
+
+ +
+

+ Workspace Invitation +

+
+ + {/* 状态卡片容器 */} +
+ + {(status === 'idle' || status === 'loading') && ( + +
+
+ +
+

Validating invite

+

+ Checking permissions and workspace status... +

+ + )} + + {status === 'success' && ( + +
+ + + + +
+

+ Welcome Aboard! +

+

+ Invitation accepted. You've been added to the team. We're redirecting you to the + dashboard now. +

+ +
+ )} + + {status === 'error' && ( + +
+ +
+

+ Invalid Invite +

+

+ {errorMsg} +

+
+ + )} + +
+ +
+
+ SECURE JOIN +
+
+ DATA OWNERSHIP +
+
+ + + ); +} \ No newline at end of file diff --git a/src/app/auth/index.ts b/src/app/auth/index.ts new file mode 100644 index 0000000..038e113 --- /dev/null +++ b/src/app/auth/index.ts @@ -0,0 +1,3 @@ +export { LoginPage } from "./login-page"; +export { RegisterPage } from "./register-page"; +export { PasswordResetPage } from "./password-reset-page"; diff --git a/src/app/auth/login-page.tsx b/src/app/auth/login-page.tsx new file mode 100644 index 0000000..5792afd --- /dev/null +++ b/src/app/auth/login-page.tsx @@ -0,0 +1,268 @@ +import {useCallback, useEffect, useRef, useState} from "react"; +import {Link, useLocation, useNavigate} from "react-router-dom"; +import {ArrowRight, Command, Eye, EyeOff, Loader2, ShieldAlert} from "lucide-react"; +import {apiAuthCaptcha, type ApiResponseCaptchaResponse} from "@/client"; +import {getApiErrorMessage, isTotpRequiredError} from "@/lib/api-error"; +import {useUser} from "@/contexts"; +import {AuthLayout} from "@/components/auth/auth-layout"; +import {Button} from "@/components/ui/button"; +import {Input} from "@/components/ui/input"; +import {rsaEncrypt} from "@/lib/rsa"; +import {AnimatePresence, motion} from "framer-motion"; + +export function LoginPage() { + const {login} = useUser(); + const navigate = useNavigate(); + const location = useLocation(); + const from = (location.state as { from?: string })?.from || "/w/me"; + const usernameRef = useRef(null); + + const [form, setForm] = useState({username: "", password: "", captcha: "", totp_code: ""}); + const [showPassword, setShowPassword] = useState(false); + const [isLoading, setIsLoading] = useState(false); + const [needsTotp, setNeedsTotp] = useState(false); + const [captcha, setCaptcha] = useState(null); + const [captchaLoading, setCaptchaLoading] = useState(false); + const [error, setError] = useState(null); + + const loadCaptcha = useCallback(async () => { + setCaptchaLoading(true); + try { + // 使用 dark: false 保持验证码背景干净,用 CSS 滤镜适配暗黑模式 + const resp = await 
apiAuthCaptcha({body: {w: 100, h: 32, dark: false, rsa: true}}); + if (resp.data?.data) setCaptcha(resp.data.data); + } catch { /* ignored */ + } finally { + setCaptchaLoading(false); + } + }, []); + + useEffect(() => { + usernameRef.current?.focus(); + loadCaptcha(); + }, [loadCaptcha]); + + const handleSubmit = async (e: React.FormEvent) => { + e.preventDefault(); + setError(null); + + if (!form.username.trim() || !form.password) { + setError("Please fill in all required fields."); + return; + } + + setIsLoading(true); + try { + const encryptedPassword = captcha?.rsa + ? rsaEncrypt(form.password, captcha.rsa.public_key) + : form.password; + + await login({ + username: form.username, + password: encryptedPassword, + captcha: form.captcha, + totp_code: needsTotp && form.totp_code ? form.totp_code : undefined, + }); + navigate(from, {replace: true}); + } catch (err: unknown) { + if (isTotpRequiredError(err)) { + setNeedsTotp(true); + } else { + setError(getApiErrorMessage(err, "Invalid credentials. Please try again.")); + await loadCaptcha(); + setForm((p) => ({...p, captcha: ""})); + } + } finally { + setIsLoading(false); + } + }; + + return ( + + + {/* 头部区域 */} +
+
+ +
+
+

+ Sign in +

+

+ {from && from !== "/" + ? <>Continue to {from} + : "Welcome back to GitDataAI" + } +

+
+
+ + {/* 核心表单卡片 */} +
+
+ + {/* Username */} +
+ + setForm((p) => ({...p, username: e.target.value}))} + className="h-10 px-3 bg-zinc-50 dark:bg-zinc-900/50 border-zinc-200 dark:border-zinc-800 rounded-lg focus-visible:ring-1 focus-visible:ring-zinc-400 dark:focus-visible:ring-zinc-600 transition-shadow" + disabled={isLoading} + /> +
+ + {/* Password */} +
+
+ + + Forgot? + +
+
+ setForm((p) => ({...p, password: e.target.value}))} + className="h-10 pl-3 pr-10 bg-zinc-50 dark:bg-zinc-900/50 border-zinc-200 dark:border-zinc-800 rounded-lg focus-visible:ring-1 focus-visible:ring-zinc-400 dark:focus-visible:ring-zinc-600 transition-shadow" + disabled={isLoading} + /> + +
+
+ + {/* TOTP */} + + {needsTotp && ( + + + setForm((p) => ({ + ...p, + totp_code: e.target.value.replace(/\D/g, "") + }))} + className="h-10 text-center tracking-[0.5em] font-mono text-lg bg-zinc-50 dark:bg-zinc-900/50 border-zinc-200 dark:border-zinc-800 rounded-lg focus-visible:ring-1 focus-visible:ring-zinc-400" + disabled={isLoading} + /> + + )} + + + {/* 优雅的内嵌验证码设计 */} + {!needsTotp && ( +
+ +
+ setForm((p) => ({...p, captcha: e.target.value}))} + className="h-10 pl-3 pr-[110px] bg-zinc-50 dark:bg-zinc-900/50 border-zinc-200 dark:border-zinc-800 rounded-lg focus-visible:ring-1 focus-visible:ring-zinc-400 dark:focus-visible:ring-zinc-600 transition-shadow" + disabled={isLoading || captchaLoading} + /> + +
+
+ )} + + {/* 错误提示 - 使用非常克制的柔和边框风格 */} + + {error && ( + + +

{error}

+
+ )} +
+ + +
+
+ +
+

+ Don't have an account?{" "} + + Create one + +

+
+
+
+ ); +} \ No newline at end of file diff --git a/src/app/auth/password-reset-page.tsx b/src/app/auth/password-reset-page.tsx new file mode 100644 index 0000000..bfeac85 --- /dev/null +++ b/src/app/auth/password-reset-page.tsx @@ -0,0 +1,199 @@ +import {useState} from "react"; +import {Link} from "react-router-dom"; +import {ArrowLeft, ArrowRight, CheckCircle2, Command, Loader2, Mail, ShieldAlert} from "lucide-react"; +import {toast} from "sonner"; +import {apiUserRequestPasswordReset} from "@/client"; +import {getApiErrorMessage} from "@/lib/api-error"; +import {AuthLayout} from "@/components/auth/auth-layout"; +import {Button} from "@/components/ui/button"; +import {Input} from "@/components/ui/input"; +import {AnimatePresence, motion} from "framer-motion"; + +export function PasswordResetPage() { + const [form, setForm] = useState({email: ""}); + const [isLoading, setIsLoading] = useState(false); + const [sent, setSent] = useState(false); + const [error, setError] = useState(null); + + const handleSubmit = async (e: React.FormEvent) => { + e.preventDefault(); + setError(null); + + if (!form.email.trim()) { + setError("Email address is required."); + return; + } + if (!/^[^\s@]+@[^\s@]+\.[^\s@]+$/.test(form.email)) { + setError("Please enter a valid email address."); + return; + } + + setIsLoading(true); + try { + const resp = await apiUserRequestPasswordReset({body: {email: form.email}}); + if (resp.data?.code !== 0) { + throw new Error(resp.data?.message || "Failed to send reset email."); + } + setSent(true); + toast.success("Reset link sent!"); + } catch (err: unknown) { + const msg = getApiErrorMessage(err, "Failed to send reset email."); + setError(msg); + toast.error(msg); + } finally { + setIsLoading(false); + } + }; + + return ( + + + + {!sent ? ( + + {/* 头部 */} +
+
+ +
+
+

+ Reset password +

+

+ Enter your email and we'll send you a link to get back into your account. +

+
+
+ + {/* 卡片表单 */} +
+
+
+ + { + setForm({email: e.target.value}); + if (error) setError(null); + }} + className="h-10 bg-zinc-50/50 dark:bg-zinc-900/50 border-zinc-200 dark:border-zinc-800 rounded-xl focus-visible:ring-1 focus-visible:ring-zinc-400" + disabled={isLoading} + /> +
+ + {error && ( + + +

{error}

+
+ )} + + +
+
+ + {/* 底部链接 */} +
+ + + Back to sign in + +
+
+ ) : ( + + {/* 成功状态显示 */} +
+
+ +
+

+ Check your email +

+

+ We've sent a secure link to {form.email}. + Click the link to reset your passphrase. +

+ +
+ Return to Sign In +

+ Didn't receive the email? Check your spam folder or{" "} + +

+
+
+ +
+
+ Secure Link +
+
+ 24h Expiry +
+
+
+ )} +
+
+
+ ); +} \ No newline at end of file diff --git a/src/app/auth/register-page.tsx b/src/app/auth/register-page.tsx new file mode 100644 index 0000000..7197d6f --- /dev/null +++ b/src/app/auth/register-page.tsx @@ -0,0 +1,292 @@ +import {useCallback, useEffect, useState} from "react"; +import {Link, useNavigate} from "react-router-dom"; +import {ArrowRight, CheckCircle2, Command, Eye, EyeOff, Loader2, ShieldAlert} from "lucide-react"; +import {toast} from "sonner"; +import {apiAuthCaptcha, apiAuthRegister, type ApiResponseCaptchaResponse} from "@/client"; +import {getApiErrorMessage} from "@/lib/api-error"; +import {AuthLayout} from "@/components/auth/auth-layout"; +import {Button} from "@/components/ui/button"; +import {Input} from "@/components/ui/input"; +import {rsaEncrypt} from "@/lib/rsa"; +import {AnimatePresence, motion} from "framer-motion"; + +export function RegisterPage() { + const navigate = useNavigate(); + + const [form, setForm] = useState({ + email: "", + username: "", + password: "", + confirmPassword: "", + captcha: "", + }); + const [showPassword, setShowPassword] = useState(false); + const [isLoading, setIsLoading] = useState(false); + const [captcha, setCaptcha] = useState(null); + const [captchaLoading, setCaptchaLoading] = useState(false); + const [error, setError] = useState(null); + + const loadCaptcha = useCallback(async () => { + setCaptchaLoading(true); + try { + const resp = await apiAuthCaptcha({body: {w: 100, h: 32, dark: false, rsa: true}}); + if (resp.data?.data) setCaptcha(resp.data.data); + } catch { /* non-critical */ + } finally { + setCaptchaLoading(false); + } + }, []); + + useEffect(() => { + loadCaptcha(); + }, [loadCaptcha]); + + const validate = () => { + if (!form.email.trim()) return "Email is required"; + if (!/^[^\s@]+@[^\s@]+\.[^\s@]+$/.test(form.email)) return "Invalid email address"; + if (!form.username.trim()) return "Username is required"; + if (form.username.length < 3) return "Username must be at least 3 
characters"; + if (!form.password) return "Password is required"; + if (form.password.length < 8) return "Password must be at least 8 characters"; + if (!/[A-Z]/.test(form.password) || !/[a-z]/.test(form.password) || !/[0-9]/.test(form.password)) { + return "Password must include uppercase, lowercase, and a digit"; + } + if (form.password !== form.confirmPassword) return "Passwords do not match"; + if (!form.captcha.trim()) return "Verification code is required"; + return null; + }; + + const handleSubmit = async (e: React.FormEvent) => { + e.preventDefault(); + const validationError = validate(); + if (validationError) { + setError(validationError); + return; + } + setError(null); + setIsLoading(true); + try { + const encryptedPassword = captcha?.rsa + ? rsaEncrypt(form.password, captcha.rsa.public_key) + : form.password; + + const resp = await apiAuthRegister({ + body: { + username: form.username, + email: form.email, + password: encryptedPassword, + captcha: form.captcha, + }, + }); + + if (resp.data?.code !== 0) { + throw new Error(resp.data?.message || "Registration failed"); + } + + toast.success("Account created successfully!"); + navigate("/auth/login"); + } catch (err: any) { + const msg = getApiErrorMessage(err, "Registration failed"); + setError(msg); + toast.error(msg); + await loadCaptcha(); + setForm((p) => ({...p, captcha: ""})); + } finally { + setIsLoading(false); + } + }; + + const set = (key: keyof typeof form) => (e: React.ChangeEvent) => { + setForm((p) => ({...p, [key]: e.target.value})); + if (error) setError(null); // 输入时清除错误提示 + }; + + return ( + + + {/* 头部区域 */} +
+
+ +
+
+

+ Create an account +

+

+ Join the next generation of human-agent collaboration +

+
+
+ + {/* 表单卡片 */} +
+
+ + {/* Email */} +
+ + +
+ + {/* Username */} +
+ + +
+ + {/* Password */} +
+
+ +
+ + +
+
+
+ + +
+
+ + {/* 验证码 - 延续登录页的内嵌精致设计 */} +
+ +
+ + +
+
+ + {/* 错误提示区域 */} + + {error && ( + + +

{error}

+
+ )} +
+ + {/* 提交按钮 */} + +
+
+ +
+

+ Already have an account?{" "} + + Sign in + +

+
+
+ Encrypted +
+
+
+
+
+ ); +} \ No newline at end of file diff --git a/src/app/auth/verify-email-page.tsx b/src/app/auth/verify-email-page.tsx new file mode 100644 index 0000000..efa827b --- /dev/null +++ b/src/app/auth/verify-email-page.tsx @@ -0,0 +1,161 @@ +import {useEffect, useState} from 'react'; +import {Link, useSearchParams} from 'react-router-dom'; +import {AlertCircle, ArrowRight, Command, Loader2, MailCheck, ShieldCheck} from 'lucide-react'; +import {toast} from 'sonner'; +import {apiEmailVerify} from '@/client'; +import {AuthLayout} from '@/components/auth/auth-layout'; +import {AnimatePresence, motion} from 'framer-motion'; +import {getApiErrorMessage} from '@/lib/api-error'; + +type Status = 'idle' | 'loading' | 'success' | 'error'; + +export function VerifyEmailPage() { + const [searchParams] = useSearchParams(); + const token = searchParams.get('token'); + const [status, setStatus] = useState('idle'); + const [errorMsg, setErrorMsg] = useState(''); + + useEffect(() => { + if (!token) { + setStatus('error'); + setErrorMsg('Verification token is missing or malformed.'); + return; + } + + setStatus('loading'); + // 模拟一点延迟以展示优雅的加载状态 + const timer = setTimeout(() => { + apiEmailVerify({body: {token}}) + .then(() => { + setStatus('success'); + toast.success('Identity verified successfully.'); + }) + .catch((err: unknown) => { + setStatus('error'); + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const msg = getApiErrorMessage(err, 'The verification link has expired or is invalid.'); + setErrorMsg(msg); + }); + }, 800); + + return () => clearTimeout(timer); + }, [token]); + + return ( + + + {/* 统一的 Logo 头部 */} +
+
+ +
+

+ Email Verification +

+
+ + {/* 状态容器 */} +
+ + {status === 'loading' && ( + +
+
+ +
+

+ Verifying secure token... +

+ + )} + + {status === 'success' && ( + +
+ +
+

+ Identity Confirmed +

+

+ Your email has been successfully verified. You now have full access to the GitDataAI + platform. +

+ + Go to Dashboard + + +
+ )} + + {status === 'error' && ( + +
+ +
+

+ Verification Failed +

+

+ {errorMsg} +

+
+ Back to Settings +

+ Need help? Contact + engineering support +

+
+
+ )} + +
+ + {/* 底部安全背书 */} +
+
+ Encrypted Link +
+
+ SOC2 Compliant +
+
+
+ + ); +} \ No newline at end of file diff --git a/src/app/init/project.tsx b/src/app/init/project.tsx new file mode 100644 index 0000000..1084f43 --- /dev/null +++ b/src/app/init/project.tsx @@ -0,0 +1,263 @@ +import {useCallback, useEffect, useState} from 'react'; +import {useNavigate} from 'react-router-dom'; +import {useQuery} from '@tanstack/react-query'; +import {CheckCircle2, Loader2, XCircle} from 'lucide-react'; +import {toast} from 'sonner'; +import {projectCreate, projectInfo, workspaceList} from '@/client'; +import {Button} from '@/components/ui/button'; +import {Input} from '@/components/ui/input'; +import {Label} from '@/components/ui/label'; +import {Switch} from '@/components/ui/switch'; +import {Textarea} from '@/components/ui/textarea'; +import {validateName} from '@/lib/validation'; +import {InitLayout} from '@/components/init-layout'; +import {getApiErrorMessage} from '@/lib/api-error'; + +export function InitProject() { + const navigate = useNavigate(); + const [isLoading, setIsLoading] = useState(false); + + const [name, setName] = useState(''); + const [description, setDescription] = useState(''); + const [isPublic, setIsPublic] = useState(false); + const [workspaceSlug, setWorkspaceSlug] = useState(''); + + const [checkingAvailability, setCheckingAvailability] = useState(false); + const [nameAvailable, setNameAvailable] = useState(null); + const [availabilityMessage, setAvailabilityMessage] = useState(''); + + const {data: workspacesData} = useQuery({ + queryKey: ['workspaceListForProject'], + queryFn: async () => { + const resp = await workspaceList(); + return resp.data?.data ?? 
null; + }, + }); + + // Pre-select first workspace if available + useEffect(() => { + if (workspacesData?.workspaces?.length) { + setWorkspaceSlug(workspacesData.workspaces[0].slug); + } + }, [workspacesData]); + + const checkNameAvailability = useCallback(async () => { + if (!name.trim()) { + setNameAvailable(null); + setAvailabilityMessage(''); + return; + } + + const result = validateName(name.trim()); + if (!result.valid) { + setNameAvailable(false); + setAvailabilityMessage(result.message); + return; + } + + setCheckingAvailability(true); + try { + const lower = name.trim().toLowerCase(); + await projectInfo({path: {project_name: lower}}); + setNameAvailable(false); + setAvailabilityMessage('Project name already exists'); + } catch (err: unknown) { + const status = (err as any)?.response?.status; + if (status === 404) { + setNameAvailable(true); + setAvailabilityMessage('Project name is available'); + } else { + setNameAvailable(false); + setAvailabilityMessage('Failed to check availability'); + } + } finally { + setCheckingAvailability(false); + } + }, [name]); + + useEffect(() => { + const timer = setTimeout(() => { + checkNameAvailability(); + }, 500); + return () => clearTimeout(timer); + }, [checkNameAvailability]); + + const handleSubmit = async (e: React.FormEvent) => { + e.preventDefault(); + if (!name.trim()) { + toast.error('Project name is required'); + return; + } + const result = validateName(name.trim()); + if (!result.valid) { + toast.error(result.message); + return; + } + + setIsLoading(true); + try { + const resp = await projectCreate({ + body: { + name: name.trim().toLowerCase(), + description: description.trim() || null, + is_public: isPublic, + workspace_slug: workspaceSlug || null, + }, + }); + if (resp.data?.code !== 0) { + toast.error(resp.data?.message || 'Failed to create project'); + return; + } + const projectName = resp.data?.data?.project?.name; + const wsSlug = resp.data?.data?.project?.workspace_id ? 
workspaceSlug : undefined; + toast.success('Project created successfully!'); + if (wsSlug) { + navigate(`/w/${wsSlug}/projects`); + } else { + navigate(projectName ? `/project/${projectName}` : '/'); + } + } catch (err: unknown) { + toast.error(getApiErrorMessage(err, 'Failed to create project')); + } finally { + setIsLoading(false); + } + }; + + const canSubmit = + !isLoading && + !!name.trim() && + nameAvailable === true && + !checkingAvailability; + + return ( + +
+
+
+

Create New Project

+

+ Create a new project to organize your repositories +

+
+ +
+ +
+
+ +
+ setName(e.target.value)} + disabled={isLoading} + maxLength={100} + className="pr-10" + /> +
+ {checkingAvailability && ( + + )} + {!checkingAvailability && nameAvailable === true && ( + + )} + {!checkingAvailability && nameAvailable === false && ( + + )} +
+
+ {availabilityMessage && ( +

+ {availabilityMessage} +

+ )} +
+ +
+ +