Browse Source

feat: add AI API definitions and goctl-generated code

- ai.api: chat completions, conversations CRUD, models, quota
- 8 handlers + 8 logic stubs generated
- Routes registered with Cors,Log,Auth middleware
master
dark 1 month ago
parent
commit
e672c8eb76
  1. 133
      backend/api/ai.api
  2. 41
      backend/base.api
  3. 32
      backend/internal/handler/ai/aichatcompletionshandler.go
  4. 32
      backend/internal/handler/ai/aiconversationcreatehandler.go
  5. 32
      backend/internal/handler/ai/aiconversationdeletehandler.go
  6. 32
      backend/internal/handler/ai/aiconversationgethandler.go
  7. 32
      backend/internal/handler/ai/aiconversationlisthandler.go
  8. 32
      backend/internal/handler/ai/aiconversationupdatehandler.go
  9. 25
      backend/internal/handler/ai/aimodellisthandler.go
  10. 25
      backend/internal/handler/ai/aiquotamehandler.go
  11. 58
      backend/internal/handler/routes.go
  12. 34
      backend/internal/logic/ai/aichatcompletionslogic.go
  13. 34
      backend/internal/logic/ai/aiconversationcreatelogic.go
  14. 34
      backend/internal/logic/ai/aiconversationdeletelogic.go
  15. 34
      backend/internal/logic/ai/aiconversationgetlogic.go
  16. 34
      backend/internal/logic/ai/aiconversationlistlogic.go
  17. 34
      backend/internal/logic/ai/aiconversationupdatelogic.go
  18. 34
      backend/internal/logic/ai/aimodellistlogic.go
  19. 34
      backend/internal/logic/ai/aiquotamelogic.go
  20. 116
      backend/internal/types/types.go

133
backend/api/ai.api

@ -0,0 +1,133 @@
syntax = "v1"
// ========== AI Chat Types ==========
// Request/response shapes for the chat-completion endpoint. Field names follow
// the OpenAI-style snake_case wire format (role/content, prompt_tokens, ...).
type (
	// AIChatMessage is one turn of the conversation sent to or returned by the model.
	AIChatMessage {
		Role string `json:"role"`       // message author role (exact allowed set not shown here — confirm against provider)
		Content string `json:"content"` // message text
	}
	// AIChatCompletionRequest is the inbound payload for POST /ai/chat/completions.
	AIChatCompletionRequest {
		Model string `json:"model"`                                 // model identifier to run the completion with
		Messages []AIChatMessage `json:"messages"`                  // full transcript so far, oldest first
		Stream bool `json:"stream,optional"`                        // request streamed output when true
		MaxTokens int `json:"max_tokens,optional"`                  // cap on generated tokens; 0 = provider default
		Temperature float64 `json:"temperature,optional"`           // sampling temperature; 0 = provider default
		ConversationId int64 `json:"conversation_id,optional,string"` // attach to an existing conversation; sent as a string to avoid JS precision loss
	}
	// AIChatCompletionChoice is one candidate completion.
	AIChatCompletionChoice {
		Index int `json:"index"`
		FinishReason string `json:"finish_reason"` // why generation stopped (e.g. length/stop)
		Message AIChatMessage `json:"message"`     // the generated assistant message
	}
	// AIChatCompletionUsage reports token accounting for one completion.
	AIChatCompletionUsage {
		PromptTokens int `json:"prompt_tokens"`
		CompletionTokens int `json:"completion_tokens"`
		TotalTokens int `json:"total_tokens"`
	}
	// AIChatCompletionResponse mirrors the OpenAI completion envelope.
	// NOTE(review): defined here but the /ai/chat/completions route below declares
	// no `returns` — confirm it is wired up (or that streaming replaces it).
	AIChatCompletionResponse {
		Id string `json:"id"`
		Object string `json:"object"`
		Model string `json:"model"`
		Choices []AIChatCompletionChoice `json:"choices"`
		Usage AIChatCompletionUsage `json:"usage"`
	}
)
// ========== Conversation Types ==========
// CRUD payloads for conversations and their messages. Note the camelCase JSON
// here versus the snake_case chat types above — both conventions are kept as
// generated. Int64 ids are serialized as strings to avoid JS precision loss.
type (
	// AIConversationInfo is the summary view of one conversation.
	AIConversationInfo {
		Id int64 `json:"id,string"`
		Title string `json:"title"`
		ModelId string `json:"modelId"`           // model used by this conversation
		ProviderId int64 `json:"providerId,string"`
		TotalTokens int64 `json:"totalTokens"`    // cumulative token usage
		TotalCost float64 `json:"totalCost"`      // cumulative cost (currency/unit not shown here — confirm)
		IsArchived bool `json:"isArchived"`
		CreatedAt string `json:"createdAt"`       // formatted timestamp (format not shown here — confirm)
		UpdatedAt string `json:"updatedAt"`
	}
	// AIMessageInfo is one stored message inside a conversation.
	AIMessageInfo {
		Id int64 `json:"id,string"`
		ConversationId int64 `json:"conversationId,string"`
		Role string `json:"role"`
		Content string `json:"content"`
		TokenCount int `json:"tokenCount"`
		Cost float64 `json:"cost"`
		ModelId string `json:"modelId"`
		LatencyMs int `json:"latencyMs"` // generation latency in milliseconds
		CreatedAt string `json:"createdAt"`
	}
	// AIConversationListRequest pages through conversations (query string).
	AIConversationListRequest {
		Page int64 `form:"page,optional,default=1"`
		PageSize int64 `form:"pageSize,optional,default=20"`
	}
	// AIConversationListResponse is one page plus the total row count.
	AIConversationListResponse {
		List []AIConversationInfo `json:"list"`
		Total int64 `json:"total"`
	}
	// AIConversationCreateRequest creates a conversation; both fields optional.
	AIConversationCreateRequest {
		Title string `json:"title,optional"`
		ModelId string `json:"modelId,optional"`
	}
	// AIConversationGetRequest identifies a conversation by path parameter.
	AIConversationGetRequest {
		Id int64 `path:"id"`
	}
	// AIConversationDetailResponse is a conversation plus its full message history.
	AIConversationDetailResponse {
		Conversation AIConversationInfo `json:"conversation"`
		Messages []AIMessageInfo `json:"messages"`
	}
	// AIConversationUpdateRequest renames a conversation (title is the only mutable field here).
	AIConversationUpdateRequest {
		Id int64 `path:"id"`
		Title string `json:"title"`
	}
	// AIConversationDeleteRequest identifies the conversation to delete.
	AIConversationDeleteRequest {
		Id int64 `path:"id"`
	}
)
// ========== Model Types ==========
type (
	// AIModelInfo describes one model offered by a provider, including pricing
	// and capability flags.
	AIModelInfo {
		Id int64 `json:"id,string"`
		ProviderId int64 `json:"providerId,string"`
		ProviderName string `json:"providerName"`
		ModelId string `json:"modelId"`           // provider-side model identifier
		DisplayName string `json:"displayName"`   // human-readable name for UIs
		InputPrice float64 `json:"inputPrice"`    // price per input unit (unit not shown here — confirm)
		OutputPrice float64 `json:"outputPrice"`  // price per output unit
		MaxTokens int `json:"maxTokens"`          // max generated tokens per request
		ContextWindow int `json:"contextWindow"`  // max total tokens of context
		SupportsStream bool `json:"supportsStream"`
		SupportsVision bool `json:"supportsVision"`
	}
	// AIModelListResponse is the full model catalog (no paging).
	AIModelListResponse {
		List []AIModelInfo `json:"list"`
	}
)
// ========== Quota Types ==========
type (
	// AIQuotaInfo is the caller's balance snapshot for GET /ai/quota/me.
	AIQuotaInfo {
		Balance float64 `json:"balance"`               // spendable balance
		TotalRecharged float64 `json:"totalRecharged"` // lifetime top-ups
		TotalConsumed float64 `json:"totalConsumed"`   // lifetime spend
		FrozenAmount float64 `json:"frozenAmount"`     // amount held (presumably for in-flight requests — confirm)
	}
)

41
backend/base.api

@ -14,6 +14,7 @@ import "api/file.api"
import "api/menu.api"
import "api/role.api"
import "api/organization.api"
import "api/ai.api"
// ========== 通用响应类型 ==========
type (
@ -294,3 +295,43 @@ service base-api {
delete /organization/:id/member/:userId (RemoveOrgMemberRequest) returns (Response)
}
// ========== AI chat service (智能对话) ==========
// All routes are served under /api/v1 and protected by the Cors, Log and Auth
// middleware chain (Auth implies a logged-in user for every endpoint below).
@server (
	prefix: /api/v1
	group: ai
	middleware: Cors,Log,Auth
)
service base-api {
	// NOTE(review): this route declares no response body although
	// AIChatCompletionResponse is defined and the request carries a Stream flag.
	// Confirm whether it should be `returns (AIChatCompletionResponse)` or
	// stream the output (e.g. SSE) directly.
	@doc "AI 对话补全"
	@handler AiChatCompletions
	post /ai/chat/completions (AIChatCompletionRequest)

	@doc "获取对话列表"
	@handler AiConversationList
	get /ai/conversations (AIConversationListRequest) returns (AIConversationListResponse)

	// NOTE(review): collection uses plural /ai/conversations while the item
	// routes use singular /ai/conversation/:id — confirm this is intentional.
	@doc "创建对话"
	@handler AiConversationCreate
	post /ai/conversation (AIConversationCreateRequest) returns (AIConversationInfo)

	@doc "获取对话详情"
	@handler AiConversationGet
	get /ai/conversation/:id (AIConversationGetRequest) returns (AIConversationDetailResponse)

	@doc "更新对话"
	@handler AiConversationUpdate
	put /ai/conversation/:id (AIConversationUpdateRequest) returns (AIConversationInfo)

	@doc "删除对话"
	@handler AiConversationDelete
	delete /ai/conversation/:id (AIConversationDeleteRequest) returns (Response)

	@doc "获取模型列表"
	@handler AiModelList
	get /ai/models returns (AIModelListResponse)

	@doc "获取我的配额"
	@handler AiQuotaMe
	get /ai/quota/me returns (AIQuotaInfo)
}

32
backend/internal/handler/ai/aichatcompletionshandler.go

@ -0,0 +1,32 @@
// Code scaffolded by goctl. Safe to edit.
// goctl 1.9.2
package ai
import (
"net/http"
"github.com/youruser/base/internal/logic/ai"
"github.com/youruser/base/internal/svc"
"github.com/youruser/base/internal/types"
"github.com/zeromicro/go-zero/rest/httpx"
)
// AiChatCompletionsHandler serves POST /ai/chat/completions (AI 对话补全): it
// parses the request, delegates to the logic layer, and replies with a bare
// 200 OK on success.
//
// NOTE(review): no response body is written even though
// types.AIChatCompletionResponse exists — confirm how the completion payload
// is meant to reach the client (return value vs. streaming).
func AiChatCompletionsHandler(svcCtx *svc.ServiceContext) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		ctx := r.Context()

		var req types.AIChatCompletionRequest
		if err := httpx.Parse(r, &req); err != nil {
			httpx.ErrorCtx(ctx, w, err)
			return
		}

		if err := ai.NewAiChatCompletionsLogic(ctx, svcCtx).AiChatCompletions(&req); err != nil {
			httpx.ErrorCtx(ctx, w, err)
			return
		}
		httpx.Ok(w)
	}
}

32
backend/internal/handler/ai/aiconversationcreatehandler.go

@ -0,0 +1,32 @@
// Code scaffolded by goctl. Safe to edit.
// goctl 1.9.2
package ai
import (
"net/http"
"github.com/youruser/base/internal/logic/ai"
"github.com/youruser/base/internal/svc"
"github.com/youruser/base/internal/types"
"github.com/zeromicro/go-zero/rest/httpx"
)
// AiConversationCreateHandler serves POST /ai/conversation (创建对话): it parses
// the create request, runs the logic layer, and writes the new conversation as JSON.
func AiConversationCreateHandler(svcCtx *svc.ServiceContext) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		ctx := r.Context()

		var req types.AIConversationCreateRequest
		if err := httpx.Parse(r, &req); err != nil {
			httpx.ErrorCtx(ctx, w, err)
			return
		}

		resp, err := ai.NewAiConversationCreateLogic(ctx, svcCtx).AiConversationCreate(&req)
		if err != nil {
			httpx.ErrorCtx(ctx, w, err)
			return
		}
		httpx.OkJsonCtx(ctx, w, resp)
	}
}

32
backend/internal/handler/ai/aiconversationdeletehandler.go

@ -0,0 +1,32 @@
// Code scaffolded by goctl. Safe to edit.
// goctl 1.9.2
package ai
import (
"net/http"
"github.com/youruser/base/internal/logic/ai"
"github.com/youruser/base/internal/svc"
"github.com/youruser/base/internal/types"
"github.com/zeromicro/go-zero/rest/httpx"
)
// AiConversationDeleteHandler serves DELETE /ai/conversation/:id (删除对话): it
// parses the path id, runs the logic layer, and writes the generic response as JSON.
func AiConversationDeleteHandler(svcCtx *svc.ServiceContext) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		ctx := r.Context()

		var req types.AIConversationDeleteRequest
		if err := httpx.Parse(r, &req); err != nil {
			httpx.ErrorCtx(ctx, w, err)
			return
		}

		resp, err := ai.NewAiConversationDeleteLogic(ctx, svcCtx).AiConversationDelete(&req)
		if err != nil {
			httpx.ErrorCtx(ctx, w, err)
			return
		}
		httpx.OkJsonCtx(ctx, w, resp)
	}
}

32
backend/internal/handler/ai/aiconversationgethandler.go

@ -0,0 +1,32 @@
// Code scaffolded by goctl. Safe to edit.
// goctl 1.9.2
package ai
import (
"net/http"
"github.com/youruser/base/internal/logic/ai"
"github.com/youruser/base/internal/svc"
"github.com/youruser/base/internal/types"
"github.com/zeromicro/go-zero/rest/httpx"
)
// AiConversationGetHandler serves GET /ai/conversation/:id (获取对话详情): it
// parses the path id, runs the logic layer, and writes the detail response as JSON.
func AiConversationGetHandler(svcCtx *svc.ServiceContext) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		ctx := r.Context()

		var req types.AIConversationGetRequest
		if err := httpx.Parse(r, &req); err != nil {
			httpx.ErrorCtx(ctx, w, err)
			return
		}

		resp, err := ai.NewAiConversationGetLogic(ctx, svcCtx).AiConversationGet(&req)
		if err != nil {
			httpx.ErrorCtx(ctx, w, err)
			return
		}
		httpx.OkJsonCtx(ctx, w, resp)
	}
}

32
backend/internal/handler/ai/aiconversationlisthandler.go

@ -0,0 +1,32 @@
// Code scaffolded by goctl. Safe to edit.
// goctl 1.9.2
package ai
import (
"net/http"
"github.com/youruser/base/internal/logic/ai"
"github.com/youruser/base/internal/svc"
"github.com/youruser/base/internal/types"
"github.com/zeromicro/go-zero/rest/httpx"
)
// AiConversationListHandler serves GET /ai/conversations (获取对话列表): it
// parses the paging query, runs the logic layer, and writes the page as JSON.
func AiConversationListHandler(svcCtx *svc.ServiceContext) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		ctx := r.Context()

		var req types.AIConversationListRequest
		if err := httpx.Parse(r, &req); err != nil {
			httpx.ErrorCtx(ctx, w, err)
			return
		}

		resp, err := ai.NewAiConversationListLogic(ctx, svcCtx).AiConversationList(&req)
		if err != nil {
			httpx.ErrorCtx(ctx, w, err)
			return
		}
		httpx.OkJsonCtx(ctx, w, resp)
	}
}

32
backend/internal/handler/ai/aiconversationupdatehandler.go

@ -0,0 +1,32 @@
// Code scaffolded by goctl. Safe to edit.
// goctl 1.9.2
package ai
import (
"net/http"
"github.com/youruser/base/internal/logic/ai"
"github.com/youruser/base/internal/svc"
"github.com/youruser/base/internal/types"
"github.com/zeromicro/go-zero/rest/httpx"
)
// AiConversationUpdateHandler serves PUT /ai/conversation/:id (更新对话): it
// parses the path id and new title, runs the logic layer, and writes the
// updated conversation as JSON.
func AiConversationUpdateHandler(svcCtx *svc.ServiceContext) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		ctx := r.Context()

		var req types.AIConversationUpdateRequest
		if err := httpx.Parse(r, &req); err != nil {
			httpx.ErrorCtx(ctx, w, err)
			return
		}

		resp, err := ai.NewAiConversationUpdateLogic(ctx, svcCtx).AiConversationUpdate(&req)
		if err != nil {
			httpx.ErrorCtx(ctx, w, err)
			return
		}
		httpx.OkJsonCtx(ctx, w, resp)
	}
}

25
backend/internal/handler/ai/aimodellisthandler.go

@ -0,0 +1,25 @@
// Code scaffolded by goctl. Safe to edit.
// goctl 1.9.2
package ai
import (
"net/http"
"github.com/youruser/base/internal/logic/ai"
"github.com/youruser/base/internal/svc"
"github.com/zeromicro/go-zero/rest/httpx"
)
// AiModelListHandler serves GET /ai/models (获取模型列表). The route takes no
// request payload, so the handler goes straight to the logic layer and writes
// the model catalog as JSON.
func AiModelListHandler(svcCtx *svc.ServiceContext) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		ctx := r.Context()

		resp, err := ai.NewAiModelListLogic(ctx, svcCtx).AiModelList()
		if err != nil {
			httpx.ErrorCtx(ctx, w, err)
			return
		}
		httpx.OkJsonCtx(ctx, w, resp)
	}
}

25
backend/internal/handler/ai/aiquotamehandler.go

@ -0,0 +1,25 @@
// Code scaffolded by goctl. Safe to edit.
// goctl 1.9.2
package ai
import (
"net/http"
"github.com/youruser/base/internal/logic/ai"
"github.com/youruser/base/internal/svc"
"github.com/zeromicro/go-zero/rest/httpx"
)
// AiQuotaMeHandler serves GET /ai/quota/me (获取我的配额). No request payload;
// the caller's identity presumably comes from the Auth middleware via the
// request context. Writes the quota snapshot as JSON.
func AiQuotaMeHandler(svcCtx *svc.ServiceContext) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		ctx := r.Context()

		resp, err := ai.NewAiQuotaMeLogic(ctx, svcCtx).AiQuotaMe()
		if err != nil {
			httpx.ErrorCtx(ctx, w, err)
			return
		}
		httpx.OkJsonCtx(ctx, w, resp)
	}
}

58
backend/internal/handler/routes.go

@ -6,6 +6,7 @@ package handler
import (
"net/http"
ai "github.com/youruser/base/internal/handler/ai"
auth "github.com/youruser/base/internal/handler/auth"
dashboard "github.com/youruser/base/internal/handler/dashboard"
file "github.com/youruser/base/internal/handler/file"
@ -20,6 +21,63 @@ import (
)
func RegisterHandlers(server *rest.Server, serverCtx *svc.ServiceContext) {
server.AddRoutes(
rest.WithMiddlewares(
[]rest.Middleware{serverCtx.Cors, serverCtx.Log, serverCtx.Auth},
[]rest.Route{
{
// AI 对话补全
Method: http.MethodPost,
Path: "/ai/chat/completions",
Handler: ai.AiChatCompletionsHandler(serverCtx),
},
{
// 创建对话
Method: http.MethodPost,
Path: "/ai/conversation",
Handler: ai.AiConversationCreateHandler(serverCtx),
},
{
// 获取对话详情
Method: http.MethodGet,
Path: "/ai/conversation/:id",
Handler: ai.AiConversationGetHandler(serverCtx),
},
{
// 更新对话
Method: http.MethodPut,
Path: "/ai/conversation/:id",
Handler: ai.AiConversationUpdateHandler(serverCtx),
},
{
// 删除对话
Method: http.MethodDelete,
Path: "/ai/conversation/:id",
Handler: ai.AiConversationDeleteHandler(serverCtx),
},
{
// 获取对话列表
Method: http.MethodGet,
Path: "/ai/conversations",
Handler: ai.AiConversationListHandler(serverCtx),
},
{
// 获取模型列表
Method: http.MethodGet,
Path: "/ai/models",
Handler: ai.AiModelListHandler(serverCtx),
},
{
// 获取我的配额
Method: http.MethodGet,
Path: "/ai/quota/me",
Handler: ai.AiQuotaMeHandler(serverCtx),
},
}...,
),
rest.WithPrefix("/api/v1"),
)
server.AddRoutes(
rest.WithMiddlewares(
[]rest.Middleware{serverCtx.Cors, serverCtx.Log},

34
backend/internal/logic/ai/aichatcompletionslogic.go

@ -0,0 +1,34 @@
// Code scaffolded by goctl. Safe to edit.
// goctl 1.9.2
package ai
import (
"context"
"github.com/youruser/base/internal/svc"
"github.com/youruser/base/internal/types"
"github.com/zeromicro/go-zero/core/logx"
)
// AiChatCompletionsLogic carries the per-request state for the chat-completion
// endpoint: a request-scoped logger, the request context, and service deps.
type AiChatCompletionsLogic struct {
	logx.Logger
	ctx    context.Context
	svcCtx *svc.ServiceContext
}

// NewAiChatCompletionsLogic builds the logic for "AI 对话补全" (chat completion)
// with a logger bound to ctx so log lines carry request tracing info.
func NewAiChatCompletionsLogic(ctx context.Context, svcCtx *svc.ServiceContext) *AiChatCompletionsLogic {
	return &AiChatCompletionsLogic{
		Logger: logx.WithContext(ctx),
		ctx:    ctx,
		svcCtx: svcCtx,
	}
}

// AiChatCompletions handles one chat-completion request. Currently an empty
// stub that always succeeds without producing any output.
//
// NOTE(review): types.AIChatCompletionResponse exists but this method returns
// only error, and no ResponseWriter is available here for streaming — confirm
// how the completion payload is meant to reach the client before implementing.
func (l *AiChatCompletionsLogic) AiChatCompletions(req *types.AIChatCompletionRequest) error {
	// todo: add your logic here and delete this line
	return nil
}

34
backend/internal/logic/ai/aiconversationcreatelogic.go

@ -0,0 +1,34 @@
// Code scaffolded by goctl. Safe to edit.
// goctl 1.9.2
package ai
import (
"context"
"github.com/youruser/base/internal/svc"
"github.com/youruser/base/internal/types"
"github.com/zeromicro/go-zero/core/logx"
)
// AiConversationCreateLogic holds per-request dependencies for creating a conversation.
type AiConversationCreateLogic struct {
	logx.Logger
	ctx    context.Context
	svcCtx *svc.ServiceContext
}

// NewAiConversationCreateLogic wires up the logic for "创建对话" with a
// request-scoped logger.
func NewAiConversationCreateLogic(ctx context.Context, svcCtx *svc.ServiceContext) *AiConversationCreateLogic {
	logic := AiConversationCreateLogic{
		Logger: logx.WithContext(ctx),
		ctx:    ctx,
		svcCtx: svcCtx,
	}
	return &logic
}

// AiConversationCreate creates a new conversation from req.
// Stub: currently returns (nil, nil) without touching any storage.
func (l *AiConversationCreateLogic) AiConversationCreate(req *types.AIConversationCreateRequest) (*types.AIConversationInfo, error) {
	// todo: add your logic here and delete this line
	return nil, nil
}

34
backend/internal/logic/ai/aiconversationdeletelogic.go

@ -0,0 +1,34 @@
// Code scaffolded by goctl. Safe to edit.
// goctl 1.9.2
package ai
import (
"context"
"github.com/youruser/base/internal/svc"
"github.com/youruser/base/internal/types"
"github.com/zeromicro/go-zero/core/logx"
)
// AiConversationDeleteLogic holds per-request dependencies for deleting a conversation.
type AiConversationDeleteLogic struct {
	logx.Logger
	ctx    context.Context
	svcCtx *svc.ServiceContext
}

// NewAiConversationDeleteLogic wires up the logic for "删除对话" with a
// request-scoped logger.
func NewAiConversationDeleteLogic(ctx context.Context, svcCtx *svc.ServiceContext) *AiConversationDeleteLogic {
	logic := AiConversationDeleteLogic{
		Logger: logx.WithContext(ctx),
		ctx:    ctx,
		svcCtx: svcCtx,
	}
	return &logic
}

// AiConversationDelete deletes the conversation identified by req.Id.
// Stub: currently returns (nil, nil) without touching any storage.
func (l *AiConversationDeleteLogic) AiConversationDelete(req *types.AIConversationDeleteRequest) (*types.Response, error) {
	// todo: add your logic here and delete this line
	return nil, nil
}

34
backend/internal/logic/ai/aiconversationgetlogic.go

@ -0,0 +1,34 @@
// Code scaffolded by goctl. Safe to edit.
// goctl 1.9.2
package ai
import (
"context"
"github.com/youruser/base/internal/svc"
"github.com/youruser/base/internal/types"
"github.com/zeromicro/go-zero/core/logx"
)
// AiConversationGetLogic holds per-request dependencies for fetching a
// conversation with its messages.
type AiConversationGetLogic struct {
	logx.Logger
	ctx    context.Context
	svcCtx *svc.ServiceContext
}

// NewAiConversationGetLogic wires up the logic for "获取对话详情" with a
// request-scoped logger.
func NewAiConversationGetLogic(ctx context.Context, svcCtx *svc.ServiceContext) *AiConversationGetLogic {
	logic := AiConversationGetLogic{
		Logger: logx.WithContext(ctx),
		ctx:    ctx,
		svcCtx: svcCtx,
	}
	return &logic
}

// AiConversationGet loads the conversation identified by req.Id together with
// its message history.
// Stub: currently returns (nil, nil) without touching any storage.
func (l *AiConversationGetLogic) AiConversationGet(req *types.AIConversationGetRequest) (*types.AIConversationDetailResponse, error) {
	// todo: add your logic here and delete this line
	return nil, nil
}

34
backend/internal/logic/ai/aiconversationlistlogic.go

@ -0,0 +1,34 @@
// Code scaffolded by goctl. Safe to edit.
// goctl 1.9.2
package ai
import (
"context"
"github.com/youruser/base/internal/svc"
"github.com/youruser/base/internal/types"
"github.com/zeromicro/go-zero/core/logx"
)
// AiConversationListLogic holds per-request dependencies for listing conversations.
type AiConversationListLogic struct {
	logx.Logger
	ctx    context.Context
	svcCtx *svc.ServiceContext
}

// NewAiConversationListLogic wires up the logic for "获取对话列表" with a
// request-scoped logger.
func NewAiConversationListLogic(ctx context.Context, svcCtx *svc.ServiceContext) *AiConversationListLogic {
	return &AiConversationListLogic{
		Logger: logx.WithContext(ctx),
		ctx:    ctx,
		svcCtx: svcCtx,
	}
}

// AiConversationList returns one page of the caller's conversations using
// req.Page / req.PageSize.
//
// Stub: returns an initialized empty page rather than a nil response, so the
// handler serializes {"list":[],"total":0} instead of a JSON null body (a nil
// pointer — and a nil slice — both encode as null).
func (l *AiConversationListLogic) AiConversationList(req *types.AIConversationListRequest) (*types.AIConversationListResponse, error) {
	// todo: query storage for the page and fill List/Total
	return &types.AIConversationListResponse{
		List:  []types.AIConversationInfo{},
		Total: 0,
	}, nil
}

34
backend/internal/logic/ai/aiconversationupdatelogic.go

@ -0,0 +1,34 @@
// Code scaffolded by goctl. Safe to edit.
// goctl 1.9.2
package ai
import (
"context"
"github.com/youruser/base/internal/svc"
"github.com/youruser/base/internal/types"
"github.com/zeromicro/go-zero/core/logx"
)
// AiConversationUpdateLogic holds per-request dependencies for updating a conversation.
type AiConversationUpdateLogic struct {
	logx.Logger
	ctx    context.Context
	svcCtx *svc.ServiceContext
}

// NewAiConversationUpdateLogic wires up the logic for "更新对话" with a
// request-scoped logger.
func NewAiConversationUpdateLogic(ctx context.Context, svcCtx *svc.ServiceContext) *AiConversationUpdateLogic {
	logic := AiConversationUpdateLogic{
		Logger: logx.WithContext(ctx),
		ctx:    ctx,
		svcCtx: svcCtx,
	}
	return &logic
}

// AiConversationUpdate applies req.Title to the conversation identified by req.Id.
// Stub: currently returns (nil, nil) without touching any storage.
func (l *AiConversationUpdateLogic) AiConversationUpdate(req *types.AIConversationUpdateRequest) (*types.AIConversationInfo, error) {
	// todo: add your logic here and delete this line
	return nil, nil
}

34
backend/internal/logic/ai/aimodellistlogic.go

@ -0,0 +1,34 @@
// Code scaffolded by goctl. Safe to edit.
// goctl 1.9.2
package ai
import (
"context"
"github.com/youruser/base/internal/svc"
"github.com/youruser/base/internal/types"
"github.com/zeromicro/go-zero/core/logx"
)
// AiModelListLogic holds per-request dependencies for listing available models.
type AiModelListLogic struct {
	logx.Logger
	ctx    context.Context
	svcCtx *svc.ServiceContext
}

// NewAiModelListLogic wires up the logic for "获取模型列表" with a
// request-scoped logger.
func NewAiModelListLogic(ctx context.Context, svcCtx *svc.ServiceContext) *AiModelListLogic {
	return &AiModelListLogic{
		Logger: logx.WithContext(ctx),
		ctx:    ctx,
		svcCtx: svcCtx,
	}
}

// AiModelList returns the catalog of available AI models.
//
// Stub: returns an initialized empty catalog rather than a nil response, so
// the handler serializes {"list":[]} instead of a JSON null body (a nil
// pointer — and a nil slice — both encode as null).
func (l *AiModelListLogic) AiModelList() (*types.AIModelListResponse, error) {
	// todo: load models (likely from provider config/storage) and fill List
	return &types.AIModelListResponse{List: []types.AIModelInfo{}}, nil
}

34
backend/internal/logic/ai/aiquotamelogic.go

@ -0,0 +1,34 @@
// Code scaffolded by goctl. Safe to edit.
// goctl 1.9.2
package ai
import (
"context"
"github.com/youruser/base/internal/svc"
"github.com/youruser/base/internal/types"
"github.com/zeromicro/go-zero/core/logx"
)
// AiQuotaMeLogic holds per-request dependencies for reading the caller's quota.
type AiQuotaMeLogic struct {
	logx.Logger
	ctx    context.Context
	svcCtx *svc.ServiceContext
}

// NewAiQuotaMeLogic wires up the logic for "获取我的配额" with a
// request-scoped logger.
func NewAiQuotaMeLogic(ctx context.Context, svcCtx *svc.ServiceContext) *AiQuotaMeLogic {
	return &AiQuotaMeLogic{
		Logger: logx.WithContext(ctx),
		ctx:    ctx,
		svcCtx: svcCtx,
	}
}

// AiQuotaMe returns the calling user's quota snapshot (identity presumably
// derived from l.ctx via the Auth middleware — confirm when implementing).
//
// Stub: returns a zero-valued quota rather than a nil response, so the handler
// serializes an all-zero object instead of a JSON null body.
func (l *AiQuotaMeLogic) AiQuotaMe() (*types.AIQuotaInfo, error) {
	// todo: load the quota row for the current user
	return &types.AIQuotaInfo{}, nil
}

116
backend/internal/types/types.go

@ -3,6 +3,122 @@
package types
// AIChatCompletionChoice is one candidate completion returned by the model.
type AIChatCompletionChoice struct {
	Index        int           `json:"index"`
	FinishReason string        `json:"finish_reason"` // why generation stopped
	Message      AIChatMessage `json:"message"`       // the generated assistant message
}

// AIChatCompletionRequest is the payload for POST /ai/chat/completions.
// Note the `optional` / `,string` tag options are goctl extensions, not plain
// encoding/json — this file is generated from ai.api.
type AIChatCompletionRequest struct {
	Model          string          `json:"model"`
	Messages       []AIChatMessage `json:"messages"` // transcript so far, oldest first
	Stream         bool            `json:"stream,optional"`
	MaxTokens      int             `json:"max_tokens,optional"`
	Temperature    float64         `json:"temperature,optional"`
	ConversationId int64           `json:"conversation_id,optional,string"` // string-encoded to avoid JS precision loss
}

// AIChatCompletionResponse mirrors the OpenAI-style completion envelope.
// NOTE(review): defined but not referenced by any visible route/handler —
// confirm the chat/completions endpoint is meant to return it.
type AIChatCompletionResponse struct {
	Id      string                   `json:"id"`
	Object  string                   `json:"object"`
	Model   string                   `json:"model"`
	Choices []AIChatCompletionChoice `json:"choices"`
	Usage   AIChatCompletionUsage    `json:"usage"`
}

// AIChatCompletionUsage reports token accounting for one completion.
type AIChatCompletionUsage struct {
	PromptTokens     int `json:"prompt_tokens"`
	CompletionTokens int `json:"completion_tokens"`
	TotalTokens      int `json:"total_tokens"`
}

// AIChatMessage is one turn of a chat transcript.
type AIChatMessage struct {
	Role    string `json:"role"`
	Content string `json:"content"`
}

// AIConversationCreateRequest creates a conversation; both fields optional.
type AIConversationCreateRequest struct {
	Title   string `json:"title,optional"`
	ModelId string `json:"modelId,optional"`
}

// AIConversationDeleteRequest identifies the conversation to delete (path param).
type AIConversationDeleteRequest struct {
	Id int64 `path:"id"`
}

// AIConversationDetailResponse is a conversation plus its full message history.
type AIConversationDetailResponse struct {
	Conversation AIConversationInfo `json:"conversation"`
	Messages     []AIMessageInfo    `json:"messages"`
}

// AIConversationGetRequest identifies the conversation to fetch (path param).
type AIConversationGetRequest struct {
	Id int64 `path:"id"`
}

// AIConversationInfo is the summary view of one conversation. Int64 ids are
// string-encoded in JSON to avoid JS precision loss.
type AIConversationInfo struct {
	Id          int64   `json:"id,string"`
	Title       string  `json:"title"`
	ModelId     string  `json:"modelId"`
	ProviderId  int64   `json:"providerId,string"`
	TotalTokens int64   `json:"totalTokens"` // cumulative token usage
	TotalCost   float64 `json:"totalCost"`   // cumulative cost
	IsArchived  bool    `json:"isArchived"`
	CreatedAt   string  `json:"createdAt"` // formatted timestamp (format not shown here)
	UpdatedAt   string  `json:"updatedAt"`
}

// AIConversationListRequest pages through conversations (query string, with
// goctl defaults of page=1, pageSize=20).
type AIConversationListRequest struct {
	Page     int64 `form:"page,optional,default=1"`
	PageSize int64 `form:"pageSize,optional,default=20"`
}

// AIConversationListResponse is one page plus the total row count.
type AIConversationListResponse struct {
	List  []AIConversationInfo `json:"list"`
	Total int64                `json:"total"`
}

// AIConversationUpdateRequest renames the conversation identified by the path id.
type AIConversationUpdateRequest struct {
	Id    int64  `path:"id"`
	Title string `json:"title"`
}

// AIMessageInfo is one stored message inside a conversation.
type AIMessageInfo struct {
	Id             int64   `json:"id,string"`
	ConversationId int64   `json:"conversationId,string"`
	Role           string  `json:"role"`
	Content        string  `json:"content"`
	TokenCount     int     `json:"tokenCount"`
	Cost           float64 `json:"cost"`
	ModelId        string  `json:"modelId"`
	LatencyMs      int     `json:"latencyMs"` // generation latency in milliseconds
	CreatedAt      string  `json:"createdAt"`
}

// AIModelInfo describes one model offered by a provider, including pricing
// and capability flags.
type AIModelInfo struct {
	Id             int64   `json:"id,string"`
	ProviderId     int64   `json:"providerId,string"`
	ProviderName   string  `json:"providerName"`
	ModelId        string  `json:"modelId"`     // provider-side model identifier
	DisplayName    string  `json:"displayName"` // human-readable name for UIs
	InputPrice     float64 `json:"inputPrice"`  // price per input unit (unit not shown here)
	OutputPrice    float64 `json:"outputPrice"` // price per output unit
	MaxTokens      int     `json:"maxTokens"`
	ContextWindow  int     `json:"contextWindow"`
	SupportsStream bool    `json:"supportsStream"`
	SupportsVision bool    `json:"supportsVision"`
}

// AIModelListResponse is the full model catalog (no paging).
type AIModelListResponse struct {
	List []AIModelInfo `json:"list"`
}

// AIQuotaInfo is the caller's balance snapshot for GET /ai/quota/me.
type AIQuotaInfo struct {
	Balance        float64 `json:"balance"`        // spendable balance
	TotalRecharged float64 `json:"totalRecharged"` // lifetime top-ups
	TotalConsumed  float64 `json:"totalConsumed"`  // lifetime spend
	FrozenAmount   float64 `json:"frozenAmount"`   // amount held (presumably for in-flight requests)
}
type Activity struct {
Id int64 `json:"id"` // 记录ID
User string `json:"user"` // 用户邮箱

Loading…
Cancel
Save