From 45f614f61a3cdb042af4d7db598de9d5f40ff7b4 Mon Sep 17 00:00:00 2001 From: dark Date: Sat, 14 Feb 2026 22:25:25 +0800 Subject: [PATCH] feat: add AI types and API client methods (incl SSE streaming) - AI types: AIModelInfo, AIConversation, AIChatMessage, AIQuotaInfo - chatStream() AsyncGenerator for SSE streaming - Conversation CRUD + model list + quota API methods --- frontend/react-shadcn/pc/src/services/api.ts | 98 +++++++++++++++++++- frontend/react-shadcn/pc/src/types/index.ts | 56 +++++++++++ 2 files changed, 152 insertions(+), 2 deletions(-) diff --git a/frontend/react-shadcn/pc/src/services/api.ts b/frontend/react-shadcn/pc/src/services/api.ts index ae6e153..0819503 100644 --- a/frontend/react-shadcn/pc/src/services/api.ts +++ b/frontend/react-shadcn/pc/src/services/api.ts @@ -27,6 +27,11 @@ import type { UpdateOrgRequest, OrgMember, UserOrgInfo, + AIModelInfo, + AIConversation, + AIChatMessage, + AIQuotaInfo, + AIChatCompletionRequest, } from '@/types' const API_BASE_URL = import.meta.env.VITE_API_BASE_URL || 'http://localhost:8888/api/v1' @@ -166,14 +171,18 @@ class ApiClient { // Profile async getProfile(): Promise> { - return this.request>('/profile/me') + const rawData = await this.request('/profile/me') + if ('success' in rawData) return rawData as unknown as ApiResponse + return { code: 200, message: 'success', success: true, data: rawData } } async updateProfile(data: UpdateProfileRequest): Promise> { - return this.request>('/profile/me', { + const rawData = await this.request('/profile/me', { method: 'PUT', body: JSON.stringify(data), }) + if ('success' in rawData) return rawData as unknown as ApiResponse + return { code: 200, message: 'success', success: true, data: rawData } } async changePassword(data: ChangePasswordRequest): Promise> { @@ -396,6 +405,91 @@ class ApiClient { return rawData } + // AI Chat - SSE Streaming + async *chatStream(req: AIChatCompletionRequest): AsyncGenerator<string, void, unknown> { + const url = `${API_BASE_URL}/ai/chat/completions` + 
const headers: Record<string, string> = { + 'Content-Type': 'application/json', + } + if (this.token) { + headers['Authorization'] = `Bearer ${this.token}` + } + + const response = await fetch(url, { + method: 'POST', + headers, + body: JSON.stringify({ ...req, stream: true }), + }) + + if (!response.ok) { + const err = await response.json().catch(() => ({ message: 'Stream request failed' })) + throw new Error(err.message || 'Stream request failed') + } + + const reader = response.body!.getReader() + const decoder = new TextDecoder() + let buffer = '' + + while (true) { + const { done, value } = await reader.read() + if (done) break + buffer += decoder.decode(value, { stream: true }) + const lines = buffer.split('\n') + buffer = lines.pop() || '' + for (const line of lines) { + const trimmed = line.trim() + if (trimmed.startsWith('data: ') && trimmed !== 'data: [DONE]') { + yield trimmed.slice(6) + } + } + } + } + + // AI Chat - Non-streaming + async chatCompletion(req: AIChatCompletionRequest): Promise { + return this.request('/ai/chat/completions', { + method: 'POST', + body: JSON.stringify({ ...req, stream: false }), + }) + } + + // AI Models + async getAIModels(): Promise<{ list: AIModelInfo[] }> { + return this.request<{ list: AIModelInfo[] }>('/ai/models') + } + + // AI Conversations + async getAIConversations(page: number = 1, pageSize: number = 20): Promise<{ list: AIConversation[]; total: number }> { + return this.request<{ list: AIConversation[]; total: number }>(`/ai/conversations?page=${page}&pageSize=${pageSize}`) + } + + async createAIConversation(modelId?: string, title?: string): Promise<AIConversation> { + return this.request<AIConversation>('/ai/conversation', { + method: 'POST', + body: JSON.stringify({ modelId, title }), + }) + } + + async getAIConversation(id: number): Promise<{ conversation: AIConversation; messages: AIChatMessage[] }> { + return this.request<{ conversation: AIConversation; messages: AIChatMessage[] }>(`/ai/conversation/${id}`) + } + + async updateAIConversation(id: number, 
title: string): Promise<AIConversation> { + return this.request<AIConversation>(`/ai/conversation/${id}`, { + method: 'PUT', + body: JSON.stringify({ title }), + }) + } + + async deleteAIConversation(id: number): Promise<void> { + await this.request(`/ai/conversation/${id}`, { method: 'DELETE' }) + } + + // AI Quota + async getAIQuota(): Promise<AIQuotaInfo> { + return this.request<AIQuotaInfo>('/ai/quota/me') + } + // Health check async healthCheck(): Promise<{ status: string }> { try { diff --git a/frontend/react-shadcn/pc/src/types/index.ts b/frontend/react-shadcn/pc/src/types/index.ts index 0c8a26a..6d589e6 100644 --- a/frontend/react-shadcn/pc/src/types/index.ts +++ b/frontend/react-shadcn/pc/src/types/index.ts @@ -283,3 +283,59 @@ export interface UserOrgInfo { roleName: string roleCode: string } + +// ============ AI Types ============ + +export interface AIModelInfo { + id: number + providerId: number + providerName: string + modelId: string + displayName: string + inputPrice: number + outputPrice: number + maxTokens: number + contextWindow: number + supportsStream: boolean + supportsVision: boolean +} + +export interface AIConversation { + id: number + title: string + modelId: string + providerId: number + totalTokens: number + totalCost: number + isArchived: boolean + createdAt: string + updatedAt: string +} + +export interface AIChatMessage { + id: number + conversationId: number + role: 'user' | 'assistant' | 'system' + content: string + tokenCount: number + cost: number + modelId: string + latencyMs: number + createdAt: string +} + +export interface AIQuotaInfo { + balance: number + totalRecharged: number + totalConsumed: number + frozenAmount: number +} + +export interface AIChatCompletionRequest { + model: string + messages: { role: string; content: string }[] + stream?: boolean + max_tokens?: number + temperature?: number + conversation_id?: number +}