Browse Source

feat: add AI types and API client methods (including SSE streaming)

- AI types: AIModelInfo, AIConversation, AIChatMessage, AIQuotaInfo
- chatStream() AsyncGenerator for SSE streaming
- Conversation CRUD + model list + quota API methods
master
dark 1 month ago
parent
commit
45f614f61a
  1. 98
      frontend/react-shadcn/pc/src/services/api.ts
  2. 56
      frontend/react-shadcn/pc/src/types/index.ts

98
frontend/react-shadcn/pc/src/services/api.ts

@ -27,6 +27,11 @@ import type {
UpdateOrgRequest,
OrgMember,
UserOrgInfo,
AIModelInfo,
AIConversation,
AIChatMessage,
AIQuotaInfo,
AIChatCompletionRequest,
} from '@/types'
const API_BASE_URL = import.meta.env.VITE_API_BASE_URL || 'http://localhost:8888/api/v1'
@ -166,14 +171,18 @@ class ApiClient {
// Profile
/**
 * Fetch the current user's profile.
 *
 * Some backend builds already wrap the payload in an ApiResponse envelope,
 * while others return the bare Profile object; normalize both shapes so
 * callers always receive an ApiResponse<Profile>.
 */
async getProfile(): Promise<ApiResponse<Profile>> {
  const rawData = await this.request<Profile>('/profile/me')
  // An ApiResponse envelope is detected by its 'success' discriminator field.
  if ('success' in rawData) return rawData as unknown as ApiResponse<Profile>
  // Bare payload: synthesize a successful envelope around it.
  return { code: 200, message: 'success', success: true, data: rawData }
}
/**
 * Update the current user's profile via PUT /profile/me.
 *
 * Mirrors getProfile(): the response may arrive either as a bare Profile or
 * already wrapped in an ApiResponse envelope, so normalize both shapes.
 *
 * @param data fields to update on the profile
 */
async updateProfile(data: UpdateProfileRequest): Promise<ApiResponse<Profile>> {
  const rawData = await this.request<Profile>('/profile/me', {
    method: 'PUT',
    body: JSON.stringify(data),
  })
  // Envelope detected by its 'success' discriminator field.
  if ('success' in rawData) return rawData as unknown as ApiResponse<Profile>
  // Bare payload: synthesize a successful envelope around it.
  return { code: 200, message: 'success', success: true, data: rawData }
}
async changePassword(data: ChangePasswordRequest): Promise<ApiResponse<void>> {
@ -396,6 +405,91 @@ class ApiClient {
return rawData
}
// AI Chat - SSE Streaming
/**
 * Stream a chat completion as Server-Sent Events.
 *
 * POSTs to /ai/chat/completions with `stream: true` and yields the raw JSON
 * payload of each `data: ...` SSE line (the 'data: ' prefix is stripped).
 * The terminal `data: [DONE]` sentinel is swallowed, not yielded.
 *
 * @param req chat completion request; its `stream` flag is forced to true
 * @throws Error when the HTTP response is not ok or has no readable body
 */
async *chatStream(req: AIChatCompletionRequest): AsyncGenerator<string> {
  const url = `${API_BASE_URL}/ai/chat/completions`
  const headers: Record<string, string> = {
    'Content-Type': 'application/json',
  }
  if (this.token) {
    headers['Authorization'] = `Bearer ${this.token}`
  }
  const response = await fetch(url, {
    method: 'POST',
    headers,
    body: JSON.stringify({ ...req, stream: true }),
  })
  if (!response.ok) {
    // Prefer the server's error message; fall back when the body isn't JSON.
    const err = await response.json().catch(() => ({ message: 'Stream request failed' }))
    throw new Error(err.message || 'Stream request failed')
  }
  // response.body is null for bodyless responses — guard instead of asserting.
  if (!response.body) {
    throw new Error('Stream request failed: response has no body')
  }
  const reader = response.body.getReader()
  const decoder = new TextDecoder()
  let buffer = ''
  try {
    while (true) {
      const { done, value } = await reader.read()
      if (done) break
      // stream:true keeps multi-byte UTF-8 sequences split across chunks intact.
      buffer += decoder.decode(value, { stream: true })
      const lines = buffer.split('\n')
      // The last element may be a partial line; keep it for the next chunk.
      buffer = lines.pop() || ''
      for (const line of lines) {
        const trimmed = line.trim() // also strips the \r of CRLF line endings
        if (trimmed.startsWith('data: ') && trimmed !== 'data: [DONE]') {
          yield trimmed.slice(6)
        }
      }
    }
  } finally {
    // Release the lock even if the consumer abandons the generator early
    // or read() throws, so the body stream isn't left locked.
    reader.releaseLock()
  }
}
// AI Chat - Non-streaming
/**
 * Send a single non-streaming chat completion request.
 * Forces `stream: false` regardless of what the caller passed in `req`.
 */
async chatCompletion(req: AIChatCompletionRequest): Promise<any> {
  const payload: AIChatCompletionRequest = { ...req, stream: false }
  return this.request<any>('/ai/chat/completions', {
    method: 'POST',
    body: JSON.stringify(payload),
  })
}
// AI Models
async getAIModels(): Promise<{ list: AIModelInfo[] }> {
return this.request<{ list: AIModelInfo[] }>('/ai/models')
}
// AI Conversations
/**
 * Fetch a page of the current user's AI conversations.
 * @param page 1-based page number (default 1)
 * @param pageSize items per page (default 20)
 */
async getAIConversations(page: number = 1, pageSize: number = 20): Promise<{ list: AIConversation[]; total: number }> {
  const query = new URLSearchParams({ page: String(page), pageSize: String(pageSize) })
  return this.request<{ list: AIConversation[]; total: number }>(`/ai/conversations?${query.toString()}`)
}
async createAIConversation(modelId?: string, title?: string): Promise<AIConversation> {
return this.request<AIConversation>('/ai/conversation', {
method: 'POST',
body: JSON.stringify({ modelId, title }),
})
}
/** Fetch one conversation together with its message history. */
async getAIConversation(id: number): Promise<{ conversation: AIConversation; messages: AIChatMessage[] }> {
  type ConversationDetail = { conversation: AIConversation; messages: AIChatMessage[] }
  return this.request<ConversationDetail>(`/ai/conversation/${id}`)
}
/** Rename a conversation; returns the updated record. */
async updateAIConversation(id: number, title: string): Promise<AIConversation> {
  const payload = JSON.stringify({ title })
  return this.request<AIConversation>(`/ai/conversation/${id}`, {
    method: 'PUT',
    body: payload,
  })
}
/** Delete a conversation by id. Resolves with no value on success. */
async deleteAIConversation(id: number): Promise<void> {
  const path = `/ai/conversation/${id}`
  await this.request<void>(path, { method: 'DELETE' })
}
// AI Quota
async getAIQuota(): Promise<AIQuotaInfo> {
return this.request<AIQuotaInfo>('/ai/quota/me')
}
// Health check
async healthCheck(): Promise<{ status: string }> {
try {

56
frontend/react-shadcn/pc/src/types/index.ts

@ -283,3 +283,59 @@ export interface UserOrgInfo {
roleName: string
roleCode: string
}
// ============ AI Types ============

/** A chat model entry as returned by GET /ai/models. */
export interface AIModelInfo {
id: number
providerId: number
providerName: string
// Provider-side model identifier — presumably the value passed as
// AIChatCompletionRequest.model; confirm against the backend.
modelId: string
// Human-readable name for UI display.
displayName: string
// Per-token pricing — units/currency not visible here; confirm with backend.
inputPrice: number
outputPrice: number
maxTokens: number
contextWindow: number
supportsStream: boolean
supportsVision: boolean
}
/** A persisted chat conversation (see the /ai/conversation endpoints). */
export interface AIConversation {
id: number
title: string
modelId: string
providerId: number
// Presumably cumulative usage across the conversation — confirm with backend.
totalTokens: number
totalCost: number
isArchived: boolean
// Server-provided timestamps; format (ISO 8601?) not visible here.
createdAt: string
updatedAt: string
}
/** One message within an AIConversation. */
export interface AIChatMessage {
id: number
conversationId: number
role: 'user' | 'assistant' | 'system'
content: string
tokenCount: number
cost: number
modelId: string
// Presumably server-measured generation latency in milliseconds — confirm.
latencyMs: number
createdAt: string
}
/** The current user's AI spending quota, from GET /ai/quota/me. */
export interface AIQuotaInfo {
// Currency/units not visible here — confirm against backend pricing docs.
balance: number
totalRecharged: number
totalConsumed: number
frozenAmount: number
}
/**
 * Request body for POST /ai/chat/completions.
 * NOTE(review): snake_case fields suggest an OpenAI-compatible wire format —
 * keep these names as-is; they are part of the API contract.
 */
export interface AIChatCompletionRequest {
model: string
messages: { role: string; content: string }[]
// When true the server streams SSE; chatStream() forces this to true and
// chatCompletion() forces it to false.
stream?: boolean
max_tokens?: number
temperature?: number
// Optional link back to a persisted AIConversation — confirm semantics.
conversation_id?: number
}

Loading…
Cancel
Save