You can not select more than 25 topics
Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.
239 lines
5.7 KiB
239 lines
5.7 KiB
package ai
|
|
|
|
import (
|
|
"bufio"
|
|
"bytes"
|
|
"context"
|
|
"encoding/json"
|
|
"fmt"
|
|
"io"
|
|
"net/http"
|
|
"strings"
|
|
)
|
|
|
|
// AliyunBaseURL is the OpenAI-compatible chat endpoint for plain
// DashScope model access (used when no application ID is configured).
const AliyunBaseURL = "https://dashscope.aliyuncs.com/compatible-mode/v1"

// AliyunAppURLTemplate is the endpoint template for DashScope
// application (agent) mode; the single %s placeholder is filled with
// the application ID (see NewAliyunClient).
const AliyunAppURLTemplate = "https://dashscope.aliyuncs.com/api/v2/apps/agent/%s/compatible-mode/v1"
|
|
|
|
// AliyunClient talks to the Aliyun DashScope chat-completion API,
// in either plain model mode or application (agent) mode.
type AliyunClient struct {
	apiKey  string // bearer token sent in the Authorization header
	model   string // model name; defaults to "qwen-turbo" when unset
	appID   string // DashScope application ID; empty means plain model mode
	baseURL string // resolved endpoint root, derived from appID in NewAliyunClient
}
|
|
|
|
func NewAliyunClient(cfg *Config) *AliyunClient {
|
|
model := cfg.Model
|
|
if model == "" {
|
|
model = "qwen-turbo"
|
|
}
|
|
|
|
client := &AliyunClient{
|
|
apiKey: cfg.APIKey,
|
|
model: model,
|
|
appID: cfg.AppID,
|
|
}
|
|
|
|
if cfg.AppID != "" {
|
|
client.baseURL = fmt.Sprintf(AliyunAppURLTemplate, cfg.AppID)
|
|
} else {
|
|
client.baseURL = AliyunBaseURL
|
|
}
|
|
|
|
return client
|
|
}
|
|
|
|
// aliyunRequest is the JSON body posted to the /chat/completions
// endpoint (OpenAI-compatible schema plus DashScope extensions).
type aliyunRequest struct {
	Model    string    `json:"model"`
	Messages []Message `json:"messages"`
	// Stream selects SSE streaming; omitted when false (Chat) and set
	// to true by ChatStream.
	Stream bool `json:"stream,omitempty"`
	// EnableThinking asks the service to emit reasoning content as
	// well; only set on streaming requests in this file.
	EnableThinking bool `json:"enable_thinking,omitempty"`
	// ThinkingBudget caps the thinking output (ChatStream uses 2048).
	ThinkingBudget int `json:"thinking_budget,omitempty"`
}
|
|
|
|
// aliyunResponse models both the non-streaming response body and the
// individual SSE chunks of a streaming response: Chat reads
// Choices[i].Message, while ChatStream reads Choices[i].Delta.
type aliyunResponse struct {
	ID      string `json:"id"`
	Choices []struct {
		Index int `json:"index"`
		// Message is the full reply in non-streaming mode.
		Message struct {
			Role             string `json:"role"`
			Content          string `json:"content"`
			ReasoningContent string `json:"reasoning_content"`
		} `json:"message"`
		// Delta carries the incremental fragment of each stream chunk.
		Delta struct {
			Content          string `json:"content"`
			ReasoningContent string `json:"reasoning_content"`
		} `json:"delta"`
		FinishReason string `json:"finish_reason"`
	} `json:"choices"`
	// Error is non-nil when the service returns an error envelope.
	Error *struct {
		Message string `json:"message"`
		Code    string `json:"code"`
	} `json:"error"`
}
|
|
|
|
func (c *AliyunClient) Chat(ctx context.Context, messages []Message) (string, error) {
|
|
if c.apiKey == "" {
|
|
return "", fmt.Errorf("阿里云 API Key 未配置")
|
|
}
|
|
|
|
filteredMessages := messages
|
|
if c.appID != "" {
|
|
filteredMessages = filterSystemMessages(messages)
|
|
}
|
|
|
|
reqBody := aliyunRequest{
|
|
Model: c.model,
|
|
Messages: filteredMessages,
|
|
Stream: false,
|
|
}
|
|
|
|
body, _ := json.Marshal(reqBody)
|
|
req, err := http.NewRequestWithContext(ctx, "POST", c.baseURL+"/chat/completions", bytes.NewReader(body))
|
|
if err != nil {
|
|
return "", err
|
|
}
|
|
|
|
req.Header.Set("Content-Type", "application/json")
|
|
req.Header.Set("Authorization", "Bearer "+c.apiKey)
|
|
|
|
resp, err := http.DefaultClient.Do(req)
|
|
if err != nil {
|
|
return "", fmt.Errorf("调用AI服务失败: %v", err)
|
|
}
|
|
defer resp.Body.Close()
|
|
|
|
var result aliyunResponse
|
|
if err := json.NewDecoder(resp.Body).Decode(&result); err != nil {
|
|
return "", fmt.Errorf("解析AI响应失败: %v", err)
|
|
}
|
|
|
|
if result.Error != nil {
|
|
return "", fmt.Errorf("AI服务错误: %s", result.Error.Message)
|
|
}
|
|
|
|
if len(result.Choices) == 0 {
|
|
return "", fmt.Errorf("AI未返回有效响应")
|
|
}
|
|
|
|
return result.Choices[0].Message.Content, nil
|
|
}
|
|
|
|
func (c *AliyunClient) ChatStream(ctx context.Context, messages []Message, writer io.Writer) error {
|
|
if c.apiKey == "" {
|
|
return fmt.Errorf("阿里云 API Key 未配置")
|
|
}
|
|
|
|
filteredMessages := messages
|
|
if c.appID != "" {
|
|
filteredMessages = filterSystemMessages(messages)
|
|
}
|
|
|
|
// 启用思考过程(与原 server 一致)
|
|
reqBody := aliyunRequest{
|
|
Model: c.model,
|
|
Messages: filteredMessages,
|
|
Stream: true,
|
|
EnableThinking: true, // 启用思考过程
|
|
ThinkingBudget: 2048, // 思考过程最大 token 数
|
|
}
|
|
|
|
body, _ := json.Marshal(reqBody)
|
|
req, err := http.NewRequestWithContext(ctx, "POST", c.baseURL+"/chat/completions", bytes.NewReader(body))
|
|
if err != nil {
|
|
return err
|
|
}
|
|
|
|
req.Header.Set("Content-Type", "application/json")
|
|
req.Header.Set("Authorization", "Bearer "+c.apiKey)
|
|
|
|
resp, err := http.DefaultClient.Do(req)
|
|
if err != nil {
|
|
return err
|
|
}
|
|
defer resp.Body.Close()
|
|
|
|
if resp.StatusCode != 200 {
|
|
respBody, _ := io.ReadAll(resp.Body)
|
|
return fmt.Errorf("AI服务返回错误: %s", string(respBody))
|
|
}
|
|
|
|
reader := bufio.NewReader(resp.Body)
|
|
isThinking := false
|
|
|
|
for {
|
|
line, err := reader.ReadString('\n')
|
|
if err == io.EOF {
|
|
break
|
|
}
|
|
if err != nil {
|
|
return err
|
|
}
|
|
|
|
line = strings.TrimSpace(line)
|
|
if strings.HasPrefix(line, "data:") {
|
|
data := strings.TrimPrefix(line, "data:")
|
|
data = strings.TrimSpace(data)
|
|
if data == "[DONE]" {
|
|
break
|
|
}
|
|
|
|
var streamResp aliyunResponse
|
|
if err := json.Unmarshal([]byte(data), &streamResp); err != nil {
|
|
continue
|
|
}
|
|
|
|
if len(streamResp.Choices) > 0 {
|
|
choice := streamResp.Choices[0]
|
|
|
|
if choice.Delta.ReasoningContent != "" {
|
|
if !isThinking {
|
|
sseData := "data: {\"type\":\"thinking_start\"}\n\n"
|
|
writer.Write([]byte(sseData))
|
|
if flusher, ok := writer.(http.Flusher); ok {
|
|
flusher.Flush()
|
|
}
|
|
isThinking = true
|
|
}
|
|
sseData := fmt.Sprintf("data: {\"type\":\"thinking\",\"content\":%s}\n\n", jsonEscape(choice.Delta.ReasoningContent))
|
|
writer.Write([]byte(sseData))
|
|
if flusher, ok := writer.(http.Flusher); ok {
|
|
flusher.Flush()
|
|
}
|
|
}
|
|
|
|
if choice.Delta.Content != "" {
|
|
if isThinking {
|
|
sseData := "data: {\"type\":\"thinking_end\"}\n\n"
|
|
writer.Write([]byte(sseData))
|
|
if flusher, ok := writer.(http.Flusher); ok {
|
|
flusher.Flush()
|
|
}
|
|
isThinking = false
|
|
}
|
|
sseData := fmt.Sprintf("data: {\"type\":\"content\",\"content\":%s}\n\n", jsonEscape(choice.Delta.Content))
|
|
writer.Write([]byte(sseData))
|
|
if flusher, ok := writer.(http.Flusher); ok {
|
|
flusher.Flush()
|
|
}
|
|
}
|
|
}
|
|
}
|
|
}
|
|
|
|
return nil
|
|
}
|
|
|
|
// jsonEscape returns s encoded as a JSON string literal, surrounding
// double quotes included, so it can be embedded directly into
// hand-built JSON payloads (HTML-sensitive characters are escaped by
// encoding/json as \u003c etc.).
func jsonEscape(s string) string {
	encoded, _ := json.Marshal(s) // marshaling a plain string cannot fail
	return string(encoded)
}
|
|
|
|
func filterSystemMessages(messages []Message) []Message {
|
|
filtered := make([]Message, 0, len(messages))
|
|
for _, msg := range messages {
|
|
if msg.Role != "system" {
|
|
filtered = append(filtered, msg)
|
|
}
|
|
}
|
|
return filtered
|
|
}
|
|
|