feat(ai-chat): 新增豆包API + AI模型配置项支持动态切换

- 后端新增豆包(火山引擎Ark)API集成:DoubaoController、ToolDoubaoServiceImpl,
  使用OkHttp3 SSE流式对话,兼容OpenAI Chat Completions格式
- 新增DoubaoConfig配置类,读取doubao.api.*配置
- 在eb_system_config表新增ai_chat_model配置项,支持doubao/coze/gemini三种模型切换
- 新增GET /api/front/doubao/ai-model-config接口供前端读取当前模型配置
- 前端ai-nutritionist.vue的sendToAI按系统配置分发到_sendViaDoubao/_sendViaCoze/_sendViaGemini
- 前端models-api.js新增doubaoChatStream/doubaoChat/getAiModelConfig函数
- 附带豆包API测试脚本和数据库初始化SQL

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
msh-agent
2026-04-11 18:03:21 +08:00
parent 58ea76498f
commit b164d8ba11
14 changed files with 1369 additions and 119 deletions

View File

@@ -812,6 +812,163 @@ function cozeTextToSpeech(data) {
})
}
/**
 * 读取系统当前启用的 AI 对话模型配置。
 * @returns {Promise} { data: { model: "doubao" | "coze" | "gemini" } }
 */
function getAiModelConfig() {
  const url = '/api/front/doubao/ai-model-config'
  return request(url)
}
/**
 * 豆包(火山引擎 Ark)- 非流式对话。
 * @param {object} data 请求参数 { messages: [{role, content}], model?, temperature?, maxTokens? }
 * @returns {Promise} 对话响应(OpenAI Chat Completions 格式)
 */
function doubaoChat(data) {
  const options = {
    method: 'POST',
    data: data
  }
  return request('/api/front/doubao/chat', options)
}
/**
 * 豆包(火山引擎 Ark)- 流式对话 (SSE + enableChunked)
 * 返回 OpenAI 兼容的 SSE 事件流(choices[0].delta.content)
 * @param {object} data 请求参数 { messages: [{role, content}] }
 * @returns {object} 控制器 { onMessage(deltaText), onError, onComplete, abort, getTask }
 */
function doubaoChatStream(data) {
  let _onMessage = () => {}
  let _onError = () => {}
  let _onComplete = () => {}
  let _buffer = ''        // 跨 chunk 的未完成行缓冲
  let _task = null
  let _gotChunks = false  // 是否收到过 chunked 数据(用于判断是否需要降级解析)
  const controller = {
    onMessage(fn) { _onMessage = fn; return controller },
    onError(fn) { _onError = fn; return controller },
    onComplete(fn) { _onComplete = fn; return controller },
    abort() { if (_task) _task.abort() },
    getTask() { return _task }
  }
  /** 从 SSE data JSON 中提取增量文本(OpenAI 兼容格式) */
  const extractDeltaText = (evt) => {
    if (evt && Array.isArray(evt.choices) && evt.choices[0]) {
      const delta = evt.choices[0].delta
      if (delta && typeof delta.content === 'string') return delta.content
      const msg = evt.choices[0].message
      if (msg && typeof msg.content === 'string') return msg.content
    }
    return ''
  }
  /**
   * 处理单行 SSE 文本。流式路径与 body 降级路径共用,
   * 修复原实现中两处完全重复的解析逻辑。
   * @param {string} line 原始行
   * @param {string} tag 日志后缀,用于区分来源(保持原有日志文案不变)
   */
  const handleSseLine = (line, tag) => {
    const trimmed = line.trim()
    if (!trimmed || trimmed.startsWith(':')) return
    if (!trimmed.startsWith('data:')) return
    const jsonStr = trimmed.slice(5).trim()
    // 结束哨兵:兼容 "data: [DONE]" 与 "data:[DONE]" 两种写法
    // (原实现只匹配带空格形式,"data:[DONE]" 会进入 JSON.parse 并告警)
    if (!jsonStr || jsonStr === '[DONE]') return
    try {
      const evt = JSON.parse(jsonStr)
      const delta = extractDeltaText(evt)
      if (delta) _onMessage(delta)
    } catch (e) {
      console.warn('[doubaoChatStream] JSON parse failed' + tag + ':', jsonStr.slice(0, 200))
    }
  }
  /** 追加文本到缓冲并按完整行解析(末尾不完整的行留在缓冲) */
  const parseSseLines = (text) => {
    _buffer += text
    const lines = _buffer.split('\n')
    _buffer = lines.pop() || ''
    for (const line of lines) handleSseLine(line, '')
  }
  /** 不支持 chunked 时的降级:从完整 response body 一次性解析 */
  const parseSseResponseBody = (body) => {
    if (!body || typeof body !== 'string') return
    for (const line of body.split('\n')) handleSseLine(line, ' in body fallback')
  }
  const token = store.state && store.state.app && store.state.app.token
  _task = uni.request({
    url: `${API_BASE_URL}/api/front/doubao/chat/stream`,
    method: 'POST',
    data: data,
    header: {
      'Content-Type': 'application/json',
      'Accept': 'text/event-stream',
      ...(token ? { [TOKENNAME]: token } : {})
    },
    enableChunked: true,
    responseType: 'text',
    success: (res) => {
      console.log('[doubaoChatStream] success: statusCode=', res.statusCode, '_gotChunks=', _gotChunks)
      if (res.statusCode !== 200) {
        let errMsg = '请求失败: ' + res.statusCode
        try {
          const body = typeof res.data === 'string' ? JSON.parse(res.data) : res.data
          if (body && body.message) errMsg = body.message
          else if (body && body.error && body.error.message) errMsg = body.error.message
        } catch (e) { /* keep default message */ }
        _onError(new Error(errMsg))
        return
      }
      // 处理 buffer 中残余内容(最后一行可能没有结尾换行符)
      if (_buffer.trim()) {
        parseSseLines('\n')
      }
      // 不支持 chunked 的环境降级:从完整 response body 解析
      if (!_gotChunks && res && res.data) {
        const body = typeof res.data === 'string' ? res.data : JSON.stringify(res.data)
        parseSseResponseBody(body)
      }
      _onComplete()
    },
    fail: (err) => {
      console.error('[doubaoChatStream] request fail:', err)
      _onError(err)
    }
  })
  if (_task && _task.onChunkReceived) {
    _task.onChunkReceived((res) => {
      _gotChunks = true
      try {
        // ArrayBuffer -> 按字节转 latin1 字符串,再用 escape/decodeURIComponent
        // 还原 UTF-8(小程序环境通常没有 TextDecoder)
        const bytes = new Uint8Array(res.data)
        let text = ''
        for (let i = 0; i < bytes.length; i++) {
          text += String.fromCharCode(bytes[i])
        }
        // BUG FIX: 原实现在 decodeURIComponent 抛 URIError 时丢弃整个 chunk。
        // 多字节 UTF-8 字符恰好被切分在 chunk 边界时会触发该异常,
        // 此处降级为使用未解码文本,至少保留 ASCII 部分的 SSE 行结构。
        let decoded
        try {
          decoded = decodeURIComponent(escape(text))
        } catch (decodeErr) {
          console.warn('[doubaoChatStream] chunk decode error:', decodeErr)
          decoded = text
        }
        parseSseLines(decoded)
      } catch (e) {
        console.warn('[doubaoChatStream] chunk decode error:', e)
      }
    })
  }
  return controller
}
export default {
request,
getArticleById,
@@ -835,5 +992,10 @@ export default {
cozeWorkflowStream,
cozeWorkflowResume,
cozeUploadFile,
cozeTextToSpeech
cozeTextToSpeech,
// 豆包 API
doubaoChat,
doubaoChatStream,
// AI 模型配置
getAiModelConfig
}

View File

@@ -224,6 +224,7 @@ export default {
},
data() {
return {
aiModel: 'doubao', // 当前AI模型: doubao / coze / gemini从系统配置读取
botId: '7591133240535449654',
conversationId: '',
scrollTop: 0,
@@ -261,6 +262,7 @@ export default {
});
this.initRecorder();
this.initAudioContext();
this.loadAiModelConfig();
},
onUnload() {
this.stopRecordTimer();
@@ -277,6 +279,19 @@ export default {
}
},
methods: {
/** 加载 AI 模型配置 */
loadAiModelConfig() {
api.getAiModelConfig().then(res => {
const data = res && res.data
if (data && data.model) {
this.aiModel = data.model.trim().toLowerCase()
console.log('[ai-nutritionist] AI模型配置:', this.aiModel)
}
}).catch(err => {
console.warn('[ai-nutritionist] 获取AI模型配置失败使用默认 doubao:', err)
})
},
// 初始化录音管理器
initRecorder() {
// #ifdef MP-WEIXIN || APP-PLUS
@@ -744,87 +759,65 @@ export default {
return '';
},
/** 从 Gemini(KieAI) 非流式响应中提取回复文本 */
getGeminiReplyFromResponse(response) {
if (!response || typeof response !== 'object') return ''
let data = response.data
if (typeof data === 'string') {
try { data = JSON.parse(data) } catch (e) { return '' }
}
const payload = this.getGeminiPayload(data || response)
if (!payload) return ''
// OpenAI 形态: choices[0].message.content
if (Array.isArray(payload.choices) && payload.choices[0]) {
const msg = payload.choices[0].message
if (msg) return this.extractReplyContent(msg.content)
const delta = payload.choices[0].delta
if (delta) return this.extractReplyContent(delta.content)
}
// Gemini 形态: candidates[0].content
if (Array.isArray(payload.candidates) && payload.candidates[0]) {
return this.extractReplyContent(payload.candidates[0].content)
}
return ''
},
/** 工具方法sleep ms 毫秒 */
sleep(ms) {
return new Promise(resolve => setTimeout(resolve, ms));
},
buildGeminiMessages(content, type) {
if (type === 'text') {
return [{ role: 'user', content: typeof content === 'string' ? content : String(content) }];
}
const parts = Array.isArray(content) ? content : [{ type: 'text', text: String(content) }];
return [{ role: 'user', content: parts }];
},
/**
* BUG-005主路径为 response.data.choices[0].message.contentmodels-api 已尽量规范 data 形态)。
* 兼容上游 candidates、或根级即 completion 等变体;仅使用接口返回字段,
* 成功路径不使用 api/tool.js 的 getAIResponse 或本地关键词话术。
* 构建 OpenAI 兼容格式的消息列表(用于豆包 API
* @param {string|Array} content 消息内容
* @param {string} type 消息类型 text / image / multimodal
* @returns {Array} messages 数组 [{role, content}]
*/
getGeminiReplyFromResponse(response) {
if (!response || typeof response !== 'object') return '';
const looksLikeCompletion = (o) =>
o && typeof o === 'object' &&
(Array.isArray(o.choices) || Array.isArray(o.candidates));
let data = response.data;
if (typeof data === 'string') {
try {
data = JSON.parse(data);
} catch (e) {
return '';
}
buildChatMessages(content, type) {
if (type === 'text') {
return [{ role: 'user', content: typeof content === 'string' ? content : String(content) }]
}
// 主路径OpenAI 形态 choices[0].message.content与需求 data.choices[0].message.content 一致)
if (data && typeof data === 'object' && Array.isArray(data.choices) && data.choices.length > 0) {
const choice0 = data.choices[0];
const msg = choice0 && choice0.message;
if (msg && typeof msg === 'object') {
const fromMsg = this.extractReplyContent(msg.content);
if (fromMsg.trim()) return fromMsg;
if (type === 'image') {
// 图片:尝试构建多模态消息
let fileInfo = content
if (typeof fileInfo === 'string') {
try { fileInfo = JSON.parse(fileInfo) } catch (e) { /* 非JSON */ }
}
const delta = choice0 && choice0.delta;
if (delta && typeof delta === 'object') {
const fromDelta = this.extractReplyContent(delta.content);
if (fromDelta.trim()) return fromDelta;
}
if (choice0 && typeof choice0.text === 'string' && choice0.text.trim()) {
return choice0.text;
const imageUrl = (fileInfo && fileInfo.url) || (fileInfo && fileInfo.path) || ''
if (imageUrl) {
return [{
role: 'user',
content: [
{ type: 'image_url', image_url: { url: imageUrl } },
{ type: 'text', text: '请分析这张图片' }
]
}]
}
return [{ role: 'user', content: '我发送了一张图片,请帮我分析' }]
}
if (!looksLikeCompletion(data) && looksLikeCompletion(response)) {
data = response;
}
if (!data || typeof data !== 'object') return '';
const payload = this.getGeminiPayload(data);
if (!payload || typeof payload !== 'object') return '';
const choices = payload.choices;
if (Array.isArray(choices) && choices.length > 0) {
const choice0 = choices[0];
const msg = choice0 && choice0.message;
if (msg && typeof msg === 'object') {
const fromMsg = this.extractReplyContent(msg.content);
if (fromMsg.trim()) return fromMsg;
}
const delta = choice0 && choice0.delta;
if (delta && typeof delta === 'object') {
const fromDelta = this.extractReplyContent(delta.content);
if (fromDelta.trim()) return fromDelta;
}
if (choice0 && typeof choice0.text === 'string' && choice0.text.trim()) {
return choice0.text;
}
}
const cands = payload.candidates;
if (Array.isArray(cands) && cands.length > 0) {
const c0 = cands[0];
if (c0 && typeof c0 === 'object') {
const fromCand = this.extractReplyContent(c0.content);
if (fromCand.trim()) return fromCand;
if (typeof c0.text === 'string' && c0.text.trim()) return c0.text;
}
}
return '';
// multimodal直接传多模态 parts
const parts = Array.isArray(content) ? content : [{ type: 'text', text: String(content) }]
return [{ role: 'user', content: parts }]
},
async sendToAI(content, type) {
@@ -833,54 +826,15 @@ export default {
this.messageList.push(aiMsg);
this.scrollToBottom();
const messages = this.buildGeminiMessages(content, type);
// 优先尝试流式输出以改善响应速度感知
try {
await new Promise((resolve, reject) => {
const ctrl = api.kieaiGeminiChatStream({ messages })
this._streamCtrl = ctrl
// 收到第一个 chunk 后切换为 streaming 状态
ctrl.onMessage((deltaText) => {
if (aiMsg.loading) {
aiMsg.loading = false
aiMsg.streaming = true
}
aiMsg.content += deltaText
this.messageList = [...this.messageList]
this.scrollToBottom()
})
ctrl.onError((err) => {
console.warn('[sendToAI] 流式请求失败,降级为非流式:', err)
this._streamCtrl = null
reject(err)
})
ctrl.onComplete(() => {
this._streamCtrl = null
resolve()
})
})
// 流式完成,检查内容
if (!aiMsg.content.trim()) {
aiMsg.content = '模型未返回有效内容,请稍后重试。'
}
} catch (streamError) {
// 流式失败,降级为非流式请求
try {
const response = await api.kieaiGeminiChat({
messages,
stream: false
});
const reply = this.getGeminiReplyFromResponse(response);
aiMsg.content = reply.trim() ? reply : '模型未返回有效内容,请稍后重试。';
} catch (error) {
console.error('KieAI Gemini 对话失败:', error);
const errText = error && error.message ? String(error.message) : '';
aiMsg.content = errText || '抱歉,处理您的请求时出现错误,请稍后再试。';
// 根据系统配置的 aiModel 分发到不同的 API
if (this.aiModel === 'coze') {
await this._sendViaCoze(content, type, aiMsg)
} else if (this.aiModel === 'gemini') {
await this._sendViaGemini(content, type, aiMsg)
} else {
// 默认:豆包
await this._sendViaDoubao(content, type, aiMsg)
}
} finally {
aiMsg.loading = false;
@@ -897,6 +851,159 @@ export default {
}
},
// ========== 豆包 APIOpenAI 兼容,默认) ==========
async _sendViaDoubao(content, type, aiMsg) {
const messages = this.buildChatMessages(content, type)
try {
await new Promise((resolve, reject) => {
const ctrl = api.doubaoChatStream({ messages })
this._streamCtrl = ctrl
ctrl.onMessage((deltaText) => {
if (aiMsg.loading) { aiMsg.loading = false; aiMsg.streaming = true }
aiMsg.content += deltaText
this.messageList = [...this.messageList]
this.scrollToBottom()
})
ctrl.onError((err) => { this._streamCtrl = null; reject(err) })
ctrl.onComplete(() => { this._streamCtrl = null; resolve() })
})
if (!aiMsg.content.trim()) aiMsg.content = '模型未返回有效内容,请稍后重试。'
} catch (streamError) {
console.warn('[sendToAI:doubao] 流式失败,降级非流式:', streamError)
try {
const response = await api.doubaoChat({ messages, stream: false })
let reply = ''
if (response && response.choices && response.choices[0]) {
const msg = response.choices[0].message
if (msg && msg.content) reply = msg.content
}
aiMsg.content = reply.trim() ? reply : '模型未返回有效内容,请稍后重试。'
} catch (error) {
console.error('豆包对话失败:', error)
aiMsg.content = (error && error.message) || '抱歉,处理您的请求时出现错误,请稍后再试。'
}
}
},
// ========== Coze API ==========
async _sendViaCoze(content, type, aiMsg) {
const userId = this.uid || (uni.getStorageSync('userInfo') || {}).id || 'default_user'
const cozeMessages = this.buildChatMessages(content, type).map(m => ({
...m, content_type: 'text'
}))
const reqData = {
botId: this.botId,
userId: String(userId),
additionalMessages: cozeMessages
}
if (this.conversationId) reqData.conversationId = this.conversationId
try {
await new Promise((resolve, reject) => {
const ctrl = api.cozeChatStream(reqData)
this._streamCtrl = ctrl
ctrl.onMessage((evt) => {
if (evt.conversation_id && !this.conversationId) {
this.conversationId = evt.conversation_id
}
if (evt.event === 'conversation.message.delta' && evt.type === 'answer' && evt.content) {
if (aiMsg.loading) { aiMsg.loading = false; aiMsg.streaming = true }
aiMsg.content += evt.content
this.messageList = [...this.messageList]
this.scrollToBottom()
}
if (evt.event === 'conversation.chat.completed' && evt.conversation_id) {
this.conversationId = evt.conversation_id
}
})
ctrl.onError((err) => { this._streamCtrl = null; reject(err) })
ctrl.onComplete(() => { this._streamCtrl = null; resolve() })
})
if (!aiMsg.content.trim()) aiMsg.content = '模型未返回有效内容,请稍后重试。'
} catch (streamError) {
console.warn('[sendToAI:coze] 流式失败,降级非流式:', streamError)
try {
reqData.stream = false
const response = await api.cozeChat(reqData)
if (response && response.data && response.data.chat) {
const { conversation_id, id: chat_id } = response.data.chat
if (conversation_id) this.conversationId = conversation_id
await this.pollCozeResult(conversation_id, chat_id, aiMsg)
} else {
aiMsg.content = '模型未返回有效内容,请稍后重试。'
}
} catch (error) {
console.error('Coze 对话失败:', error)
aiMsg.content = (error && error.message) || '抱歉,处理您的请求时出现错误,请稍后再试。'
}
}
},
// ========== Gemini API (KieAI) ==========
async _sendViaGemini(content, type, aiMsg) {
const messages = this.buildChatMessages(content, type)
try {
await new Promise((resolve, reject) => {
const ctrl = api.kieaiGeminiChatStream({ messages })
this._streamCtrl = ctrl
ctrl.onMessage((deltaText) => {
if (aiMsg.loading) { aiMsg.loading = false; aiMsg.streaming = true }
aiMsg.content += deltaText
this.messageList = [...this.messageList]
this.scrollToBottom()
})
ctrl.onError((err) => { this._streamCtrl = null; reject(err) })
ctrl.onComplete(() => { this._streamCtrl = null; resolve() })
})
if (!aiMsg.content.trim()) aiMsg.content = '模型未返回有效内容,请稍后重试。'
} catch (streamError) {
console.warn('[sendToAI:gemini] 流式失败,降级非流式:', streamError)
try {
const response = await api.kieaiGeminiChat({ messages, stream: false })
const reply = this.getGeminiReplyFromResponse(response)
aiMsg.content = reply.trim() ? reply : '模型未返回有效内容,请稍后重试。'
} catch (error) {
console.error('Gemini 对话失败:', error)
aiMsg.content = (error && error.message) || '抱歉,处理您的请求时出现错误,请稍后再试。'
}
}
},
/**
* 非流式 Coze 降级:轮询对话状态并获取回复
*/
async pollCozeResult(conversationId, chatId, aiMsg) {
const maxAttempts = 30
const interval = 2000
for (let i = 0; i < maxAttempts; i++) {
await this.sleep(interval)
try {
const res = await api.cozeRetrieveChat({ conversationId, chatId })
const status = res && res.data && res.data.status
if (status === 'completed') {
const msgRes = await api.cozeMessageList({ conversationId, chatId })
if (msgRes && msgRes.data && Array.isArray(msgRes.data)) {
const answer = msgRes.data.find(m => m.role === 'assistant' && m.type === 'answer')
if (answer && answer.content) {
aiMsg.content = answer.content
this.messageList = [...this.messageList]
this.scrollToBottom()
return
}
}
aiMsg.content = '模型未返回有效内容,请稍后重试。'
return
} else if (status === 'failed') {
aiMsg.content = '对话处理失败,请稍后重试。'
return
}
} catch (e) {
console.warn('[pollCozeResult] 轮询出错:', e)
}
}
aiMsg.content = '等待超时,请稍后重试。'
},
// ---------- TTS 方法 ----------
initAudioContext() {