0.13
parent cb12290ecc · commit a0e3b05fd2
11  .env
@@ -1,11 +1,14 @@
# Juhe aggregated data API
JUHE_NEWS_KEY=edbc3b96f022b59141961e2137f69b4a
#JUHE_NEWS_KEY=edbc3b96f022b59141961e2137f69b4a

# Caiyun weather API
CAIYUN_API_KEY=29-CwtZrOXU1b3Cx
#CAIYUN_API_KEY=29-CwtZrOXU1b3Cx

# QWeather API
QWEATHER_API_KEY=ecd25018448140f1a8d23675c235e5b7
#QWEATHER_API_KEY=ecd25018448140f1a8d23675c235e5b7

# Vite API config
VITE_API_BASE=http://localhost:${API_PORT}
VITE_API_BASE=http://localhost:${API_PORT}

# DeepSeek API config
DEEPSEEK_API_KEY=sk-38837763120b4728a8dfe66916e99d56
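The keys above are read in the Electron main process (the hunks that follow) once dotenv.config() has loaded the file, while the renderer only ever sees the VITE_-prefixed entries through Vite. A minimal sketch of the two access paths, assuming standard dotenv/Vite behavior (the variable names are taken from the .env above):

// Main process (Node): anything dotenv loaded is available on process.env.
import dotenv from 'dotenv';
dotenv.config();
const deepseekKey = process.env.DEEPSEEK_API_KEY; // string | undefined

// Renderer (Vite): only VITE_-prefixed variables are exposed to client code.
const apiBase = import.meta.env.VITE_API_BASE;

Note that plain dotenv does not expand ${API_PORT} inside VITE_API_BASE; interpolation like that normally needs dotenv-expand (Vite applies it for its own env handling).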
@@ -5,14 +5,15 @@ import http from 'http';
import { URL } from 'url';
//import { apiRoutes } from './api'; // import the API route config
import dotenv from 'dotenv';
// add the openai module
import OpenAI from 'openai';

dotenv.config({ path: path.join(__dirname, '../.env') }); // adjust to the actual path

// make sure these are readable in news.ts
console.log('JUHE_KEY:', process.env.JUHE_NEWS_KEY?.substring(0, 3) + '***'); // print the first 3 chars to verify
console.log('CAIYUN_KEY:', process.env.CAIYUN_API_KEY?.substring(0, 3) + '***'); // print the first 3 chars to verify

dotenv.config({ path: path.join(__dirname, '../.env') });

// environment config
const isDev = process.env.NODE_ENV === 'development';

@@ -122,6 +123,38 @@ function registerIpcHandlers() {
      throw error;
    }
  });

  // Chat endpoint
  ipcMain.handle('chat-with-deepseek', async (event, { messages }) => {
    try {
      const openai = new OpenAI({
        baseURL: 'https://api.deepseek.com',
        apiKey: process.env.DEEPSEEK_API_KEY!,
      });

      const stream = await openai.chat.completions.create({
        model: 'deepseek-chat',
        messages,
        stream: true,
      });

      // Relay each streamed delta to the renderer, then signal completion.
      for await (const chunk of stream) {
        const content = chunk.choices[0]?.delta?.content || '';
        event.sender.send('chat-stream-chunk', content);
      }
      event.sender.send('chat-stream-end');

    } catch (error: any) {
      console.error('Deepseek API Error:', error);
      event.sender.send('chat-stream-error', error.message);
      throw error;
    }
  });

}

const formatNews = (items: any[]) => items.map(item => ({

@@ -155,13 +188,6 @@ const apiCache = {
};

// Disable the default menu (for security and a cleaner look)
Menu.setApplicationMenu(null);

@@ -186,7 +212,7 @@ async function createWindow() {
  mainWindow.once('ready-to-show', () => {
    mainWindow.show();
    if (isDev) {
      //mainWindow.webContents.openDevTools({ mode: 'detach' }); // open developer tools
      mainWindow.webContents.openDevTools({ mode: 'detach' }); // open developer tools
      console.log('Developer tools opened in detached mode');
    }
  });
@@ -1,8 +1,26 @@
// electron\preload.ts
import { contextBridge, ipcRenderer } from 'electron';
// type declaration added at the top of the file
interface Message {
  role: 'user' | 'assistant' | 'system';
  content: string;
}

contextBridge.exposeInMainWorld('electronAPI', {
  getWeather: (params: { lon: number; lat: number }) =>
  // Weather endpoint
  getWeather: (params: { lon: number; lat: number }) =>
    ipcRenderer.invoke('get-weather', params),
  getNews: () => ipcRenderer.invoke('get-news')
  // News endpoint
  getNews: () => ipcRenderer.invoke('get-news'),

  // Chat endpoint
  chatWithDeepseek: (messages: Message[]) => ipcRenderer.invoke('chat-with-deepseek', { messages }),
  onChatStreamChunk: (callback: (event: unknown, chunk: string) => void) =>
    ipcRenderer.on('chat-stream-chunk', callback),
  onChatStreamEnd: (callback: () => void) =>
    ipcRenderer.on('chat-stream-end', callback),
  onChatStreamError: (callback: (event: unknown, error: string) => void) =>
    ipcRenderer.on('chat-stream-error', callback),
  removeListener: (channel: string, callback: (...args: any[]) => void) =>
    ipcRenderer.removeListener(channel, callback)
});
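For reference, a minimal sketch of how the renderer can drive this exposed API (illustrative only, with a hypothetical sendMessage helper; the real wiring lives in VoiceAssistant.tsx further down, where the listeners are registered in a useEffect):

async function sendMessage(history: Message[]) {
  const onChunk = (_e: unknown, chunk: string) => { /* append chunk to the reply being built */ };
  const onEnd = () => { /* mark the reply as finished */ };
  const onError = (_e: unknown, msg: string) => console.error('chat failed:', msg);

  window.electronAPI.onChatStreamChunk(onChunk);
  window.electronAPI.onChatStreamEnd(onEnd);
  window.electronAPI.onChatStreamError(onError);
  try {
    // invoke() resolves only after the main-process handler has finished streaming
    await window.electronAPI.chatWithDeepseek(history);
  } finally {
    window.electronAPI.removeListener('chat-stream-chunk', onChunk);
    window.electronAPI.removeListener('chat-stream-end', onEnd);
    window.electronAPI.removeListener('chat-stream-error', onError);
  }
}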
@@ -19,6 +19,7 @@
    "@emotion/styled": "^11.14.0",
    "@mui/icons-material": "^6.4.5",
    "@mui/material": "^6.4.5",
    "openai": "^4.87.3",
    "react": "^19.0.0",
    "react-dom": "^19.0.0",
    "react-icons": "^5.5.0",
272  pnpm-lock.yaml
@ -20,6 +20,9 @@ importers:
|
||||
'@mui/material':
|
||||
specifier: ^6.4.5
|
||||
version: 6.4.6(@emotion/react@11.14.0(@types/react@19.0.10)(react@19.0.0))(@emotion/styled@11.14.0(@emotion/react@11.14.0(@types/react@19.0.10)(react@19.0.0))(@types/react@19.0.10)(react@19.0.0))(@types/react@19.0.10)(react-dom@19.0.0(react@19.0.0))(react@19.0.0)
|
||||
openai:
|
||||
specifier: ^4.87.3
|
||||
version: 4.87.3(encoding@0.1.13)
|
||||
react:
|
||||
specifier: ^19.0.0
|
||||
version: 19.0.0
|
||||
@ -742,6 +745,12 @@ packages:
|
||||
'@types/keyv@3.1.4':
|
||||
resolution: {integrity: sha512-BQ5aZNSCpj7D6K2ksrRCTmKRLEpnPvWDiLPfoGyhZ++8YtiK9d/3DBKPJgry359X/P1PfruyYwvnvwFjuEiEIg==}
|
||||
|
||||
'@types/node-fetch@2.6.12':
|
||||
resolution: {integrity: sha512-8nneRWKCg3rMtF69nLQJnOYUcbafYeFSjqkw3jCRLsqkWFlHaoQrr5mXmofFGOx3DKn7UfmBMyov8ySvLRVldA==}
|
||||
|
||||
'@types/node@18.19.80':
|
||||
resolution: {integrity: sha512-kEWeMwMeIvxYkeg1gTc01awpwLbfMRZXdIhwRcakd/KlK53jmRC26LqcbIt7fnAQTu5GzlnWmzA3H6+l1u6xxQ==}
|
||||
|
||||
'@types/node@20.17.22':
|
||||
resolution: {integrity: sha512-9RV2zST+0s3EhfrMZIhrz2bhuhBwxgkbHEwP2gtGWPjBzVQjifMzJ9exw7aDZhR1wbpj8zBrfp3bo8oJcGiUUw==}
|
||||
|
||||
@ -830,6 +839,10 @@ packages:
|
||||
resolution: {integrity: sha512-+/kfrslGQ7TNV2ecmQwMJj/B65g5KVq1/L3SGVZ3tCYGqlzFuFCGBZJtMP99wH3NpEUyAjn0zPdPUg0D+DwrOA==}
|
||||
engines: {node: ^18.17.0 || >=20.5.0}
|
||||
|
||||
abort-controller@3.0.0:
|
||||
resolution: {integrity: sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg==}
|
||||
engines: {node: '>=6.5'}
|
||||
|
||||
acorn-jsx@5.3.2:
|
||||
resolution: {integrity: sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==}
|
||||
peerDependencies:
|
||||
@ -844,6 +857,10 @@ packages:
|
||||
resolution: {integrity: sha512-jRR5wdylq8CkOe6hei19GGZnxM6rBGwFl3Bg0YItGDimvjGtAvdZk4Pu6Cl4u4Igsws4a1fd1Vq3ezrhn4KmFw==}
|
||||
engines: {node: '>= 14'}
|
||||
|
||||
agentkeepalive@4.6.0:
|
||||
resolution: {integrity: sha512-kja8j7PjmncONqaTsB8fQ+wE2mSU2DJ9D4XKoJ5PFWIdRMa6SLSN1ff4mOr4jCbfRSsxR4keIiySJU0N9T5hIQ==}
|
||||
engines: {node: '>= 8.0.0'}
|
||||
|
||||
ajv@6.12.6:
|
||||
resolution: {integrity: sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==}
|
||||
|
||||
@ -876,6 +893,9 @@ packages:
|
||||
argparse@2.0.1:
|
||||
resolution: {integrity: sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==}
|
||||
|
||||
asynckit@0.4.0:
|
||||
resolution: {integrity: sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==}
|
||||
|
||||
autoprefixer@10.4.20:
|
||||
resolution: {integrity: sha512-XY25y5xSv/wEoqzDyXXME4AFfkZI0P23z6Fs3YgymDnKJkCGOnkL0iTxCa85UTqaSgfcqyf3UA6+c7wUvx/16g==}
|
||||
engines: {node: ^10 || ^12 || >=14}
|
||||
@ -928,6 +948,10 @@ packages:
|
||||
resolution: {integrity: sha512-v+p6ongsrp0yTGbJXjgxPow2+DL93DASP4kXCDKb8/bwRtt9OEF3whggkkDkGNzgcWy2XaF4a8nZglC7uElscg==}
|
||||
engines: {node: '>=8'}
|
||||
|
||||
call-bind-apply-helpers@1.0.2:
|
||||
resolution: {integrity: sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==}
|
||||
engines: {node: '>= 0.4'}
|
||||
|
||||
callsites@3.1.0:
|
||||
resolution: {integrity: sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==}
|
||||
engines: {node: '>=6'}
|
||||
@ -969,6 +993,10 @@ packages:
|
||||
color-name@1.1.4:
|
||||
resolution: {integrity: sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==}
|
||||
|
||||
combined-stream@1.0.8:
|
||||
resolution: {integrity: sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==}
|
||||
engines: {node: '>= 0.8'}
|
||||
|
||||
commander@4.1.1:
|
||||
resolution: {integrity: sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA==}
|
||||
engines: {node: '>= 6'}
|
||||
@ -1039,6 +1067,10 @@ packages:
|
||||
resolution: {integrity: sha512-8QmQKqEASLd5nx0U1B1okLElbUuuttJ/AnYmRXbbbGDWh6uS208EjD4Xqq/I9wK7u0v6O08XhTWnt5XtEbR6Dg==}
|
||||
engines: {node: '>= 0.4'}
|
||||
|
||||
delayed-stream@1.0.0:
|
||||
resolution: {integrity: sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==}
|
||||
engines: {node: '>=0.4.0'}
|
||||
|
||||
dequal@2.0.3:
|
||||
resolution: {integrity: sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA==}
|
||||
engines: {node: '>=6'}
|
||||
@ -1059,6 +1091,10 @@ packages:
|
||||
resolution: {integrity: sha512-47qPchRCykZC03FhkYAhrvwU4xDBFIj1QPqaarj6mdM/hgUzfPHcpkHJOn3mJAufFeeAxAzeGsr5X0M4k6fLZQ==}
|
||||
engines: {node: '>=12'}
|
||||
|
||||
dunder-proto@1.0.1:
|
||||
resolution: {integrity: sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==}
|
||||
engines: {node: '>= 0.4'}
|
||||
|
||||
eastasianwidth@0.2.0:
|
||||
resolution: {integrity: sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==}
|
||||
|
||||
@ -1103,6 +1139,14 @@ packages:
|
||||
resolution: {integrity: sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==}
|
||||
engines: {node: '>= 0.4'}
|
||||
|
||||
es-object-atoms@1.1.1:
|
||||
resolution: {integrity: sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==}
|
||||
engines: {node: '>= 0.4'}
|
||||
|
||||
es-set-tostringtag@2.1.0:
|
||||
resolution: {integrity: sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA==}
|
||||
engines: {node: '>= 0.4'}
|
||||
|
||||
es6-error@4.1.1:
|
||||
resolution: {integrity: sha512-Um/+FxMr9CISWh0bi5Zv0iOD+4cFh5qLeks1qhAopKVAJw3drgKbKySikp7wGhDL0HPeaja0P5ULZrxLkniUVg==}
|
||||
|
||||
@ -1172,6 +1216,10 @@ packages:
|
||||
resolution: {integrity: sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==}
|
||||
engines: {node: '>=0.10.0'}
|
||||
|
||||
event-target-shim@5.0.1:
|
||||
resolution: {integrity: sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ==}
|
||||
engines: {node: '>=6'}
|
||||
|
||||
exponential-backoff@3.1.2:
|
||||
resolution: {integrity: sha512-8QxYTVXUkuy7fIIoitQkPwGonB8F3Zj8eEO8Sqg9Zv/bkI7RJAzowee4gr81Hak/dUTpA2Z7VfQgoijjPNlUZA==}
|
||||
|
||||
@ -1225,6 +1273,17 @@ packages:
|
||||
resolution: {integrity: sha512-gIXjKqtFuWEgzFRJA9WCQeSJLZDjgJUOMCMzxtvFq/37KojM1BFGufqsCy0r4qSQmYLsZYMeyRqzIWOMup03sw==}
|
||||
engines: {node: '>=14'}
|
||||
|
||||
form-data-encoder@1.7.2:
|
||||
resolution: {integrity: sha512-qfqtYan3rxrnCk1VYaA4H+Ms9xdpPqvLZa6xmMgFvhO32x7/3J/ExcTd6qpxM0vH2GdMI+poehyBZvqfMTto8A==}
|
||||
|
||||
form-data@4.0.2:
|
||||
resolution: {integrity: sha512-hGfm/slu0ZabnNt4oaRZ6uREyfCj6P4fT/n6A1rGV+Z0VdGXjfOhVUpkn6qVQONHGIFwmveGXyDs75+nr6FM8w==}
|
||||
engines: {node: '>= 6'}
|
||||
|
||||
formdata-node@4.4.1:
|
||||
resolution: {integrity: sha512-0iirZp3uVDjVGt9p49aTaqjk84TrglENEDuqfdlZQ1roC9CWlPk6Avf8EEnZNcAqPonwkG35x4n3ww/1THYAeQ==}
|
||||
engines: {node: '>= 12.20'}
|
||||
|
||||
fraction.js@4.3.7:
|
||||
resolution: {integrity: sha512-ZsDfxO51wGAXREY55a7la9LScWpwv9RxIrYABrlvOFBlH/ShPnrtsXeuUIfXKKOVicNxQ+o8JTbJvjS4M89yew==}
|
||||
|
||||
@ -1252,6 +1311,14 @@ packages:
|
||||
resolution: {integrity: sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==}
|
||||
engines: {node: 6.* || 8.* || >= 10.*}
|
||||
|
||||
get-intrinsic@1.3.0:
|
||||
resolution: {integrity: sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==}
|
||||
engines: {node: '>= 0.4'}
|
||||
|
||||
get-proto@1.0.1:
|
||||
resolution: {integrity: sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==}
|
||||
engines: {node: '>= 0.4'}
|
||||
|
||||
get-stream@5.2.0:
|
||||
resolution: {integrity: sha512-nBF+F1rAZVCu/p7rjzgA+Yb4lfYXrpl7a6VmJrU8wF9I1CKvP/QwPNZHnOlwbTkY6dvtFIzFMSyQXbLoTQPRpA==}
|
||||
engines: {node: '>=8'}
|
||||
@ -1313,6 +1380,14 @@ packages:
|
||||
has-property-descriptors@1.0.2:
|
||||
resolution: {integrity: sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg==}
|
||||
|
||||
has-symbols@1.1.0:
|
||||
resolution: {integrity: sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==}
|
||||
engines: {node: '>= 0.4'}
|
||||
|
||||
has-tostringtag@1.0.2:
|
||||
resolution: {integrity: sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==}
|
||||
engines: {node: '>= 0.4'}
|
||||
|
||||
hasown@2.0.2:
|
||||
resolution: {integrity: sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==}
|
||||
engines: {node: '>= 0.4'}
|
||||
@ -1335,6 +1410,9 @@ packages:
|
||||
resolution: {integrity: sha512-vK9P5/iUfdl95AI+JVyUuIcVtd4ofvtrOr3HNtM2yxC9bnMbEdp3x01OhQNnjb8IJYi38VlTE3mBXwcfvywuSw==}
|
||||
engines: {node: '>= 14'}
|
||||
|
||||
humanize-ms@1.2.1:
|
||||
resolution: {integrity: sha512-Fl70vYtsAFb/C06PTS9dZBo7ihau+Tu/DNCk/OyHhea07S+aeMWpFFkUaXRa8fI+ScZbEI8dfSxwY7gxZ9SAVQ==}
|
||||
|
||||
iconv-lite@0.6.3:
|
||||
resolution: {integrity: sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==}
|
||||
engines: {node: '>=0.10.0'}
|
||||
@ -1498,6 +1576,10 @@ packages:
|
||||
resolution: {integrity: sha512-OkeDaAZ/bQCxeFAozM55PKcKU0yJMPGifLwV4Qgjitu+5MoAfSQN4lsLJeXZ1b8w0x+/Emda6MZgXS1jvsapng==}
|
||||
engines: {node: '>=10'}
|
||||
|
||||
math-intrinsics@1.1.0:
|
||||
resolution: {integrity: sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==}
|
||||
engines: {node: '>= 0.4'}
|
||||
|
||||
merge2@1.4.1:
|
||||
resolution: {integrity: sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==}
|
||||
engines: {node: '>= 8'}
|
||||
@ -1506,6 +1588,14 @@ packages:
|
||||
resolution: {integrity: sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==}
|
||||
engines: {node: '>=8.6'}
|
||||
|
||||
mime-db@1.52.0:
|
||||
resolution: {integrity: sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==}
|
||||
engines: {node: '>= 0.6'}
|
||||
|
||||
mime-types@2.1.35:
|
||||
resolution: {integrity: sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==}
|
||||
engines: {node: '>= 0.6'}
|
||||
|
||||
mimic-response@1.0.1:
|
||||
resolution: {integrity: sha512-j5EctnkH7amfV/q5Hgmoal1g2QHFJRraOtmx0JpIqkxhBhI/lJSl1nMpQ45hVarwNETOoWEimndZ4QK0RHxuxQ==}
|
||||
engines: {node: '>=4'}
|
||||
@ -1586,6 +1676,19 @@ packages:
|
||||
resolution: {integrity: sha512-8Ofs/AUQh8MaEcrlq5xOX0CQ9ypTF5dl78mjlMNfOK08fzpgTHQRQPBxcPlEtIw0yRpws+Zo/3r+5WRby7u3Gg==}
|
||||
engines: {node: '>= 0.6'}
|
||||
|
||||
node-domexception@1.0.0:
|
||||
resolution: {integrity: sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==}
|
||||
engines: {node: '>=10.5.0'}
|
||||
|
||||
node-fetch@2.7.0:
|
||||
resolution: {integrity: sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==}
|
||||
engines: {node: 4.x || >=6.0.0}
|
||||
peerDependencies:
|
||||
encoding: ^0.1.0
|
||||
peerDependenciesMeta:
|
||||
encoding:
|
||||
optional: true
|
||||
|
||||
node-gyp@11.1.0:
|
||||
resolution: {integrity: sha512-/+7TuHKnBpnMvUQnsYEb0JOozDZqarQbfNuSGLXIjhStMT0fbw7IdSqWgopOP5xhRZE+lsbIvAHcekddruPZgQ==}
|
||||
engines: {node: ^18.17.0 || >=20.5.0}
|
||||
@ -1631,6 +1734,18 @@ packages:
|
||||
once@1.4.0:
|
||||
resolution: {integrity: sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==}
|
||||
|
||||
openai@4.87.3:
|
||||
resolution: {integrity: sha512-d2D54fzMuBYTxMW8wcNmhT1rYKcTfMJ8t+4KjH2KtvYenygITiGBgHoIrzHwnDQWW+C5oCA+ikIR2jgPCFqcKQ==}
|
||||
hasBin: true
|
||||
peerDependencies:
|
||||
ws: ^8.18.0
|
||||
zod: ^3.23.8
|
||||
peerDependenciesMeta:
|
||||
ws:
|
||||
optional: true
|
||||
zod:
|
||||
optional: true
|
||||
|
||||
optionator@0.9.4:
|
||||
resolution: {integrity: sha512-6IpQ7mKUxRcZNLIObR0hz7lxsapSSIYNZJwXPGeF0mTVqGKFIXj1DQcMoT22S3ROcLyY/rz0PWaWZ9ayWmad9g==}
|
||||
engines: {node: '>= 0.8.0'}
|
||||
@ -2026,6 +2141,9 @@ packages:
|
||||
resolution: {integrity: sha512-r0eojU4bI8MnHr8c5bNo7lJDdI2qXlWWJk6a9EAFG7vbhTjElYhBVS3/miuE0uOuoLdb8Mc/rVfsmm6eo5o9GA==}
|
||||
hasBin: true
|
||||
|
||||
tr46@0.0.3:
|
||||
resolution: {integrity: sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==}
|
||||
|
||||
tree-kill@1.2.2:
|
||||
resolution: {integrity: sha512-L0Orpi8qGpRG//Nd+H90vFB+3iHnue1zSSGmNOOCh1GLJ7rUKVwV2HvijphGQS2UmhUZewS9VgvxYIdgr+fG1A==}
|
||||
hasBin: true
|
||||
@ -2065,6 +2183,9 @@ packages:
|
||||
undefsafe@2.0.5:
|
||||
resolution: {integrity: sha512-WxONCrssBM8TSPRqN5EmsjVrsv4A8X12J4ArBiiayv3DyyG3ZlIg6yysuuSYdZsVz3TKcTg2fd//Ujd4CHV1iA==}
|
||||
|
||||
undici-types@5.26.5:
|
||||
resolution: {integrity: sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==}
|
||||
|
||||
undici-types@6.19.8:
|
||||
resolution: {integrity: sha512-ve2KP6f/JnbPBFyobGHuerC9g1FYGn/F8n1LWTwNxCEzd6IfqTwUQcNXgEtmmQ6DlRrC1hrSrBnCZPokRrDHjw==}
|
||||
|
||||
@ -2143,6 +2264,16 @@ packages:
|
||||
yaml:
|
||||
optional: true
|
||||
|
||||
web-streams-polyfill@4.0.0-beta.3:
|
||||
resolution: {integrity: sha512-QW95TCTaHmsYfHDybGMwO5IJIM93I/6vTRk+daHTWFPhwh+C8Cg7j7XyKrwrj8Ib6vYXe0ocYNrmzY4xAAN6ug==}
|
||||
engines: {node: '>= 14'}
|
||||
|
||||
webidl-conversions@3.0.1:
|
||||
resolution: {integrity: sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==}
|
||||
|
||||
whatwg-url@5.0.0:
|
||||
resolution: {integrity: sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==}
|
||||
|
||||
which@2.0.2:
|
||||
resolution: {integrity: sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==}
|
||||
engines: {node: '>= 8'}
|
||||
@ -2806,6 +2937,15 @@ snapshots:
|
||||
dependencies:
|
||||
'@types/node': 22.13.8
|
||||
|
||||
'@types/node-fetch@2.6.12':
|
||||
dependencies:
|
||||
'@types/node': 22.13.8
|
||||
form-data: 4.0.2
|
||||
|
||||
'@types/node@18.19.80':
|
||||
dependencies:
|
||||
undici-types: 5.26.5
|
||||
|
||||
'@types/node@20.17.22':
|
||||
dependencies:
|
||||
undici-types: 6.19.8
|
||||
@ -2929,6 +3069,10 @@ snapshots:
|
||||
|
||||
abbrev@3.0.0: {}
|
||||
|
||||
abort-controller@3.0.0:
|
||||
dependencies:
|
||||
event-target-shim: 5.0.1
|
||||
|
||||
acorn-jsx@5.3.2(acorn@8.14.0):
|
||||
dependencies:
|
||||
acorn: 8.14.0
|
||||
@ -2937,6 +3081,10 @@ snapshots:
|
||||
|
||||
agent-base@7.1.3: {}
|
||||
|
||||
agentkeepalive@4.6.0:
|
||||
dependencies:
|
||||
humanize-ms: 1.2.1
|
||||
|
||||
ajv@6.12.6:
|
||||
dependencies:
|
||||
fast-deep-equal: 3.1.3
|
||||
@ -2965,6 +3113,8 @@ snapshots:
|
||||
|
||||
argparse@2.0.1: {}
|
||||
|
||||
asynckit@0.4.0: {}
|
||||
|
||||
autoprefixer@10.4.20(postcss@8.5.3):
|
||||
dependencies:
|
||||
browserslist: 4.24.4
|
||||
@ -3037,6 +3187,11 @@ snapshots:
|
||||
normalize-url: 6.1.0
|
||||
responselike: 2.0.1
|
||||
|
||||
call-bind-apply-helpers@1.0.2:
|
||||
dependencies:
|
||||
es-errors: 1.3.0
|
||||
function-bind: 1.1.2
|
||||
|
||||
callsites@3.1.0: {}
|
||||
|
||||
camelcase-css@2.0.1: {}
|
||||
@ -3080,6 +3235,10 @@ snapshots:
|
||||
|
||||
color-name@1.1.4: {}
|
||||
|
||||
combined-stream@1.0.8:
|
||||
dependencies:
|
||||
delayed-stream: 1.0.0
|
||||
|
||||
commander@4.1.1: {}
|
||||
|
||||
concat-map@0.0.1: {}
|
||||
@ -3150,6 +3309,8 @@ snapshots:
|
||||
object-keys: 1.1.1
|
||||
optional: true
|
||||
|
||||
delayed-stream@1.0.0: {}
|
||||
|
||||
dequal@2.0.3: {}
|
||||
|
||||
detect-node@2.1.0:
|
||||
@ -3166,6 +3327,12 @@ snapshots:
|
||||
|
||||
dotenv@16.4.7: {}
|
||||
|
||||
dunder-proto@1.0.1:
|
||||
dependencies:
|
||||
call-bind-apply-helpers: 1.0.2
|
||||
es-errors: 1.3.0
|
||||
gopd: 1.2.0
|
||||
|
||||
eastasianwidth@0.2.0: {}
|
||||
|
||||
electron-devtools-installer@4.0.0:
|
||||
@ -3203,11 +3370,20 @@ snapshots:
|
||||
dependencies:
|
||||
is-arrayish: 0.2.1
|
||||
|
||||
es-define-property@1.0.1:
|
||||
optional: true
|
||||
es-define-property@1.0.1: {}
|
||||
|
||||
es-errors@1.3.0:
|
||||
optional: true
|
||||
es-errors@1.3.0: {}
|
||||
|
||||
es-object-atoms@1.1.1:
|
||||
dependencies:
|
||||
es-errors: 1.3.0
|
||||
|
||||
es-set-tostringtag@2.1.0:
|
||||
dependencies:
|
||||
es-errors: 1.3.0
|
||||
get-intrinsic: 1.3.0
|
||||
has-tostringtag: 1.0.2
|
||||
hasown: 2.0.2
|
||||
|
||||
es6-error@4.1.1:
|
||||
optional: true
|
||||
@ -3320,6 +3496,8 @@ snapshots:
|
||||
|
||||
esutils@2.0.3: {}
|
||||
|
||||
event-target-shim@5.0.1: {}
|
||||
|
||||
exponential-backoff@3.1.2: {}
|
||||
|
||||
extract-zip@2.0.1:
|
||||
@ -3381,6 +3559,20 @@ snapshots:
|
||||
cross-spawn: 7.0.6
|
||||
signal-exit: 4.1.0
|
||||
|
||||
form-data-encoder@1.7.2: {}
|
||||
|
||||
form-data@4.0.2:
|
||||
dependencies:
|
||||
asynckit: 0.4.0
|
||||
combined-stream: 1.0.8
|
||||
es-set-tostringtag: 2.1.0
|
||||
mime-types: 2.1.35
|
||||
|
||||
formdata-node@4.4.1:
|
||||
dependencies:
|
||||
node-domexception: 1.0.0
|
||||
web-streams-polyfill: 4.0.0-beta.3
|
||||
|
||||
fraction.js@4.3.7: {}
|
||||
|
||||
fs-extra@8.1.0:
|
||||
@ -3402,6 +3594,24 @@ snapshots:
|
||||
|
||||
get-caller-file@2.0.5: {}
|
||||
|
||||
get-intrinsic@1.3.0:
|
||||
dependencies:
|
||||
call-bind-apply-helpers: 1.0.2
|
||||
es-define-property: 1.0.1
|
||||
es-errors: 1.3.0
|
||||
es-object-atoms: 1.1.1
|
||||
function-bind: 1.1.2
|
||||
get-proto: 1.0.1
|
||||
gopd: 1.2.0
|
||||
has-symbols: 1.1.0
|
||||
hasown: 2.0.2
|
||||
math-intrinsics: 1.1.0
|
||||
|
||||
get-proto@1.0.1:
|
||||
dependencies:
|
||||
dunder-proto: 1.0.1
|
||||
es-object-atoms: 1.1.1
|
||||
|
||||
get-stream@5.2.0:
|
||||
dependencies:
|
||||
pump: 3.0.2
|
||||
@ -3445,8 +3655,7 @@ snapshots:
|
||||
gopd: 1.2.0
|
||||
optional: true
|
||||
|
||||
gopd@1.2.0:
|
||||
optional: true
|
||||
gopd@1.2.0: {}
|
||||
|
||||
got@11.8.6:
|
||||
dependencies:
|
||||
@ -3475,6 +3684,12 @@ snapshots:
|
||||
es-define-property: 1.0.1
|
||||
optional: true
|
||||
|
||||
has-symbols@1.1.0: {}
|
||||
|
||||
has-tostringtag@1.0.2:
|
||||
dependencies:
|
||||
has-symbols: 1.1.0
|
||||
|
||||
hasown@2.0.2:
|
||||
dependencies:
|
||||
function-bind: 1.1.2
|
||||
@ -3504,6 +3719,10 @@ snapshots:
|
||||
transitivePeerDependencies:
|
||||
- supports-color
|
||||
|
||||
humanize-ms@1.2.1:
|
||||
dependencies:
|
||||
ms: 2.1.3
|
||||
|
||||
iconv-lite@0.6.3:
|
||||
dependencies:
|
||||
safer-buffer: 2.1.2
|
||||
@ -3655,6 +3874,8 @@ snapshots:
|
||||
escape-string-regexp: 4.0.0
|
||||
optional: true
|
||||
|
||||
math-intrinsics@1.1.0: {}
|
||||
|
||||
merge2@1.4.1: {}
|
||||
|
||||
micromatch@4.0.8:
|
||||
@ -3662,6 +3883,12 @@ snapshots:
|
||||
braces: 3.0.3
|
||||
picomatch: 2.3.1
|
||||
|
||||
mime-db@1.52.0: {}
|
||||
|
||||
mime-types@2.1.35:
|
||||
dependencies:
|
||||
mime-db: 1.52.0
|
||||
|
||||
mimic-response@1.0.1: {}
|
||||
|
||||
mimic-response@3.1.0: {}
|
||||
@ -3733,6 +3960,14 @@ snapshots:
|
||||
|
||||
negotiator@1.0.0: {}
|
||||
|
||||
node-domexception@1.0.0: {}
|
||||
|
||||
node-fetch@2.7.0(encoding@0.1.13):
|
||||
dependencies:
|
||||
whatwg-url: 5.0.0
|
||||
optionalDependencies:
|
||||
encoding: 0.1.13
|
||||
|
||||
node-gyp@11.1.0:
|
||||
dependencies:
|
||||
env-paths: 2.2.1
|
||||
@ -3784,6 +4019,18 @@ snapshots:
|
||||
dependencies:
|
||||
wrappy: 1.0.2
|
||||
|
||||
openai@4.87.3(encoding@0.1.13):
|
||||
dependencies:
|
||||
'@types/node': 18.19.80
|
||||
'@types/node-fetch': 2.6.12
|
||||
abort-controller: 3.0.0
|
||||
agentkeepalive: 4.6.0
|
||||
form-data-encoder: 1.7.2
|
||||
formdata-node: 4.4.1
|
||||
node-fetch: 2.7.0(encoding@0.1.13)
|
||||
transitivePeerDependencies:
|
||||
- encoding
|
||||
|
||||
optionator@0.9.4:
|
||||
dependencies:
|
||||
deep-is: 0.1.4
|
||||
@ -4199,6 +4446,8 @@ snapshots:
|
||||
|
||||
touch@3.1.1: {}
|
||||
|
||||
tr46@0.0.3: {}
|
||||
|
||||
tree-kill@1.2.2: {}
|
||||
|
||||
ts-api-utils@2.0.1(typescript@5.7.3):
|
||||
@ -4230,6 +4479,8 @@ snapshots:
|
||||
|
||||
undefsafe@2.0.5: {}
|
||||
|
||||
undici-types@5.26.5: {}
|
||||
|
||||
undici-types@6.19.8: {}
|
||||
|
||||
undici-types@6.20.0: {}
|
||||
@ -4277,6 +4528,15 @@ snapshots:
|
||||
jiti: 1.21.7
|
||||
yaml: 2.7.0
|
||||
|
||||
web-streams-polyfill@4.0.0-beta.3: {}
|
||||
|
||||
webidl-conversions@3.0.1: {}
|
||||
|
||||
whatwg-url@5.0.0:
|
||||
dependencies:
|
||||
tr46: 0.0.3
|
||||
webidl-conversions: 3.0.1
|
||||
|
||||
which@2.0.2:
|
||||
dependencies:
|
||||
isexe: 2.0.0
|
||||
|
@@ -91,7 +91,7 @@ const MagicMirror = () => {
      {/* other modules */}
      <WeatherSection />
      <NewsSection items={data || []} />
      <VoiceAssistant greeting={greeting} />
      <VoiceAssistant/>
    </div>
  );
};
@ -1,340 +1,136 @@
|
||||
import { useState, useRef, useCallback } from "react";
|
||||
// src/components/VoiceAssistant.tsx
|
||||
import { useState, useEffect, useRef } from "react";
|
||||
|
||||
interface ProcessState {
|
||||
recording: boolean;
|
||||
transcribing: boolean;
|
||||
generating: boolean;
|
||||
synthesizing: boolean;
|
||||
error?: string;
|
||||
thinking: boolean;
|
||||
speaking: boolean;
|
||||
}
|
||||
type Message = {
|
||||
role: "user" | "assistant" | "system";
|
||||
content: string;
|
||||
};
|
||||
|
||||
interface VoiceAssistantProps {
|
||||
greeting: string;
|
||||
}
|
||||
export const VoiceAssistant = () => {
|
||||
const [input, setInput] = useState("");
|
||||
const [messages, setMessages] = useState<Message[]>([]);
|
||||
const [isLoading, setIsLoading] = useState(false);
|
||||
const currentReplyRef = useRef("");
|
||||
|
||||
const ANALYSER_FFT_SIZE = 128;
|
||||
const VOLUME_SENSITIVITY = 1.5;
|
||||
const SMOOTHING_FACTOR = 0.7;
|
||||
const BAR_COUNT = 12;
|
||||
|
||||
const VoiceAssistant = ({ greeting }: VoiceAssistantProps) => {
|
||||
const [isListening, setIsListening] = useState(false);
|
||||
const [processState, setProcessState] = useState<ProcessState>({
|
||||
recording: false,
|
||||
transcribing: false,
|
||||
generating: false,
|
||||
synthesizing: false,
|
||||
error: undefined,
|
||||
thinking: false,
|
||||
speaking: false,
|
||||
});
|
||||
const [asrText, setAsrText] = useState("");
|
||||
const [answerText, setAnswerText] = useState("");
|
||||
const mediaRecorder = useRef<MediaRecorder | null>(null);
|
||||
const audioChunks = useRef<Blob[]>([]);
|
||||
const audioElement = useRef<HTMLAudioElement>(null);
|
||||
const barsRef = useRef<HTMLDivElement>(null);
|
||||
const mediaStreamRef = useRef<MediaStream | null>(null);
|
||||
const audioContextRef = useRef<AudioContext | null>(null);
|
||||
const analyserRef = useRef<AnalyserNode | null>(null);
|
||||
const animationFrameRef = useRef<number | null>(null);
|
||||
const dataArrayRef = useRef<Uint8Array | null>(null);
|
||||
const lastValuesRef = useRef<number[]>(new Array(BAR_COUNT).fill(10));
|
||||
const updateState = (newState: Partial<ProcessState>) => {
|
||||
setProcessState((prev) => ({ ...prev, ...newState }));
|
||||
};
|
||||
|
||||
const cleanupAudio = useCallback(async () => {
|
||||
mediaStreamRef.current?.getTracks().forEach((track) => track.stop());
|
||||
if (audioContextRef.current?.state !== "closed") {
|
||||
await audioContextRef.current?.close();
|
||||
}
|
||||
if (animationFrameRef.current) {
|
||||
cancelAnimationFrame(animationFrameRef.current);
|
||||
animationFrameRef.current = null;
|
||||
}
|
||||
}, []);
|
||||
const initializeAudioContext = useCallback(() => {
|
||||
const AudioContextClass =
|
||||
window.AudioContext || (window as any).webkitAudioContext;
|
||||
audioContextRef.current = new AudioContextClass();
|
||||
analyserRef.current = audioContextRef.current.createAnalyser();
|
||||
analyserRef.current.fftSize = ANALYSER_FFT_SIZE;
|
||||
analyserRef.current.smoothingTimeConstant = SMOOTHING_FACTOR;
|
||||
dataArrayRef.current = new Uint8Array(
|
||||
analyserRef.current.frequencyBinCount
|
||||
);
|
||||
}, []);
|
||||
|
||||
const startRecording = async () => {
|
||||
try {
|
||||
const stream = await navigator.mediaDevices.getUserMedia({
|
||||
audio: { sampleRate: 16000, channelCount: 1, sampleSize: 16 },
|
||||
});
|
||||
|
||||
mediaRecorder.current = new MediaRecorder(stream);
|
||||
audioChunks.current = [];
|
||||
|
||||
mediaRecorder.current.ondataavailable = (e) => {
|
||||
audioChunks.current.push(e.data);
|
||||
};
|
||||
|
||||
mediaRecorder.current.start(500);
|
||||
updateState({ recording: true, error: undefined });
|
||||
} catch (err) {
|
||||
updateState({ error: "麦克风访问失败,请检查权限设置" });
|
||||
}
|
||||
};
|
||||
|
||||
const stopRecording = async () => {
|
||||
if (!mediaRecorder.current) return;
|
||||
mediaRecorder.current.stop();
|
||||
// 更新状态为未录音
|
||||
updateState({ recording: false });
|
||||
mediaRecorder.current.onstop = async () => {
|
||||
try {
|
||||
const audioBlob = new Blob(audioChunks.current, { type: "audio/wav" });
|
||||
await processAudio(audioBlob);
|
||||
} finally {
|
||||
audioChunks.current = [];
|
||||
}
|
||||
};
|
||||
};
|
||||
|
||||
const processAudio = async (audioBlob: Blob) => {
|
||||
// 处理音频的函数
|
||||
const formData = new FormData();
|
||||
formData.append("audio", audioBlob, "recording.wav");
|
||||
try {
|
||||
updateState({ transcribing: true }); // 设置转录状态为true
|
||||
// 发送请求到后端
|
||||
const asrResponse = await fetch("http://localhost:5000/asr", {
|
||||
method: "POST",
|
||||
body: formData,
|
||||
});
|
||||
// 如果请求失败,则抛出错误
|
||||
if (!asrResponse.ok) throw new Error("语音识别失败");
|
||||
// 获取后端返回的文本
|
||||
const asrData = await asrResponse.json();
|
||||
setAsrText(asrData.asr_text);
|
||||
updateState({ transcribing: false, thinking: true });
|
||||
|
||||
// 发送请求到后端,生成回答
|
||||
const generateResponse = await fetch("http://localhost:5000/generate", {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
body: JSON.stringify({ asr_text: asrData.asr_text }),
|
||||
});
|
||||
|
||||
if (!generateResponse.ok) throw new Error("生成回答失败");
|
||||
|
||||
const generateData = await generateResponse.json(); //获取生成的回答,设置为answerText
|
||||
setAnswerText(generateData.answer_text);
|
||||
updateState({ thinking: false, synthesizing: true });
|
||||
|
||||
// 播放合成的音频,增加可视化效果
|
||||
if (audioElement.current) {
|
||||
//设置说话状态
|
||||
updateState({ synthesizing: false, speaking: true }); // 替代setIsSpeaking(true)
|
||||
initializeAudioContext(); // 初始化音频上下文
|
||||
// 播放合成的音频
|
||||
//audioElement.current.src = `http://localhost:5000${generateData.audio_url}`;
|
||||
const audio = new Audio(
|
||||
`http://localhost:5000${generateData.audio_url}`
|
||||
); // 创建音频元素
|
||||
const source = audioContextRef.current!.createMediaElementSource(audio); // 创建音频源
|
||||
source.connect(analyserRef.current!); // 连接到分析器
|
||||
analyserRef.current!.connect(audioContextRef.current!.destination); // 连接到目标
|
||||
//播放结束设置说话状态为false
|
||||
audio.onended = () => {
|
||||
updateState({ speaking: false }); // 替代setIsSpeaking(false)
|
||||
};
|
||||
try {
|
||||
await audio.play(); // 播放音频
|
||||
startVisualization(); // 开始可视化效果
|
||||
} catch (err) {
|
||||
console.error("播放失败:", err);
|
||||
updateState({ error: "音频播放失败" });
|
||||
useEffect(() => {
|
||||
const handleStreamChunk = (_: unknown, chunk: string) => {
|
||||
currentReplyRef.current += chunk;
|
||||
setMessages(prev => {
|
||||
const lastMessage = prev[prev.length - 1];
|
||||
if (lastMessage?.role === "assistant") {
|
||||
return [
|
||||
...prev.slice(0, -1),
|
||||
{ role: "assistant", content: currentReplyRef.current }
|
||||
];
|
||||
}
|
||||
}
|
||||
} catch (err) {
|
||||
updateState({ error: err instanceof Error ? err.message : "未知错误" });
|
||||
} finally {
|
||||
updateState({
|
||||
transcribing: false,
|
||||
generating: false,
|
||||
synthesizing: false,
|
||||
return [...prev, { role: "assistant", content: currentReplyRef.current }];
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
const getStatusText = () => {
|
||||
if (processState.error) return processState.error;
|
||||
if (processState.recording) return "请说... 🎤"; //录音
|
||||
if (processState.transcribing) return "识别音频中... 🔍"; //语音转文字
|
||||
if (processState.thinking) return "正在思考中... 💡"; // 等待AI回复
|
||||
if (processState.generating) return "生成回答中... 💡"; // AI以文字形式回复中//未使用
|
||||
if (processState.synthesizing) return "整理话语中... 🎶"; //收到AI回复,正在合成语音//未使用
|
||||
if (processState.speaking) return "说话中... 🗣📢"; // 播放合成后的语音
|
||||
return "对话未开始🎙️";
|
||||
};
|
||||
|
||||
const startVisualization = useCallback(() => {
|
||||
if (!analyserRef.current || !dataArrayRef.current || !barsRef.current) {
|
||||
console.warn("可视化组件未就绪");
|
||||
return;
|
||||
}
|
||||
|
||||
if (animationFrameRef.current) {
|
||||
cancelAnimationFrame(animationFrameRef.current);
|
||||
animationFrameRef.current = null;
|
||||
}
|
||||
|
||||
const bufferLength = analyserRef.current.frequencyBinCount;
|
||||
const updateBars = () => {
|
||||
try {
|
||||
analyserRef.current!.getByteFrequencyData(dataArrayRef.current!);
|
||||
|
||||
const bars = barsRef.current!.children;
|
||||
for (let i = 0; i < bars.length; i++) {
|
||||
const bar = bars[i] as HTMLElement;
|
||||
const dataIndex = Math.floor((i / BAR_COUNT) * (bufferLength / 2));
|
||||
const rawValue =
|
||||
(dataArrayRef.current![dataIndex] / 255) * 100 * VOLUME_SENSITIVITY;
|
||||
|
||||
const smoothValue = Math.min(
|
||||
100,
|
||||
Math.max(10, rawValue * 0.6 + lastValuesRef.current[i] * 0.4)
|
||||
);
|
||||
lastValuesRef.current[i] = smoothValue;
|
||||
|
||||
bar.style.cssText = `
|
||||
height: ${smoothValue}%;
|
||||
transform: scaleY(${0.8 + (smoothValue / 100) * 0.6});
|
||||
transition: ${i === 0 ? "none" : "height 50ms linear"};
|
||||
`;
|
||||
}
|
||||
|
||||
animationFrameRef.current = requestAnimationFrame(updateBars);
|
||||
} catch (err) {
|
||||
console.error("可视化更新失败:", err);
|
||||
}
|
||||
};
|
||||
|
||||
animationFrameRef.current = requestAnimationFrame(updateBars);
|
||||
}, [analyserRef, dataArrayRef, barsRef]);
|
||||
const handleStreamEnd = () => {
|
||||
currentReplyRef.current = "";
|
||||
setIsLoading(false);
|
||||
};
|
||||
|
||||
// 切换监听状态
|
||||
const toggleListening = useCallback(async () => {
|
||||
if (isListening) {
|
||||
// 如果正在监听
|
||||
await cleanupAudio(); // 清理现有音频
|
||||
} else {
|
||||
// 否则
|
||||
try {
|
||||
// 尝试
|
||||
await cleanupAudio(); // 清理现有音频
|
||||
initializeAudioContext(); // 初始化音频上下文
|
||||
const stream = await navigator.mediaDevices.getUserMedia({
|
||||
audio: { noiseSuppression: true, echoCancellation: true },
|
||||
});
|
||||
mediaStreamRef.current = stream; // 设置媒体流
|
||||
const source = audioContextRef.current!.createMediaStreamSource(stream);
|
||||
source.connect(analyserRef.current!); // 只连接到分析器,不连接到目标
|
||||
//analyserRef.current!.connect(audioContextRef.current!.destination); // 连接到目标
|
||||
startVisualization(); // 开始可视化
|
||||
} catch (err) {
|
||||
console.error("初始化失败:", err);
|
||||
updateState({ error: "音频初始化失败" });
|
||||
}
|
||||
const handleStreamError = (_: unknown, errorMsg: string) => {
|
||||
setIsLoading(false);
|
||||
setMessages(prev => [
|
||||
...prev,
|
||||
{ role: "assistant", content: `Error: ${errorMsg}` }
|
||||
]);
|
||||
};
|
||||
|
||||
window.electronAPI.onChatStreamChunk(handleStreamChunk);
|
||||
window.electronAPI.onChatStreamEnd(handleStreamEnd);
|
||||
window.electronAPI.onChatStreamError(handleStreamError);
|
||||
|
||||
return () => {
|
||||
window.electronAPI.removeListener("chat-stream-chunk", handleStreamChunk);
|
||||
window.electronAPI.removeListener("chat-stream-end", handleStreamEnd);
|
||||
window.electronAPI.removeListener("chat-stream-error", handleStreamError);
|
||||
};
|
||||
}, []);
|
||||
|
||||
const handleSubmit = async (e: React.FormEvent) => {
|
||||
e.preventDefault();
|
||||
if (!input.trim() || isLoading) return;
|
||||
|
||||
const userMessage: Message = { role: "user", content: input };
|
||||
const newMessages = [...messages, userMessage];
|
||||
|
||||
setMessages(newMessages);
|
||||
setInput("");
|
||||
setIsLoading(true);
|
||||
currentReplyRef.current = "";
|
||||
|
||||
try {
|
||||
await window.electronAPI.chatWithDeepseek(newMessages);
|
||||
} catch (error) {
|
||||
console.error("Chat error:", error);
|
||||
setIsLoading(false);
|
||||
}
|
||||
setIsListening((prev) => !prev);
|
||||
}, [isListening, cleanupAudio, initializeAudioContext, startVisualization]);
|
||||
};
|
||||
|
||||
return (
|
||||
<div className="absolute top-1/2 left-1/2 -translate-x-1/2 -translate-y-1/2 text-center w-full px-4">
|
||||
{/* 问候语 */}
|
||||
<h1 className="text-6xl font-light mb-8 drop-shadow-glow">{greeting}</h1>
|
||||
{/* 较小较细的字体显示{asrText || "等待语音输入..."}*/}
|
||||
<h3 className="text-sm font-light mb-8">{asrText || "等待中..."}</h3>
|
||||
{/*较小较细的字体显示{answerText || "等待生成回答..."}*/}
|
||||
<h2 className="text-sm font-light mb-8">
|
||||
{answerText || "AI助手待命中"}
|
||||
</h2>
|
||||
|
||||
{/* 音频波形可视化 */}
|
||||
<div className="relative inline-block">
|
||||
<button
|
||||
onClick={() => {
|
||||
toggleListening();
|
||||
processState.recording ? stopRecording() : startRecording();
|
||||
}}
|
||||
className={[
|
||||
"group relative flex h-20 items-end gap-1.5 rounded-[32px] p-6",
|
||||
"transition-all duration-300 ease-[cubic-bezier(0.68,-0.55,0.27,1.55)]",
|
||||
].join(" ")}
|
||||
style={{
|
||||
backdropFilter: "blur(16px)",
|
||||
WebkitBackdropFilter: "blur(16px)",
|
||||
}}
|
||||
>
|
||||
{/* 增强版音频波形 */}
|
||||
<div ref={barsRef} className="flex h-full w-full items-end gap-2.5">
|
||||
{[...Array(BAR_COUNT)].map((_, index) => (
|
||||
<div
|
||||
key={index}
|
||||
className={[
|
||||
"w-2.5 rounded-lg",
|
||||
"bg-gradient-to-t from-cyan-400 via-blue-400/80 to-purple-500",
|
||||
"transition-all duration-200 ease-out",
|
||||
!processState.recording && !processState.speaking ? "animate-audio-wave" : "",
|
||||
].join(" ")}
|
||||
style={{
|
||||
height: "12%",
|
||||
animationDelay: `${index * 0.08}s`, // 保持原有延迟设置
|
||||
boxShadow: `
|
||||
0 0 12px -2px rgba(52,211,254,0.6),
|
||||
inset 0 2px 4px rgba(255,255,255,0.2)
|
||||
`,
|
||||
}}
|
||||
/>
|
||||
))}
|
||||
<div className="p-4 border rounded-lg bg-gray-900 border-gray-700 shadow-xl">
|
||||
<div className="mb-4 h-64 overflow-y-auto">
|
||||
{messages.map((msg, index) => (
|
||||
<div
|
||||
key={index}
|
||||
className={`mb-3 p-3 rounded-lg ${
|
||||
msg.role === "user"
|
||||
? "bg-blue-800/30 text-blue-200"
|
||||
: "bg-gray-800/50 text-gray-300"
|
||||
} backdrop-blur-sm`}
|
||||
>
|
||||
<strong className="font-semibold text-sm">
|
||||
{msg.role === "user" ? "You" : "AI"}:
|
||||
</strong>
|
||||
<p className="mt-1 text-gray-100 leading-relaxed whitespace-pre-wrap">
|
||||
{msg.content}
|
||||
</p>
|
||||
</div>
|
||||
</button>
|
||||
))}
|
||||
{isLoading && (
|
||||
<div className="mb-3 p-3 rounded-lg bg-gray-800/50 backdrop-blur-sm">
|
||||
<div className="flex items-center text-gray-400">
|
||||
<span className="flex-1 font-semibold">Thinking...</span>
|
||||
<div className="flex space-x-1">
|
||||
<div className="w-2 h-2 bg-blue-500 rounded-full animate-bounce"></div>
|
||||
<div className="w-2 h-2 bg-blue-500 rounded-full animate-bounce delay-100"></div>
|
||||
<div className="w-2 h-2 bg-blue-500 rounded-full animate-bounce delay-200"></div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
|
||||
{/* 底部状态信息 */}
|
||||
<div className="mt-8 text-xs text-gray-500 space-y-1">
|
||||
<p>支持唤醒词:"你好千问"</p>
|
||||
<div className="flex items-center justify-center gap-1.5">
|
||||
<div className="relative flex items-center">
|
||||
{/* 呼吸圆点指示器 */}
|
||||
<div
|
||||
className={`w-2 h-2 rounded-full ${
|
||||
isListening ? "bg-green-400 animate-breath" : "bg-gray-400"
|
||||
}`}
|
||||
/>
|
||||
{/* 扩散波纹效果 */}
|
||||
{isListening && (
|
||||
<div className="absolute inset-0 rounded-full bg-green-400/20 animate-ping" />
|
||||
)}
|
||||
</div>
|
||||
<span>{getStatusText()}</span>
|
||||
</div>
|
||||
|
||||
{/* 音频播放 */}
|
||||
<audio
|
||||
ref={audioElement}
|
||||
//controls={process.env.NODE_ENV === "development"} // 开发环境显示 controls
|
||||
//onEnded={() => updateState({ ,设置animate-audio-wave显示状态为true
|
||||
<form onSubmit={handleSubmit} className="flex gap-3">
|
||||
<input
|
||||
type="text"
|
||||
value={input}
|
||||
onChange={(e) => setInput(e.target.value)}
|
||||
className="flex-1 px-4 py-2 bg-gray-800 border border-gray-700 rounded-xl
|
||||
text-gray-200 placeholder-gray-500 focus:outline-none
|
||||
focus:border-blue-500 focus:ring-2 focus:ring-blue-500/30
|
||||
transition-all duration-200"
|
||||
placeholder="输入消息..."
|
||||
disabled={isLoading}
|
||||
/>
|
||||
</div>
|
||||
<button
|
||||
type="submit"
|
||||
className={`px-6 py-2 rounded-xl font-medium transition-all duration-200
|
||||
${
|
||||
isLoading
|
||||
? "bg-gray-700 text-gray-500 cursor-not-allowed"
|
||||
: "bg-blue-600 hover:bg-blue-700 text-white"
|
||||
}`}
|
||||
disabled={isLoading}
|
||||
>
|
||||
{isLoading ? "发送中..." : "发送"}
|
||||
</button>
|
||||
</form>
|
||||
</div>
|
||||
);
|
||||
};
|
||||
|
||||
export default VoiceAssistant;
|
||||
export default VoiceAssistant;
|
420  src/components/VoiceAssistant1.0.tsx  Normal file
@ -0,0 +1,420 @@
|
||||
import { useState, useRef, useCallback } from "react";
|
||||
|
||||
interface ProcessState {
|
||||
recording: boolean;
|
||||
transcribing: boolean;
|
||||
generating: boolean;
|
||||
synthesizing: boolean;
|
||||
error?: string;
|
||||
thinking: boolean;
|
||||
speaking: boolean;
|
||||
}
|
||||
|
||||
interface VoiceAssistantProps {
|
||||
greeting: string;
|
||||
}
|
||||
|
||||
const ANALYSER_FFT_SIZE = 128;
|
||||
const VOLUME_SENSITIVITY = 1.5;
|
||||
const SMOOTHING_FACTOR = 0.7;
|
||||
const BAR_COUNT = 12;
|
||||
|
||||
const VoiceAssistant = ({ greeting }: VoiceAssistantProps) => {
|
||||
const [isListening, setIsListening] = useState(false);
|
||||
const [processState, setProcessState] = useState<ProcessState>({
|
||||
recording: false,
|
||||
transcribing: false,
|
||||
generating: false,
|
||||
synthesizing: false,
|
||||
error: undefined,
|
||||
thinking: false,
|
||||
speaking: false,
|
||||
});
|
||||
const [asrText, setAsrText] = useState("");
|
||||
const [answerText, setAnswerText] = useState("");
|
||||
const mediaRecorder = useRef<MediaRecorder | null>(null);
|
||||
const audioChunks = useRef<Blob[]>([]);
|
||||
const audioElement = useRef<HTMLAudioElement>(null);
|
||||
const barsRef = useRef<HTMLDivElement>(null);
|
||||
const mediaStreamRef = useRef<MediaStream | null>(null);
|
||||
const audioContextRef = useRef<AudioContext | null>(null);
|
||||
const analyserRef = useRef<AnalyserNode | null>(null);
|
||||
const animationFrameRef = useRef<number | null>(null);
|
||||
const dataArrayRef = useRef<Uint8Array | null>(null);
|
||||
const lastValuesRef = useRef<number[]>(new Array(BAR_COUNT).fill(10));
|
||||
const updateState = (newState: Partial<ProcessState>) => {
|
||||
setProcessState((prev) => ({ ...prev, ...newState }));
|
||||
};
|
||||
|
||||
const cleanupAudio = useCallback(async () => {
|
||||
mediaStreamRef.current?.getTracks().forEach((track) => track.stop());
|
||||
if (audioContextRef.current?.state !== "closed") {
|
||||
await audioContextRef.current?.close();
|
||||
}
|
||||
if (animationFrameRef.current) {
|
||||
cancelAnimationFrame(animationFrameRef.current);
|
||||
animationFrameRef.current = null;
|
||||
}
|
||||
}, []);
|
||||
const initializeAudioContext = useCallback(() => {
|
||||
const AudioContextClass =
|
||||
window.AudioContext || (window as any).webkitAudioContext;
|
||||
audioContextRef.current = new AudioContextClass();
|
||||
analyserRef.current = audioContextRef.current.createAnalyser();
|
||||
analyserRef.current.fftSize = ANALYSER_FFT_SIZE;
|
||||
analyserRef.current.smoothingTimeConstant = SMOOTHING_FACTOR;
|
||||
dataArrayRef.current = new Uint8Array(
|
||||
analyserRef.current.frequencyBinCount
|
||||
);
|
||||
}, []);
|
||||
|
||||
const startRecording = async () => {
|
||||
try {
|
||||
const stream = await navigator.mediaDevices.getUserMedia({
|
||||
audio: { sampleRate: 16000, channelCount: 1, sampleSize: 16 },
|
||||
});
|
||||
|
||||
mediaRecorder.current = new MediaRecorder(stream);
|
||||
audioChunks.current = [];
|
||||
|
||||
mediaRecorder.current.ondataavailable = (e) => {
|
||||
audioChunks.current.push(e.data);
|
||||
};
|
||||
|
||||
mediaRecorder.current.start(500);
|
||||
updateState({ recording: true, error: undefined });
|
||||
} catch (err) {
|
||||
updateState({ error: "麦克风访问失败,请检查权限设置" });
|
||||
}
|
||||
};
|
||||
|
||||
const stopRecording = async () => {
|
||||
if (!mediaRecorder.current) return;
|
||||
mediaRecorder.current.stop();
|
||||
// 更新状态为未录音
|
||||
updateState({ recording: false });
|
||||
mediaRecorder.current.onstop = async () => {
|
||||
try {
|
||||
const audioBlob = new Blob(audioChunks.current, { type: "audio/wav" });
|
||||
await processAudio(audioBlob);
|
||||
} finally {
|
||||
audioChunks.current = [];
|
||||
}
|
||||
};
|
||||
};
|
||||
|
||||
/*const processAudio = async (audioBlob: Blob) => {
|
||||
// 处理音频的函数
|
||||
const formData = new FormData();
|
||||
formData.append("audio", audioBlob, "recording.wav");
|
||||
try {
|
||||
updateState({ transcribing: true }); // 设置转录状态为true
|
||||
// 发送请求到后端
|
||||
const asrResponse = await fetch("http://localhost:5000/asr", {
|
||||
method: "POST",
|
||||
body: formData,
|
||||
});
|
||||
// 如果请求失败,则抛出错误
|
||||
if (!asrResponse.ok) throw new Error("语音识别失败");
|
||||
// 获取后端返回的文本
|
||||
const asrData = await asrResponse.json();
|
||||
setAsrText(asrData.asr_text);
|
||||
updateState({ transcribing: false, thinking: true });
|
||||
|
||||
// 发送请求到后端,生成回答
|
||||
const generateResponse = await fetch("http://localhost:5000/generate", {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
body: JSON.stringify({ asr_text: asrData.asr_text }),
|
||||
});
|
||||
|
||||
if (!generateResponse.ok) throw new Error("生成回答失败");
|
||||
|
||||
const generateData = await generateResponse.json(); //获取生成的回答,设置为answerText
|
||||
setAnswerText(generateData.answer_text);
|
||||
updateState({ thinking: false, synthesizing: true });
|
||||
|
||||
// 播放合成的音频,增加可视化效果
|
||||
if (audioElement.current) {
|
||||
//设置说话状态
|
||||
updateState({ synthesizing: false, speaking: true }); // 替代setIsSpeaking(true)
|
||||
initializeAudioContext(); // 初始化音频上下文
|
||||
// 播放合成的音频
|
||||
//audioElement.current.src = `http://localhost:5000${generateData.audio_url}`;
|
||||
const audio = new Audio(
|
||||
`http://localhost:5000${generateData.audio_url}`
|
||||
); // 创建音频元素
|
||||
const source = audioContextRef.current!.createMediaElementSource(audio); // 创建音频源
|
||||
source.connect(analyserRef.current!); // 连接到分析器
|
||||
analyserRef.current!.connect(audioContextRef.current!.destination); // 连接到目标
|
||||
//播放结束设置说话状态为false
|
||||
audio.onended = () => {
|
||||
updateState({ speaking: false }); // 替代setIsSpeaking(false)
|
||||
};
|
||||
try {
|
||||
await audio.play(); // 播放音频
|
||||
startVisualization(); // 开始可视化效果
|
||||
} catch (err) {
|
||||
console.error("播放失败:", err);
|
||||
updateState({ error: "音频播放失败" });
|
||||
}
|
||||
}
|
||||
} catch (err) {
|
||||
updateState({ error: err instanceof Error ? err.message : "未知错误" });
|
||||
} finally {
|
||||
updateState({
|
||||
transcribing: false,
|
||||
generating: false,
|
||||
synthesizing: false,
|
||||
});
|
||||
}
|
||||
};*/
|
||||
const processAudio = async (audioBlob: Blob) => {
|
||||
const formData = new FormData();
|
||||
formData.append("audio", audioBlob, "recording.wav");
|
||||
|
||||
try {
|
||||
updateState({ transcribing: true });
|
||||
|
||||
// Step 1: 语音识别
|
||||
const asrResponse = await fetch("http://localhost:5000/asr", {
|
||||
method: "POST",
|
||||
body: formData,
|
||||
});
|
||||
if (!asrResponse.ok) throw new Error("语音识别失败");
|
||||
const asrData = await asrResponse.json();
|
||||
setAsrText(asrData.asr_text);
|
||||
updateState({ transcribing: false, thinking: true });
|
||||
|
||||
// Step 2: 获取大模型回复(新增独立请求)
|
||||
const generateTextResponse = await fetch("http://localhost:5000/generate_text", {
|
||||
method: "POST",
|
||||
headers: { "Content-Type": "application/json" },
|
||||
body: JSON.stringify({ asr_text: asrData.asr_text }),
|
||||
});
|
||||
if (!generateTextResponse.ok) throw new Error("生成回答失败");
|
||||
const textData = await generateTextResponse.json();
|
||||
setAnswerText(textData.answer_text); // 立即显示回复文本
|
||||
updateState({ thinking: false, synthesizing: true });
|
||||
|
||||
// Step 3: 单独请求语音合成(新增)
|
||||
const generateAudioResponse = await fetch("http://localhost:5000/generate_audio", {
|
||||
method: "POST",
|
||||
headers: { "Content-Type": "application/json" },
|
||||
body: JSON.stringify({ answer_text: textData.answer_text }),
|
||||
});
|
||||
if (!generateAudioResponse.ok) throw new Error("语音合成失败");
|
||||
const audioData = await generateAudioResponse.json();
|
||||
|
||||
// 播放音频
|
||||
if (audioElement.current) {
|
||||
updateState({ synthesizing: false, speaking: true });
|
||||
initializeAudioContext();
|
||||
|
||||
const audio = new Audio(`http://localhost:5000${audioData.audio_url}`);
|
||||
const source = audioContextRef.current!.createMediaElementSource(audio);
|
||||
source.connect(analyserRef.current!);
|
||||
analyserRef.current!.connect(audioContextRef.current!.destination);
|
||||
|
||||
audio.onended = () => {
|
||||
updateState({ speaking: false });
|
||||
};
|
||||
|
||||
try {
|
||||
await audio.play();
|
||||
startVisualization();
|
||||
} catch (err) {
|
||||
console.error("播放失败:", err);
|
||||
updateState({ error: "音频播放失败" });
|
||||
}
|
||||
}
|
||||
} catch (err) {
|
||||
updateState({ error: err instanceof Error ? err.message : "未知错误" });
|
||||
} finally {
|
||||
updateState({
|
||||
transcribing: false,
|
||||
generating: false,
|
||||
synthesizing: false,
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
// 状态提示更新
|
||||
const getStatusText = () => {
|
||||
if (processState.error) return processState.error;
|
||||
if (processState.recording) return "请说... 🎤";
|
||||
if (processState.transcribing) return "识别音频中... 🔍";
|
||||
if (processState.thinking) return "正在思考中... 💡";
|
||||
if (processState.synthesizing) return "合成语音中... 🎶"; // 更新状态提示
|
||||
if (processState.speaking) return "说话中... 🗣📢";
|
||||
return "对话未开始🎙️";
|
||||
};
|
||||
|
||||
/*const getStatusText = () => {
|
||||
if (processState.error) return processState.error;
|
||||
if (processState.recording) return "请说... 🎤"; //录音
|
||||
if (processState.transcribing) return "识别音频中... 🔍"; //语音转文字
|
||||
if (processState.thinking) return "正在思考中... 💡"; // 等待AI回复
|
||||
if (processState.generating) return "生成回答中... 💡"; // AI以文字形式回复中//未使用
|
||||
if (processState.synthesizing) return "整理话语中... 🎶"; //收到AI回复,正在合成语音//未使用
|
||||
if (processState.speaking) return "说话中... 🗣📢"; // 播放合成后的语音
|
||||
return "对话未开始🎙️";
|
||||
};*/
|
||||
|
||||
const startVisualization = useCallback(() => {
|
||||
if (!analyserRef.current || !dataArrayRef.current || !barsRef.current) {
|
||||
console.warn("可视化组件未就绪");
|
||||
return;
|
||||
}
|
||||
|
||||
if (animationFrameRef.current) {
|
||||
cancelAnimationFrame(animationFrameRef.current);
|
||||
animationFrameRef.current = null;
|
||||
}
|
||||
|
||||
const bufferLength = analyserRef.current.frequencyBinCount;
|
||||
const updateBars = () => {
|
||||
try {
|
||||
analyserRef.current!.getByteFrequencyData(dataArrayRef.current!);
|
||||
|
||||
const bars = barsRef.current!.children;
|
||||
for (let i = 0; i < bars.length; i++) {
|
||||
const bar = bars[i] as HTMLElement;
|
||||
const dataIndex = Math.floor((i / BAR_COUNT) * (bufferLength / 2));
|
||||
const rawValue =
|
||||
(dataArrayRef.current![dataIndex] / 255) * 100 * VOLUME_SENSITIVITY;
|
||||
|
||||
const smoothValue = Math.min(
|
||||
100,
|
||||
Math.max(10, rawValue * 0.6 + lastValuesRef.current[i] * 0.4)
|
||||
);
|
||||
lastValuesRef.current[i] = smoothValue;
|
||||
|
||||
bar.style.cssText = `
|
||||
height: ${smoothValue}%;
|
||||
transform: scaleY(${0.8 + (smoothValue / 100) * 0.6});
|
||||
transition: ${i === 0 ? "none" : "height 50ms linear"};
|
||||
`;
|
||||
}
|
||||
|
||||
animationFrameRef.current = requestAnimationFrame(updateBars);
|
||||
} catch (err) {
|
||||
console.error("可视化更新失败:", err);
|
||||
}
|
||||
};
|
||||
|
||||
animationFrameRef.current = requestAnimationFrame(updateBars);
|
||||
}, [analyserRef, dataArrayRef, barsRef]);
|
||||
|
||||
// 切换监听状态
|
||||
const toggleListening = useCallback(async () => {
|
||||
if (isListening) {
|
||||
// 如果正在监听
|
||||
await cleanupAudio(); // 清理现有音频
|
||||
} else {
|
||||
// 否则
|
||||
try {
|
||||
// 尝试
|
||||
await cleanupAudio(); // 清理现有音频
|
||||
initializeAudioContext(); // 初始化音频上下文
|
||||
const stream = await navigator.mediaDevices.getUserMedia({
|
||||
audio: { noiseSuppression: true, echoCancellation: true },
|
||||
});
|
||||
mediaStreamRef.current = stream; // 设置媒体流
|
||||
const source = audioContextRef.current!.createMediaStreamSource(stream);
|
||||
source.connect(analyserRef.current!); // 只连接到分析器,不连接到目标
|
||||
//analyserRef.current!.connect(audioContextRef.current!.destination); // 连接到目标
|
||||
startVisualization(); // 开始可视化
|
||||
} catch (err) {
|
||||
console.error("初始化失败:", err);
|
||||
updateState({ error: "音频初始化失败" });
|
||||
}
|
||||
}
|
||||
setIsListening((prev) => !prev);
|
||||
}, [isListening, cleanupAudio, initializeAudioContext, startVisualization]);
|
||||
|
||||
return (
|
||||
<div className="absolute top-1/2 left-1/2 -translate-x-1/2 -translate-y-1/2 text-center w-full px-4">
|
||||
{/* 问候语 */}
|
||||
<h1 className="text-6xl font-light mb-8 drop-shadow-glow">{greeting}</h1>
|
||||
{/* 较小较细的字体显示{asrText || "等待语音输入..."}*/}
|
||||
<h3 className="text-sm font-light mb-8">{asrText || "等待中..."}</h3>
|
||||
{/*较小较细的字体显示{answerText || "等待生成回答..."}*/}
|
||||
<h2 className="text-sm font-light mb-8">
|
||||
{answerText || "AI助手待命中"}
|
||||
</h2>
|
||||
|
||||
{/* 音频波形可视化 */}
|
||||
<div className="relative inline-block">
|
||||
<button
|
||||
onClick={() => {
|
||||
toggleListening();
|
||||
processState.recording ? stopRecording() : startRecording();
|
||||
}}
|
||||
className={[
|
||||
"group relative flex h-20 items-end gap-1.5 rounded-[32px] p-6",
|
||||
"transition-all duration-300 ease-[cubic-bezier(0.68,-0.55,0.27,1.55)]",
|
||||
].join(" ")}
|
||||
style={{
|
||||
backdropFilter: "blur(16px)",
|
||||
WebkitBackdropFilter: "blur(16px)",
|
||||
}}
|
||||
>
|
||||
{/* 增强版音频波形 */}
|
||||
<div ref={barsRef} className="flex h-full w-full items-end gap-2.5">
|
||||
{[...Array(BAR_COUNT)].map((_, index) => (
|
||||
<div
|
||||
key={index}
|
||||
className={[
|
||||
"w-2.5 rounded-lg",
|
||||
"bg-gradient-to-t from-cyan-400 via-blue-400/80 to-purple-500",
|
||||
"transition-all duration-200 ease-out",
|
||||
!processState.recording && !processState.speaking ? "animate-audio-wave" : "",
|
||||
].join(" ")}
|
||||
style={{
|
||||
height: "12%",
|
||||
animationDelay: `${index * 0.08}s`, // 保持原有延迟设置
|
||||
boxShadow: `
|
||||
0 0 12px -2px rgba(52,211,254,0.6),
|
||||
inset 0 2px 4px rgba(255,255,255,0.2)
|
||||
`,
|
||||
}}
|
||||
/>
|
||||
))}
|
||||
</div>
|
||||
</button>
|
||||
</div>
|
||||
|
||||
{/* 底部状态信息 */}
|
||||
<div className="mt-8 text-xs text-gray-500 space-y-1">
|
||||
<p>支持唤醒词:"你好千问"</p>
|
||||
<div className="flex items-center justify-center gap-1.5">
|
||||
<div className="relative flex items-center">
|
||||
{/* 呼吸圆点指示器 */}
|
||||
<div
|
||||
className={`w-2 h-2 rounded-full ${
|
||||
isListening ? "bg-green-400 animate-breath" : "bg-gray-400"
|
||||
}`}
|
||||
/>
|
||||
{/* 扩散波纹效果 */}
|
||||
{isListening && (
|
||||
<div className="absolute inset-0 rounded-full bg-green-400/20 animate-ping" />
|
||||
)}
|
||||
</div>
|
||||
<span>{getStatusText()}</span>
|
||||
</div>
|
||||
|
||||
{/* 音频播放 */}
|
||||
<audio
|
||||
ref={audioElement}
|
||||
//controls={process.env.NODE_ENV === "development"} // 开发环境显示 controls
|
||||
//onEnded={() => updateState({ ,设置animate-audio-wave显示状态为true
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
};
|
||||
|
||||
export default VoiceAssistant;
|
@@ -59,7 +59,7 @@ const WeatherSection: FC = () => {
      </div>

      <div className="flex items-center gap-4">
        <WeatherIcon type={data.realtime.skycon} className="!w-16 h-16" />
        <WeatherIcon type={data.realtime.skycon} className="!w-16 !h-16" />
        <div>
          <div className="text-3xl">{data.realtime.temperature}°C</div>
          <div className="text-gray-400 text-sm">
17  src/vite-env.d.ts  vendored
@@ -1 +1,18 @@
/// <reference types="vite/client" />

interface Message {
  role: 'user' | 'assistant' | 'system';
  content: string;
}

declare interface Window {
  electronAPI: {
    getWeather: (params: { lon: number; lat: number }) => Promise<any>;
    getNews: () => Promise<any>;
    chatWithDeepseek: (messages: Message[]) => Promise<void>;
    onChatStreamChunk: (callback: (event: unknown, chunk: string) => void) => void;
    onChatStreamEnd: (callback: () => void) => void;
    onChatStreamError: (callback: (event: unknown, error: string) => void) => void;
    removeListener: (channel: string, callback: (...args: any[]) => void) => void;
  };
}