feat: add audio recorder module and examples, with plain-JavaScript support
246  web/src/apps/muse/voice/modules/AudioRecorder.README.md  Normal file
@@ -0,0 +1,246 @@
/**
 * Audio Recorder Module Documentation
 *
 * @description Usage guide for the AudioRecorder module
 * @tags audio, recorder, documentation, audioworklet
 * @createdAt 2025-12-24
 */

# AudioRecorder Audio Recording Module

## Overview

`AudioRecorder` is a standalone audio recording class that uses the modern `AudioWorklet` API in place of the deprecated `ScriptProcessorNode`. It runs in a plain JavaScript environment and has no dependency on React Hooks.

## Key Features

- ✅ Uses the AudioWorklet API (replacing the deprecated ScriptProcessorNode)
- ✅ Standalone class design with no dependency on React Hooks
- ✅ Works in plain JavaScript environments
- ✅ Configurable sample rate and buffer size
- ✅ Built-in audio format conversion (Float32Array to Base64)
- ✅ Thorough resource cleanup
- ✅ TypeScript type support

## Installation

The module ships with the project and lives at `src/apps/muse/voice/modules/AudioRecorder.ts`.

## Basic Usage

### 1. Create an instance

```typescript
import { AudioRecorder } from './modules/AudioRecorder';

const recorder = new AudioRecorder({
  sampleRate: 16000, // sample rate, default 16000
  bufferSize: 4096,  // buffer size, default 4096
});
```

### 2. Register the audio data callback

```typescript
recorder.onAudioData((audioData: Float32Array) => {
  // Handle the audio data
  console.log('Received audio data:', audioData);

  // Optionally convert to Base64
  const base64 = AudioRecorder.float32ArrayToBase64(audioData);
  console.log('Base64 data:', base64);
});
```

### 3. Start recording

```typescript
try {
  await recorder.start();
  console.log('Recording started');
} catch (error) {
  console.error('Failed to start recording:', error);
}
```

### 4. Stop recording

```typescript
try {
  await recorder.stop();
  console.log('Recording stopped');
} catch (error) {
  console.error('Failed to stop recording:', error);
}
```

### 5. Destroy the instance

```typescript
await recorder.destroy();
```

## Usage in a React Component

```typescript
import { useEffect, useRef, useState } from 'react';
import { AudioRecorder } from './modules/AudioRecorder';

export const RecordingComponent = () => {
  const [isRecording, setIsRecording] = useState(false);
  const recorderRef = useRef<AudioRecorder | null>(null);

  useEffect(() => {
    // Initialize the recorder
    recorderRef.current = new AudioRecorder({
      sampleRate: 16000,
      bufferSize: 4096,
    });

    // Register the audio data callback
    recorderRef.current.onAudioData((audioData) => {
      // Handle the audio data
      const base64 = AudioRecorder.float32ArrayToBase64(audioData);
      // Send to the server or process further
    });

    // Cleanup function
    return () => {
      recorderRef.current?.destroy();
    };
  }, []);

  const handleStart = async () => {
    try {
      await recorderRef.current?.start();
      setIsRecording(true);
    } catch (error) {
      console.error('Error starting recording:', error);
    }
  };

  const handleStop = async () => {
    try {
      await recorderRef.current?.stop();
      setIsRecording(false);
    } catch (error) {
      console.error('Error stopping recording:', error);
    }
  };

  return (
    <div>
      <button onClick={isRecording ? handleStop : handleStart}>
        {isRecording ? 'Stop Recording' : 'Start Recording'}
      </button>
    </div>
  );
};
```

## Usage in Plain JavaScript

```javascript
import { AudioRecorder } from './AudioRecorder.js';

// Create an instance
const recorder = new AudioRecorder({
  sampleRate: 16000,
  bufferSize: 4096,
});

// Register the callback
recorder.onAudioData((audioData) => {
  const base64 = AudioRecorder.float32ArrayToBase64(audioData);
  console.log('Audio data:', base64);
});

// Start recording
document.getElementById('startBtn').addEventListener('click', async () => {
  await recorder.start();
});

// Stop recording
document.getElementById('stopBtn').addEventListener('click', async () => {
  await recorder.stop();
});
```

## API Reference

### Constructor

```typescript
constructor(config?: AudioRecorderConfig)
```

#### Parameters

- `config.sampleRate` (number, optional): audio sample rate, default 16000 Hz
- `config.bufferSize` (number, optional): audio buffer size in samples, default 4096

### Methods

#### `onAudioData(callback: AudioDataCallback): void`

Registers the audio data callback.

- `callback`: receives audio data as a `Float32Array`

#### `async start(): Promise<void>`

Starts audio recording. Prompts the user for microphone permission.

#### `async stop(): Promise<void>`

Stops recording and releases resources.

#### `getIsRecording(): boolean`

Returns the current recording state.

#### `async destroy(): Promise<void>`

Destroys the recorder instance and releases all resources.

#### `static float32ArrayToBase64(float32Array: Float32Array): string`

Static helper that converts a Float32Array into a Base64 string (little-endian 32-bit floats).

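For reference, a receiving side can reverse this encoding. The helper below is a minimal sketch and is **not** part of the module; it assumes the same little-endian Float32 layout that `float32ArrayToBase64` produces.

```typescript
// Hypothetical inverse helper (not exported by AudioRecorder): decode Base64 back to samples.
export function base64ToFloat32Array(base64: string): Float32Array {
  // atob yields a binary string; copy each char code into a byte buffer
  const binaryString = atob(base64);
  const bytes = new Uint8Array(binaryString.length);
  for (let i = 0; i < binaryString.length; i++) {
    bytes[i] = binaryString.charCodeAt(i);
  }
  // Reinterpret the bytes as little-endian 32-bit floats
  const view = new DataView(bytes.buffer);
  const samples = new Float32Array(bytes.length / 4);
  for (let i = 0; i < samples.length; i++) {
    samples[i] = view.getFloat32(i * 4, true);
  }
  return samples;
}
```
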
## Technical Details

### AudioWorklet vs. ScriptProcessorNode

| Feature | ScriptProcessorNode (deprecated) | AudioWorklet |
|---------|----------------------------------|--------------|
| Execution context | Main thread | Dedicated audio thread |
| Performance | Can block the UI | Does not block the UI |
| Latency | Higher | Lower |
| Browser support | Deprecated | Current standard |

### Browser Compatibility

The AudioWorklet API is supported in (a feature-detection sketch follows this list):
- Chrome 66+
- Firefox 76+
- Safari 14.1+
- Edge 79+

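On older browsers it can be worth checking for AudioWorklet support before constructing the recorder. A minimal sketch, not part of the module; what to do on unsupported browsers is left to the caller:

```typescript
// Feature-detect AudioWorklet before using AudioRecorder (illustrative sketch)
function supportsAudioWorklet(): boolean {
  return typeof AudioContext !== 'undefined'
    && typeof AudioWorkletNode !== 'undefined'
    && 'audioWorklet' in AudioContext.prototype;
}

if (!supportsAudioWorklet()) {
  console.warn('AudioWorklet is not available; AudioRecorder cannot run in this browser.');
}
```
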
## Notes

1. **HTTPS requirement**: in production, microphone access requires a secure (HTTPS) context — see the sketch after this list
2. **User permission**: the first use prompts the user to grant microphone access
3. **Resource cleanup**: always call `destroy()` when you are done, so resources are released
4. **Error handling**: wrap the async method calls in try-catch

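A minimal pre-flight sketch for notes 1 and 2, checking for a secure context and for `getUserMedia` availability before calling `recorder.start()`; it is illustrative only and not part of the module:

```typescript
// Sketch: verify the environment before starting a recording
async function canRecord(): Promise<boolean> {
  if (!window.isSecureContext) {
    console.warn('Not a secure context; microphone access is blocked outside HTTPS/localhost.');
    return false;
  }
  if (!navigator.mediaDevices?.getUserMedia) {
    console.warn('getUserMedia is not available in this browser.');
    return false;
  }
  return true;
}
```
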
## Example Project

See `src/apps/muse/voice/test/test-record.tsx` for a complete usage example.

## Changelog

### 2025-12-24
- Initial release
- AudioWorklet used in place of ScriptProcessorNode
- Can be used standalone, without React Hooks
- Built-in Base64 conversion helper

202  web/src/apps/muse/voice/modules/AudioRecorder.example.ts  Normal file
@@ -0,0 +1,202 @@
/**
 * Audio Recorder Usage Example
 *
 * @description Usage examples for AudioRecorder in a plain JavaScript environment
 * @tags audio, recorder, example, javascript
 * @createdAt 2025-12-24
 */

import { AudioRecorder } from './AudioRecorder';

/**
 * Example 1: basic usage
 */
export async function basicExample() {
  // Create a recorder instance
  const recorder = new AudioRecorder({
    sampleRate: 16000,
    bufferSize: 4096,
  });

  // Register the audio data callback
  recorder.onAudioData((audioData) => {
    console.log('Received audio data, length:', audioData.length);

    // Convert to Base64
    const base64 = AudioRecorder.float32ArrayToBase64(audioData);
    console.log('Base64 encoded:', base64.substring(0, 50) + '...');
  });

  // Start recording
  try {
    await recorder.start();
    console.log('✅ Recording started successfully');

    // Stop after 5 seconds
    setTimeout(async () => {
      await recorder.stop();
      console.log('✅ Recording stopped');

      // Release resources
      await recorder.destroy();
      console.log('✅ Recorder destroyed');
    }, 5000);

  } catch (error) {
    console.error('❌ Error:', error);
  }
}

/**
 * Example 2: streaming to a WebSocket
 */
export async function websocketExample() {
  const ws = new WebSocket('ws://localhost:8080/audio');

  const recorder = new AudioRecorder({
    sampleRate: 16000,
    bufferSize: 4096,
  });

  // Forward audio data to the WebSocket
  recorder.onAudioData((audioData) => {
    if (ws.readyState === WebSocket.OPEN) {
      const base64 = AudioRecorder.float32ArrayToBase64(audioData);
      ws.send(JSON.stringify({
        type: 'audio',
        data: base64,
        timestamp: Date.now(),
      }));
    }
  });

  ws.onopen = async () => {
    console.log('WebSocket connected');
    await recorder.start();
  };

  ws.onclose = async () => {
    console.log('WebSocket disconnected');
    await recorder.stop();
    await recorder.destroy();
  };

  ws.onerror = (error) => {
    console.error('WebSocket error:', error);
  };
}

/**
 * Example 3: a recorder class with state management
 */
export class ManagedRecorder {
  private recorder: AudioRecorder;
  private isRecording: boolean = false;
  private audioChunks: Float32Array[] = [];
  private onStatusChange?: (status: 'idle' | 'recording' | 'processing') => void;

  constructor() {
    this.recorder = new AudioRecorder({
      sampleRate: 16000,
      bufferSize: 4096,
    });

    // Collect the audio data
    this.recorder.onAudioData((audioData) => {
      this.audioChunks.push(new Float32Array(audioData));
    });
  }

  /**
   * Set the status-change callback
   */
  onStatus(callback: (status: 'idle' | 'recording' | 'processing') => void) {
    this.onStatusChange = callback;
  }

  /**
   * Start recording
   */
  async start() {
    if (this.isRecording) {
      console.warn('Already recording');
      return;
    }

    this.audioChunks = [];
    await this.recorder.start();
    this.isRecording = true;
    this.onStatusChange?.('recording');
  }

  /**
   * Stop recording and return the full audio buffer
   */
  async stop(): Promise<Float32Array> {
    if (!this.isRecording) {
      console.warn('Not recording');
      return new Float32Array(0);
    }

    this.onStatusChange?.('processing');
    await this.recorder.stop();
    this.isRecording = false;

    // Merge all audio chunks
    const totalLength = this.audioChunks.reduce((sum, chunk) => sum + chunk.length, 0);
    const combined = new Float32Array(totalLength);
    let offset = 0;

    for (const chunk of this.audioChunks) {
      combined.set(chunk, offset);
      offset += chunk.length;
    }

    this.onStatusChange?.('idle');
    return combined;
  }

  /**
   * Get the current recording status
   */
  getStatus(): 'idle' | 'recording' {
    return this.isRecording ? 'recording' : 'idle';
  }

  /**
   * Destroy the recorder
   */
  async destroy() {
    await this.recorder.destroy();
    this.audioChunks = [];
  }
}

/**
 * Example 4: using ManagedRecorder
 */
export async function managedRecorderExample() {
  const recorder = new ManagedRecorder();

  // Listen for status changes
  recorder.onStatus((status) => {
    console.log('Status changed:', status);
  });

  // Start recording
  await recorder.start();
  console.log('Recording...');

  // Stop after 5 seconds and collect the data
  setTimeout(async () => {
    const audioData = await recorder.stop();
    console.log('Recorded audio length:', audioData.length);

    // Convert to Base64
    const base64 = AudioRecorder.float32ArrayToBase64(audioData);
    console.log('Total Base64 length:', base64.length);

    // Clean up
    await recorder.destroy();
  }, 5000);
}

243  web/src/apps/muse/voice/modules/AudioRecorder.ts  Normal file
@@ -0,0 +1,243 @@
/**
 * Audio Recorder Module
 *
 * @description Standalone audio recording module that uses AudioWorklet in place of the deprecated ScriptProcessorNode; runs in a plain JS environment
 * @tags audio, recorder, audioworklet, web-audio-api
 * @createdAt 2025-12-24
 */

export type AudioDataCallback = (audioData: Float32Array) => void;

export interface AudioRecorderConfig {
  sampleRate?: number;
  bufferSize?: number;
}

export class AudioRecorder {
  private audioContext: AudioContext | null = null;
  private mediaStream: MediaStream | null = null;
  private sourceNode: MediaStreamAudioSourceNode | null = null;
  private workletNode: AudioWorkletNode | null = null;
  private isRecording: boolean = false;
  private onAudioDataCallback: AudioDataCallback | null = null;
  private config: Required<AudioRecorderConfig>;

  constructor(config: AudioRecorderConfig = {}) {
    this.config = {
      sampleRate: config.sampleRate ?? 16000,
      bufferSize: config.bufferSize ?? 4096,
    };
  }

  /**
   * Register the audio data callback
   */
  public onAudioData(callback: AudioDataCallback): void {
    this.onAudioDataCallback = callback;
  }

  /**
   * Start recording
   */
  public async start(): Promise<void> {
    if (this.isRecording) {
      console.warn('Recording is already in progress');
      return;
    }

    try {
      // Request microphone access
      this.mediaStream = await navigator.mediaDevices.getUserMedia({
        audio: {
          echoCancellation: true,
          noiseSuppression: true,
          autoGainControl: true,
        }
      });

      // Create the audio context
      this.audioContext = new AudioContext({
        sampleRate: this.config.sampleRate
      });

      // Load the AudioWorklet processor
      await this.loadAudioWorklet();

      // Create the audio source node
      this.sourceNode = this.audioContext.createMediaStreamSource(this.mediaStream);

      // Create the AudioWorklet node
      this.workletNode = new AudioWorkletNode(
        this.audioContext,
        'audio-recorder-processor',
        {
          processorOptions: {
            bufferSize: this.config.bufferSize,
          }
        }
      );

      // Listen for audio data posted by the worklet
      this.workletNode.port.onmessage = (event) => {
        if (event.data.type === 'audio-data' && this.onAudioDataCallback) {
          this.onAudioDataCallback(event.data.audioData);
        }
      };

      // Connect the nodes; the worklet writes no output, so the destination stays silent
      this.sourceNode.connect(this.workletNode);
      this.workletNode.connect(this.audioContext.destination);

      this.isRecording = true;
      console.log('Recording started');
    } catch (error) {
      console.error('Error starting recording:', error);
      await this.cleanup();
      throw error;
    }
  }

  /**
   * Stop recording
   */
  public async stop(): Promise<void> {
    if (!this.isRecording) {
      console.warn('Recording is not in progress');
      return;
    }

    await this.cleanup();
    this.isRecording = false;
    console.log('Recording stopped');
  }

  /**
   * Get the current recording state
   */
  public getIsRecording(): boolean {
    return this.isRecording;
  }

  /**
   * Load the AudioWorklet processor
   */
  private async loadAudioWorklet(): Promise<void> {
    if (!this.audioContext) {
      throw new Error('AudioContext is not initialized');
    }

    // The AudioWorklet processor source, inlined as a string
    const processorCode = `
      class AudioRecorderProcessor extends AudioWorkletProcessor {
        constructor(options) {
          super();
          this.bufferSize = options.processorOptions?.bufferSize || 4096;
          this.buffer = [];
          this.bufferLength = 0;
        }

        process(inputs, outputs, parameters) {
          const input = inputs[0];
          if (input && input.length > 0) {
            const channelData = input[0];

            // Accumulate audio frames
            this.buffer.push(new Float32Array(channelData));
            this.bufferLength += channelData.length;

            // Once enough samples are buffered, post them to the main thread
            if (this.bufferLength >= this.bufferSize) {
              // Merge every chunk in the buffer
              const combinedData = new Float32Array(this.bufferLength);
              let offset = 0;
              for (const chunk of this.buffer) {
                combinedData.set(chunk, offset);
                offset += chunk.length;
              }

              // Post the audio data
              this.port.postMessage({
                type: 'audio-data',
                audioData: combinedData
              });

              // Reset the buffer
              this.buffer = [];
              this.bufferLength = 0;
            }
          }

          return true;
        }
      }

      registerProcessor('audio-recorder-processor', AudioRecorderProcessor);
    `;

    // Turn the processor source into a Blob URL and load it
    const blob = new Blob([processorCode], { type: 'application/javascript' });
    const url = URL.createObjectURL(blob);

    try {
      await this.audioContext.audioWorklet.addModule(url);
    } finally {
      URL.revokeObjectURL(url);
    }
  }

  /**
   * Release all audio resources
   */
  private async cleanup(): Promise<void> {
    // Disconnect the nodes
    if (this.workletNode) {
      this.workletNode.disconnect();
      this.workletNode.port.onmessage = null;
      this.workletNode = null;
    }

    if (this.sourceNode) {
      this.sourceNode.disconnect();
      this.sourceNode = null;
    }

    // Close the audio context
    if (this.audioContext) {
      await this.audioContext.close();
      this.audioContext = null;
    }

    // Stop the media stream tracks
    if (this.mediaStream) {
      this.mediaStream.getTracks().forEach(track => track.stop());
      this.mediaStream = null;
    }
  }

  /**
   * Convert a Float32Array to a Base64 string (little-endian 32-bit floats)
   */
  public static float32ArrayToBase64(float32Array: Float32Array): string {
    const buffer = new ArrayBuffer(float32Array.length * 4);
    const view = new DataView(buffer);
    for (let i = 0; i < float32Array.length; i++) {
      view.setFloat32(i * 4, float32Array[i], true);
    }
    const binary = new Uint8Array(buffer);
    let binaryString = '';
    for (let i = 0; i < binary.length; i++) {
      binaryString += String.fromCharCode(binary[i]);
    }
    return typeof window !== 'undefined' && window.btoa
      ? window.btoa(binaryString)
      : Buffer.from(binaryString, 'binary').toString('base64');
  }

  /**
   * Destroy the instance
   */
  public async destroy(): Promise<void> {
    await this.stop();
    this.onAudioDataCallback = null;
  }
}

@@ -415,9 +415,7 @@ export const VadVoice = () => {
        });
      });
    };
    relatime?.showCostTime?.();
    const duration = await getDuration();
    relatime?.showCostTime?.();
    console.log(`Detected speech end. Duration: ${duration.toFixed(2)}s`);

    // Add the voice record via the store

10  web/src/apps/muse/voice/modules/index.ts  Normal file
@@ -0,0 +1,10 @@
/**
 * Audio Recorder Module Exports
 *
 * @description Export barrel for the audio recorder module
 * @tags audio, recorder, export
 * @createdAt 2025-12-24
 */

export { AudioRecorder } from './AudioRecorder';
export type { AudioDataCallback, AudioRecorderConfig } from './AudioRecorder';

@@ -50,10 +50,10 @@ export class Relatime {
    const voice = data.toString('base64');
    this.asr.ws.send(JSON.stringify({ voice }));
  }
- sendBase64(data: string) {
+ sendBase64(data: string, opts?: { isRelatime?: boolean }) {
    if (!this.ready) return;
    console.log('send elapsed time:', Date.now() - this.startTime);
-   this.asr.ws.send(JSON.stringify({ voice: data, format: 'float32', time: Date.now() }));
+   this.asr.ws.send(JSON.stringify({ voice: data, format: 'float32', time: Date.now(), ...opts }));
    // if (this.timeoutHandle) {
    //   clearTimeout(this.timeoutHandle);
    // }

73  web/src/apps/muse/voice/test/test-record.tsx  Normal file
@@ -0,0 +1,73 @@
/**
 * Test Record Component
 *
 * @description Component for testing audio recording; uses the AudioRecorder class to capture audio
 * @tags audio, test, recorder, component
 * @createdAt 2025-12-24
 */

import { useEffect, useState, useRef } from "react";
import { useVoiceStore } from "../store";
import { AudioRecorder } from "../modules/AudioRecorder";

export const TestRecord = () => {
  const { initialize: initializeStore, relatime } = useVoiceStore();
  const [isRecording, setIsRecording] = useState(false);
  const audioRecorderRef = useRef<AudioRecorder | null>(null);

  useEffect(() => {
    initializeStore();
  }, [initializeStore]);

  useEffect(() => {
    // Initialize the AudioRecorder instance
    audioRecorderRef.current = new AudioRecorder({
      sampleRate: 16000,
      bufferSize: 4096,
    });

    // Register the audio data callback
    audioRecorderRef.current.onAudioData((audioData) => {
      console.log('Received audio data, length:', audioData.length);
      const base64 = AudioRecorder.float32ArrayToBase64(audioData);
      const relatime = useVoiceStore.getState().relatime;
      relatime?.sendBase64(base64);
    });

    // Cleanup function
    return () => {
      audioRecorderRef.current?.destroy();
    };
  }, []);

  const startRecording = async () => {
    try {
      await audioRecorderRef.current?.start();
      setIsRecording(true);
    } catch (error) {
      console.error("Error starting recording:", error);
    }
  };

  const stopRecording = async () => {
    try {
      await audioRecorderRef.current?.stop();
      setIsRecording(false);
    } catch (error) {
      console.error("Error stopping recording:", error);
    }
  };

  return (
    <div>
      Test Record Component
      <button
        className="p-2 border"
        onClick={isRecording ? stopRecording : startRecording}
      >
        {isRecording ? "stop record" : "start record"}
      </button>
    </div>
  );
};

8  web/src/pages/test/record.astro  Normal file
@@ -0,0 +1,8 @@
---
import Html from '../../components/html.astro';
import { TestRecord } from '@/apps/muse/voice/test/test-record.tsx';
---

<Html>
  <TestRecord client:only="react" />
</Html>