class AudioManager {

  constructor() {
    // Recording parameters; the time limits are in milliseconds.
    this.config = {
      sampleRate: 44100,
      channels: 1,
      bitDepth: 16,
      maxRecordingTime: 30000,
      minRecordingTime: 15000
    };

    // Mutable recording state, grouped in one place so cleanup() can reset it.
    this.state = {
      isRecording: false,
      startTime: null,
      recorder: null,
      stream: null,
      audioChunks: [],
      audioContext: null,
      analyser: null
    };

    // Canvas-backed frequency visualizer; populated by initializeVisualizer().
    this.visualizer = {
      canvasContext: null,
      dataArray: null,
      bufferLength: null,
      width: 0,
      height: 0
    };

    // webkitAudioContext covers older Safari releases.
    try {
      this.state.audioContext = new (window.AudioContext || window.webkitAudioContext)();
    } catch (error) {
      console.error('AudioContext not supported in this browser');
    }

    // Bind the public methods so they keep `this` when passed as callbacks.
    this.startRecording = this.startRecording.bind(this);
    this.stopRecording = this.stopRecording.bind(this);
    this.processAudio = this.processAudio.bind(this);
  }
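  /**
   * Wires a <canvas> element to an AnalyserNode so drawVisualization()
   * can render live frequency data while recording.
   */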
  initializeVisualizer(canvas) {
    // Bail out if there is no canvas, or if AudioContext construction
    // failed in the constructor (nothing to analyse with).
    if (!canvas || !this.state.audioContext) return;

    this.visualizer.canvasContext = canvas.getContext('2d');
    this.visualizer.width = canvas.width;
    this.visualizer.height = canvas.height;

    this.state.analyser = this.state.audioContext.createAnalyser();
    this.state.analyser.fftSize = 2048;
    // frequencyBinCount is fftSize / 2: one byte of magnitude per bin.
    this.visualizer.bufferLength = this.state.analyser.frequencyBinCount;
    this.visualizer.dataArray = new Uint8Array(this.visualizer.bufferLength);
  }
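  /**
   * Requests microphone access, starts a MediaRecorder, feeds the stream
   * into the analyser (when the visualizer is set up), and schedules an
   * automatic stop at maxRecordingTime.
   */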
  async startRecording() {
    try {
      // Discard chunks from any previous take so recordings don't concatenate.
      this.state.audioChunks = [];

      this.state.stream = await navigator.mediaDevices.getUserMedia({
        audio: {
          channelCount: this.config.channels,
          sampleRate: this.config.sampleRate
        }
      });

      // Not every browser supports webm/opus (notably older Safari), so
      // fall back to the browser's default container when it doesn't.
      const mimeType = 'audio/webm;codecs=opus';
      this.state.recorder = MediaRecorder.isTypeSupported(mimeType)
        ? new MediaRecorder(this.state.stream, { mimeType })
        : new MediaRecorder(this.state.stream);

      this.state.recorder.ondataavailable = (event) => {
        if (event.data.size > 0) {
          this.state.audioChunks.push(event.data);
        }
      };

      // Autoplay policies can leave the context suspended until a gesture.
      if (this.state.audioContext && this.state.audioContext.state === 'suspended') {
        await this.state.audioContext.resume();
      }

      // Only wire up the analyser if initializeVisualizer() has run;
      // connecting to a null node would throw.
      if (this.state.analyser) {
        const source = this.state.audioContext.createMediaStreamSource(this.state.stream);
        source.connect(this.state.analyser);
      }

      this.state.recorder.start(100); // emit a data chunk every 100 ms
      this.state.isRecording = true;
      this.state.startTime = Date.now();

      if (this.visualizer.canvasContext) {
        this.drawVisualization();
      }

      // Hard stop once the maximum recording time elapses.
      setTimeout(() => {
        if (this.state.isRecording) {
          this.stopRecording();
        }
      }, this.config.maxRecordingTime);
    } catch (error) {
      console.error('Error starting recording:', error);
      throw new Error('Failed to start recording');
    }
  }
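  /**
   * Stops the recorder and resolves with the processed WAV blob.
   * Rejects if the take is shorter than minRecordingTime, in which case
   * the recording is left running so the caller can try again.
   */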
  async stopRecording() {
    return new Promise((resolve, reject) => {
      try {
        if (!this.state.recorder || !this.state.isRecording) {
          throw new Error('No active recording');
        }

        const recordingDuration = Date.now() - this.state.startTime;

        // Enforce the minimum length; the recording keeps running so the
        // caller can wait and call stopRecording() again.
        if (recordingDuration < this.config.minRecordingTime) {
          throw new Error('Recording too short');
        }

        // onstop fires after the final dataavailable event, so every chunk
        // is in state.audioChunks by the time processAudio() runs.
        this.state.recorder.onstop = async () => {
          try {
            const audioBlob = await this.processAudio();
            resolve(audioBlob);
          } catch (error) {
            reject(error);
          }
        };

        this.state.recorder.stop();
        // Release the microphone so the browser's recording indicator clears.
        this.state.stream.getTracks().forEach(track => track.stop());
        this.state.isRecording = false;
      } catch (error) {
        reject(error);
      }
    });
  }
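  /**
   * Concatenates the recorded chunks, decodes them to raw PCM, and
   * re-encodes the result as a 16-bit WAV blob.
   */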
  async processAudio() {
    try {
      // Use whatever container the recorder actually produced.
      const mimeType = this.state.recorder ? this.state.recorder.mimeType : 'audio/webm';
      const audioBlob = new Blob(this.state.audioChunks, { type: mimeType });

      // Decode in a throwaway context, then close it to free the hardware
      // handle; the constructor's context stays reserved for the analyser.
      const arrayBuffer = await audioBlob.arrayBuffer();
      const audioContext = new (window.AudioContext || window.webkitAudioContext)();
      const audioBuffer = await audioContext.decodeAudioData(arrayBuffer);
      await audioContext.close();

      const wavBuffer = this.createWAVBuffer(audioBuffer);
      return new Blob([wavBuffer], { type: 'audio/wav' });
    } catch (error) {
      console.error('Error processing audio:', error);
      throw new Error('Failed to process audio');
    }
  }
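  /**
   * Converts a decoded AudioBuffer into a complete WAV file: a 44-byte
   * RIFF header followed by interleaved 16-bit little-endian PCM samples.
   */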
  createWAVBuffer(audioBuffer) {
    const numChannels = audioBuffer.numberOfChannels;
    // Byte length of the PCM payload: frames * channels * 2 bytes/sample.
    const length = audioBuffer.length * numChannels * 2;
    const buffer = new ArrayBuffer(44 + length);
    const view = new DataView(buffer);

    this.writeWAVHeader(view, length, numChannels, audioBuffer.sampleRate);

    const channels = [];
    for (let i = 0; i < numChannels; i++) {
      channels.push(audioBuffer.getChannelData(i));
    }

    // Interleave channels frame by frame, clamping each float sample to
    // [-1, 1] and scaling it to a signed 16-bit integer.
    let offset = 44;
    for (let i = 0; i < audioBuffer.length; i++) {
      for (let channel = 0; channel < numChannels; channel++) {
        const sample = Math.max(-1, Math.min(1, channels[channel][i]));
        view.setInt16(offset, sample < 0 ? sample * 0x8000 : sample * 0x7FFF, true);
        offset += 2;
      }
    }

    return buffer;
  }
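  /**
   * Writes the canonical 44-byte WAV/RIFF header. Field layout:
   *   0-3   "RIFF"           8-11  "WAVE"          12-15 "fmt "
   *   16-19 fmt chunk size   20-21 format (1 = PCM)
   *   22-23 channels         24-27 sample rate
   *   28-31 byte rate        32-33 block align     34-35 bits/sample
   *   36-39 "data"           40-43 data length
   */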
  writeWAVHeader(view, length, numChannels, sampleRate) {
    this.writeString(view, 0, 'RIFF');
    view.setUint32(4, 36 + length, true);      // RIFF chunk size: header remainder + data
    this.writeString(view, 8, 'WAVE');
    this.writeString(view, 12, 'fmt ');
    view.setUint32(16, 16, true);              // fmt chunk size for PCM
    view.setUint16(20, 1, true);               // audio format: 1 = uncompressed PCM
    view.setUint16(22, numChannels, true);
    view.setUint32(24, sampleRate, true);
    view.setUint32(28, sampleRate * numChannels * 2, true); // byte rate
    view.setUint16(32, numChannels * 2, true); // block align: bytes per frame
    view.setUint16(34, 16, true);              // bits per sample
    this.writeString(view, 36, 'data');
    view.setUint32(40, length, true);          // PCM data length in bytes
  }
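  /** Writes an ASCII tag (e.g. "RIFF") byte by byte into the DataView. */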
  writeString(view, offset, string) {
    for (let i = 0; i < string.length; i++) {
      view.setUint8(offset + i, string.charCodeAt(i));
    }
  }
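  /**
   * Renders a simple frequency-bar visualization and reschedules itself
   * via requestAnimationFrame until the recording stops.
   */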
  drawVisualization() {
    if (!this.state.isRecording || !this.visualizer.canvasContext) return;

    requestAnimationFrame(() => this.drawVisualization());

    this.state.analyser.getByteFrequencyData(this.visualizer.dataArray);

    // Clear the canvas with a near-black background.
    this.visualizer.canvasContext.fillStyle = 'rgb(10, 10, 10)';
    this.visualizer.canvasContext.fillRect(0, 0, this.visualizer.width, this.visualizer.height);

    const barWidth = (this.visualizer.width / this.visualizer.bufferLength) * 2.5;
    let barHeight;
    let x = 0;

    // One bar per frequency bin; taller (louder) bars get a brighter red.
    for (let i = 0; i < this.visualizer.bufferLength; i++) {
      barHeight = (this.visualizer.dataArray[i] / 255) * this.visualizer.height;

      this.visualizer.canvasContext.fillStyle = `rgb(${barHeight + 100},50,50)`;
      this.visualizer.canvasContext.fillRect(
        x,
        this.visualizer.height - barHeight,
        barWidth,
        barHeight
      );

      x += barWidth + 1;
    }
  }
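  /**
   * Releases the microphone and resets recording state. Safe to call at
   * any time, e.g. on component unmount.
   */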
  cleanup() {
    if (this.state.stream) {
      this.state.stream.getTracks().forEach(track => track.stop());
    }
    this.state.stream = null;
    this.state.recorder = null;
    this.state.audioChunks = [];
    this.state.isRecording = false;
  }
}

export default AudioManager;
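// Example usage -- a minimal sketch, assuming a browser page with a
// <canvas> for the visualizer and button/audio elements wired up by the
// caller (startButton, stopButton, and player below are placeholders):
//
//   const manager = new AudioManager();
//   manager.initializeVisualizer(document.querySelector('canvas'));
//
//   startButton.addEventListener('click', () => manager.startRecording());
//   stopButton.addEventListener('click', async () => {
//     const wavBlob = await manager.stopRecording();
//     player.src = URL.createObjectURL(wavBlob);
//   });
//
// getUserMedia and AudioContext generally require a user gesture and a
// secure (https) context, so both calls should come from event handlers.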