Update index.html

index.html CHANGED (+40 -14)
@@ -188,6 +188,9 @@
 let bars;
 let animationId;
 let isListening = false;
+let microphoneStream;
+let isSpeaking = false;
+let currentAudioSource = null;
 
 function createVisualizer() {
   const barCount = 64;
@@ -236,8 +239,10 @@
   const botResponse = `I heard you say: "${transcription.text}".`;
   addLog(`Bot: ${botResponse}`);
 
+  isSpeaking = true;
   const speechOutput = await ttsPipeline(botResponse);
-  playAudio(speechOutput.audio);
+  await playAudio(speechOutput.audio);
+  isSpeaking = false;
 } catch (error) {
   console.error('Error processing speech:', error);
   addLog('System: Error processing speech. Please try again.');
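Note on the hunk above: the added lines only clear isSpeaking on the success path. If ttsPipeline or playAudio throws, control jumps to the catch block shown here, which logs the error but, at least in the lines visible in this hunk, leaves isSpeaking set, so the next onSpeechStart would still be treated as an interruption. A minimal sketch of a variant that resets the flag either way, using only names already present in the diff:

// Sketch: clear isSpeaking even when TTS or playback fails.
isSpeaking = true;
try {
  const speechOutput = await ttsPipeline(botResponse);
  await playAudio(speechOutput.audio);
} finally {
  isSpeaking = false;
}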
@@ -255,17 +260,30 @@
 }
 
 function playAudio(audioArray) {
-
-
-
+  return new Promise((resolve) => {
+    const audioBuffer = audioContext.createBuffer(1, audioArray.length, 16000);
+    const channelData = audioBuffer.getChannelData(0);
+    channelData.set(audioArray);
 
-
-
-
-
-
+    const source = audioContext.createBufferSource();
+    currentAudioSource = source; // Store the current audio source
+    source.buffer = audioBuffer;
+    source.connect(analyser);
+    analyser.connect(audioContext.destination);
+    source.start();
+    source.onended = () => {
+      currentAudioSource = null;
+      resolve();
+    };
+  });
 }
 
+function stopCurrentAudio() {
+  if (currentAudioSource) {
+    currentAudioSource.stop();
+    currentAudioSource = null;
+  }
+}
 
 async function toggleListening() {
   if (isListening) {
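The new playAudio hard-codes a 16 kHz sample rate in createBuffer. Transformers.js text-to-speech pipelines typically return an object carrying a sampling_rate field alongside audio, so a slightly more defensive variant could take the rate from the pipeline output instead of assuming it. This is only a sketch; playAudioAt is a hypothetical name, and the exact shape of speechOutput depends on the model and pipeline in use.

// Sketch: accept the sample rate instead of assuming 16000.
function playAudioAt(audioArray, sampleRate = 16000) {
  return new Promise((resolve) => {
    const audioBuffer = audioContext.createBuffer(1, audioArray.length, sampleRate);
    audioBuffer.getChannelData(0).set(audioArray);

    const source = audioContext.createBufferSource();
    currentAudioSource = source;
    source.buffer = audioBuffer;
    source.connect(analyser);
    analyser.connect(audioContext.destination);
    source.onended = () => { currentAudioSource = null; resolve(); };
    source.start();
  });
}

// Possible call site: await playAudioAt(speechOutput.audio, speechOutput.sampling_rate ?? 16000);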
@@ -274,7 +292,6 @@
     await startListening();
   }
 }
-
 
 async function startListening() {
   try {
@@ -287,6 +304,11 @@
   onSpeechStart: () => {
     addLog('--- vad: speech start');
     updateVisualizer();
+    if (isSpeaking) {
+      addLog('User interrupted. Stopping bot speech.');
+      stopCurrentAudio();
+      isSpeaking = false;
+    }
   },
   onSpeechEnd: (audio) => {
     addLog('--- vad: speech end');
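For orientation, the onSpeechStart / onSpeechEnd callbacks and the later myvad.start() call match the MicVAD API of @ricky0123/vad-web. Assuming that is the library in use (the diff itself does not name it), the surrounding setup usually looks roughly like the sketch below, with the interruption check slotted into onSpeechStart as above; processSpeech is a hypothetical name for the handler that runs transcription and TTS.

// Sketch of the VAD object these callbacks belong to (assumed @ricky0123/vad-web).
const myvad = await vad.MicVAD.new({
  onSpeechStart: () => {
    if (isSpeaking) {        // bot audio is playing: treat new speech as an interruption
      stopCurrentAudio();
      isSpeaking = false;
    }
  },
  onSpeechEnd: (audio) => {
    processSpeech(audio);    // audio: captured utterance samples
  }
});
await myvad.start();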
@@ -295,8 +317,8 @@
   }
 });
 
-
-const source = audioContext.createMediaStreamSource(
+microphoneStream = await navigator.mediaDevices.getUserMedia({ audio: true });
+const source = audioContext.createMediaStreamSource(microphoneStream);
 source.connect(analyser);
 
 await myvad.start();
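The getUserMedia call added here feeds the analyser that drives the visualizer, while the VAD library typically requests microphone access on its own. One practical caveat: browsers commonly create an AudioContext in the 'suspended' state until a user gesture, so resuming it before wiring up the stream is a cheap safeguard. A sketch using the names visible in the diff:

// Sketch: ensure the AudioContext is running before connecting the microphone.
if (audioContext.state === 'suspended') {
  await audioContext.resume();
}
microphoneStream = await navigator.mediaDevices.getUserMedia({ audio: true });
const source = audioContext.createMediaStreamSource(microphoneStream);
source.connect(analyser);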
@@ -311,12 +333,16 @@
 
 async function stopListening() {
   if (myvad) {
-    await myvad.
+    await myvad.stop();
+    if (microphoneStream) {
+      microphoneStream.getTracks().forEach(track => track.stop());
+    }
+    stopCurrentAudio();
     startButton.textContent = 'Begin Call';
     isListening = false;
     addLog('System: Stopped listening.');
     cancelAnimationFrame(animationId);
-    addLog('
+    addLog('System: Microphone closed');
   }
 }
 
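The cleanup above stops the tracks of microphoneStream but keeps the reference around; clearing it lets the next startListening call re-acquire a fresh stream without holding on to a dead one. A small follow-up sketch, again using only names from the diff:

// Sketch: drop the stale MediaStream reference once its tracks are stopped.
if (microphoneStream) {
  microphoneStream.getTracks().forEach(track => track.stop());
  microphoneStream = null;
}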