awacke1 commited on
Commit
612f193
·
verified ·
1 Parent(s): fe87343

Update index.html

Browse files
Files changed (1) hide show
  1. index.html +222 -175
index.html CHANGED
@@ -3,222 +3,269 @@
3
  <head>
4
  <meta charset="UTF-8">
5
  <meta name="viewport" content="width=device-width, initial-scale=1.0">
6
- <title>Smart Audio Recorder</title>
7
  <style>
8
  body {
9
- font-family: 'Arial', sans-serif;
10
  max-width: 800px;
11
  margin: 0 auto;
12
  padding: 20px;
13
- background-color: #f5f5f5;
14
  }
15
  .container {
16
- background-color: white;
17
  padding: 20px;
18
- border-radius: 10px;
19
  box-shadow: 0 2px 4px rgba(0,0,0,0.1);
20
  }
21
- .record-button {
22
- background-color: #ff4444;
 
 
 
 
 
 
 
 
 
 
 
 
 
23
  color: white;
24
  border: none;
25
- padding: 15px 30px;
26
- border-radius: 25px;
27
- font-size: 18px;
28
  cursor: pointer;
29
- transition: background-color 0.3s;
30
  }
31
- .record-button.recording {
32
- background-color: #cc0000;
33
- animation: pulse 1.5s infinite;
34
  }
35
- .status {
36
- margin-top: 20px;
37
- padding: 10px;
38
- border-radius: 5px;
39
- background-color: #f8f9fa;
40
- }
41
- @keyframes pulse {
42
- 0% { transform: scale(1); }
43
- 50% { transform: scale(1.05); }
44
- 100% { transform: scale(1); }
45
- }
46
- .meter {
47
- height: 20px;
48
- background-color: #e9ecef;
49
- border-radius: 10px;
50
  margin: 20px 0;
51
- overflow: hidden;
 
 
 
 
 
52
  }
53
- .meter-fill {
54
- height: 100%;
55
- width: 0%;
56
- background-color: #4CAF50;
57
- transition: width 0.1s;
58
  }
59
  .error {
60
- color: #dc3545;
 
61
  padding: 10px;
62
- margin-top: 10px;
63
- border: 1px solid #dc3545;
64
- border-radius: 5px;
65
  display: none;
66
  }
67
  </style>
68
  </head>
69
  <body>
70
  <div class="container">
71
- <h1>Smart Audio Recorder</h1>
72
- <p>Records when active audio is detected. Saves only recordings with more than 5 seconds of active audio.</p>
73
-
74
- <button id="recordButton" class="record-button">Start Recording</button>
75
- <div class="meter">
76
- <div id="meterFill" class="meter-fill"></div>
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
77
  </div>
78
- <div id="status" class="status">Ready to record</div>
79
- <div id="error" class="error"></div>
80
  </div>
81
 
82
  <script>
83
- class SmartRecorder {
84
- constructor() {
85
- this.mediaRecorder = null;
86
- this.audioContext = null;
87
- this.analyser = null;
88
- this.chunks = [];
89
- this.activeAudioTime = 0;
90
- this.lastActiveTime = 0;
91
- this.isRecording = false;
92
- this.silenceThreshold = 0.015;
93
- this.minActiveAudio = 5; // seconds
94
-
95
- this.recordButton = document.getElementById('recordButton');
96
- this.status = document.getElementById('status');
97
- this.meterFill = document.getElementById('meterFill');
98
- this.errorDiv = document.getElementById('error');
99
-
100
- this.recordButton.addEventListener('click', () => this.toggleRecording());
101
-
102
- // Check if we're running on localhost or HTTPS
103
- if (!(window.location.protocol === 'https:' || window.location.hostname === 'localhost' || window.location.hostname === '127.0.0.1')) {
104
- this.showError('This application requires HTTPS or localhost to access the microphone.');
105
- this.recordButton.disabled = true;
106
- return;
107
- }
108
-
109
- this.setupAudioContext();
110
- }
111
 
112
- showError(message) {
113
- this.errorDiv.textContent = message;
114
- this.errorDiv.style.display = 'block';
115
- console.error(message);
116
- }
 
 
 
 
117
 
118
- async setupAudioContext() {
119
- try {
120
- const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
121
- this.audioContext = new AudioContext();
122
- const source = this.audioContext.createMediaStreamSource(stream);
123
- this.analyser = this.audioContext.createAnalyser();
124
- this.analyser.fftSize = 2048;
125
- source.connect(this.analyser);
126
- this.errorDiv.style.display = 'none';
127
- } catch (err) {
128
- this.showError(`Error accessing microphone: ${err.message}`);
129
- this.recordButton.disabled = true;
130
- }
131
- }
 
 
 
 
 
 
 
 
 
 
 
 
 
132
 
133
- async toggleRecording() {
134
- if (!this.isRecording) {
135
- await this.startRecording();
136
- } else {
137
- await this.stopRecording();
138
- }
 
 
 
 
139
  }
 
 
 
 
 
 
 
140
 
141
- async startRecording() {
142
- try {
143
- const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
144
- this.mediaRecorder = new MediaRecorder(stream);
145
- this.chunks = [];
146
- this.activeAudioTime = 0;
147
- this.lastActiveTime = Date.now();
148
- this.isRecording = true;
149
-
150
- this.mediaRecorder.ondataavailable = (e) => this.chunks.push(e.data);
151
- this.mediaRecorder.start();
152
-
153
- this.recordButton.classList.add('recording');
154
- this.recordButton.textContent = 'Stop Recording';
155
- this.status.textContent = 'Recording...';
156
- this.errorDiv.style.display = 'none';
157
-
158
- this.startAudioAnalysis();
159
- } catch (err) {
160
- this.showError(`Error starting recording: ${err.message}`);
161
- }
162
  }
163
 
164
- async stopRecording() {
165
- if (!this.mediaRecorder) return;
166
-
167
- this.isRecording = false;
168
- this.mediaRecorder.stop();
169
- this.recordButton.classList.remove('recording');
170
- this.recordButton.textContent = 'Start Recording';
171
-
172
- this.mediaRecorder.onstop = async () => {
173
- if (this.activeAudioTime >= this.minActiveAudio) {
174
- const blob = new Blob(this.chunks, { type: 'audio/wav' });
175
- const url = URL.createObjectURL(blob);
176
- const link = document.createElement('a');
177
- link.href = url;
178
- link.download = `recording_${new Date().toISOString()}.wav`;
179
- link.click();
180
- this.status.textContent = `Saved recording with ${this.activeAudioTime.toFixed(1)} seconds of active audio`;
181
- } else {
182
- this.status.textContent = `Recording discarded: Only ${this.activeAudioTime.toFixed(1)} seconds of active audio (minimum ${this.minActiveAudio}s required)`;
183
- }
184
- this.meterFill.style.width = '0%';
185
- };
186
  }
 
187
 
188
- startAudioAnalysis() {
189
- const analyzeFrame = () => {
190
- if (!this.isRecording) return;
191
-
192
- const dataArray = new Float32Array(this.analyser.frequencyBinCount);
193
- this.analyser.getFloatTimeDomainData(dataArray);
194
- const rms = Math.sqrt(dataArray.reduce((acc, val) => acc + val * val, 0) / dataArray.length);
195
-
196
- if (rms > this.silenceThreshold) {
197
- const now = Date.now();
198
- const timeDiff = (now - this.lastActiveTime) / 1000;
199
- this.activeAudioTime += timeDiff;
200
- this.lastActiveTime = now;
201
- }
202
-
203
- // Update meter
204
- const meterLevel = Math.min(100, (rms * 400));
205
- this.meterFill.style.width = `${meterLevel}%`;
206
-
207
- // Update status with active audio time
208
- if (this.isRecording) {
209
- this.status.textContent = `Recording... Active audio: ${this.activeAudioTime.toFixed(1)}s`;
210
- }
211
-
212
- requestAnimationFrame(analyzeFrame);
213
- };
214
-
215
- analyzeFrame();
216
  }
217
- }
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
218
 
219
- // Initialize recorder when page loads
220
- window.addEventListener('load', () => {
221
- new SmartRecorder();
222
  });
223
  </script>
224
  </body>
 
3
  <head>
4
  <meta charset="UTF-8">
5
  <meta name="viewport" content="width=device-width, initial-scale=1.0">
6
+ <title>Voice Assistant Demo</title>
7
  <style>
8
  body {
9
+ font-family: system-ui, -apple-system, sans-serif;
10
  max-width: 800px;
11
  margin: 0 auto;
12
  padding: 20px;
13
+ background: #f0f0f0;
14
  }
15
  .container {
16
+ background: white;
17
  padding: 20px;
18
+ border-radius: 8px;
19
  box-shadow: 0 2px 4px rgba(0,0,0,0.1);
20
  }
21
+ .status {
22
+ margin: 20px 0;
23
+ padding: 10px;
24
+ border-radius: 4px;
25
+ background: #e8f5e9;
26
+ }
27
+ .transcript {
28
+ margin: 20px 0;
29
+ padding: 15px;
30
+ background: #f5f5f5;
31
+ border-radius: 4px;
32
+ min-height: 50px;
33
+ }
34
+ button {
35
+ background: #2196F3;
36
  color: white;
37
  border: none;
38
+ padding: 10px 20px;
39
+ border-radius: 4px;
 
40
  cursor: pointer;
41
+ font-size: 16px;
42
  }
43
+ button:hover {
44
+ background: #1976D2;
 
45
  }
46
+ button:disabled {
47
+ background: #ccc;
48
+ cursor: not-allowed;
49
+ }
50
+ .controls {
51
+ display: flex;
52
+ gap: 10px;
53
+ margin: 20px 0;
54
+ }
55
+ .voice-settings {
 
 
 
 
 
56
  margin: 20px 0;
57
+ padding: 15px;
58
+ background: #f8f9fa;
59
+ border-radius: 4px;
60
+ }
61
+ .setting-group {
62
+ margin: 10px 0;
63
  }
64
+ label {
65
+ display: inline-block;
66
+ width: 100px;
 
 
67
  }
68
  .error {
69
+ background: #ffebee;
70
+ color: #c62828;
71
  padding: 10px;
72
+ border-radius: 4px;
73
+ margin: 10px 0;
 
74
  display: none;
75
  }
76
  </style>
77
  </head>
78
  <body>
79
  <div class="container">
80
+ <h1>Voice Assistant Demo</h1>
81
+ <p>Click "Start Listening" and speak a command. Try saying:</p>
82
+ <ul>
83
+ <li>"Hello" - The assistant will greet you back</li>
84
+ <li>"What time is it?" - Get the current time</li>
85
+ <li>"Tell me a fact" - Hear an interesting fact</li>
86
+ </ul>
87
+
88
+ <div class="controls">
89
+ <button id="startBtn">Start Listening</button>
90
+ <button id="stopBtn" disabled>Stop Listening</button>
91
+ </div>
92
+
93
+ <div class="error" id="error"></div>
94
+ <div class="status" id="status">Status: Ready</div>
95
+ <div class="transcript" id="transcript"></div>
96
+
97
+ <div class="voice-settings">
98
+ <h3>Voice Settings</h3>
99
+ <div class="setting-group">
100
+ <label for="voice">Voice:</label>
101
+ <select id="voice"></select>
102
+ </div>
103
+ <div class="setting-group">
104
+ <label for="rate">Rate:</label>
105
+ <input type="range" id="rate" min="0.5" max="2" value="1" step="0.1">
106
+ <span id="rateValue">1</span>
107
+ </div>
108
+ <div class="setting-group">
109
+ <label for="pitch">Pitch:</label>
110
+ <input type="range" id="pitch" min="0.5" max="2" value="1" step="0.1">
111
+ <span id="pitchValue">1</span>
112
+ </div>
113
  </div>
 
 
114
  </div>
115
 
116
  <script>
117
+ // Check if speech recognition is supported
118
+ if (!('webkitSpeechRecognition' in window) && !('SpeechRecognition' in window)) {
119
+ document.getElementById('error').style.display = 'block';
120
+ document.getElementById('error').textContent = 'Speech recognition is not supported in this browser. Please try Chrome.';
121
+ document.getElementById('startBtn').disabled = true;
122
+ }
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
123
 
124
+ // Initialize speech recognition
125
+ const SpeechRecognition = window.SpeechRecognition || window.webkitSpeechRecognition;
126
+ const recognition = new SpeechRecognition();
127
+
128
+ // Configure recognition
129
+ recognition.continuous = false;
130
+ recognition.lang = 'en-US';
131
+ recognition.interimResults = false;
132
+ recognition.maxAlternatives = 1;
133
 
134
+ // Initialize speech synthesis
135
+ const synth = window.speechSynthesis;
136
+ let voices = [];
137
+
138
+ // DOM elements
139
+ const startBtn = document.getElementById('startBtn');
140
+ const stopBtn = document.getElementById('stopBtn');
141
+ const status = document.getElementById('status');
142
+ const transcript = document.getElementById('transcript');
143
+ const voiceSelect = document.getElementById('voice');
144
+ const rate = document.getElementById('rate');
145
+ const pitch = document.getElementById('pitch');
146
+ const rateValue = document.getElementById('rateValue');
147
+ const pitchValue = document.getElementById('pitchValue');
148
+ const errorDiv = document.getElementById('error');
149
+
150
+ // Populate voice list
151
+ function populateVoices() {
152
+ voices = synth.getVoices();
153
+ voiceSelect.innerHTML = '';
154
+ voices.forEach((voice, i) => {
155
+ const option = document.createElement('option');
156
+ option.textContent = `${voice.name} (${voice.lang})`;
157
+ option.setAttribute('data-name', voice.name);
158
+ voiceSelect.appendChild(option);
159
+ });
160
+ }
161
 
162
+ populateVoices();
163
+ if (speechSynthesis.onvoiceschanged !== undefined) {
164
+ speechSynthesis.onvoiceschanged = populateVoices;
165
+ }
166
+
167
+ // Speech synthesis function
168
+ function speak(text) {
169
+ if (synth.speaking) {
170
+ console.error('speechSynthesis.speaking');
171
+ return;
172
  }
173
+ const utterance = new SpeechSynthesisUtterance(text);
174
+ const selectedVoice = voices[voiceSelect.selectedIndex];
175
+ utterance.voice = selectedVoice;
176
+ utterance.rate = rate.value;
177
+ utterance.pitch = pitch.value;
178
+ synth.speak(utterance);
179
+ }
180
 
181
+ // Handle commands
182
+ function handleCommand(command) {
183
+ command = command.toLowerCase();
184
+ let response = '';
185
+
186
+ if (command.includes('hello')) {
187
+ response = 'Hello! How can I help you today?';
188
+ } else if (command.includes('what time')) {
189
+ const now = new Date();
190
+ response = `The current time is ${now.toLocaleTimeString()}`;
191
+ } else if (command.includes('tell me a fact')) {
192
+ const facts = [
193
+ 'The shortest war in history was between Britain and Zanzibar on August 27, 1896. Zanzibar surrendered after just 38 minutes.',
194
+ 'Honey never spoils. Archaeologists have found pots of honey in ancient Egyptian tombs that are over 3,000 years old and still perfectly good to eat.',
195
+ 'The first oranges weren\'t orange. The original oranges from Southeast Asia were actually green.',
196
+ ];
197
+ response = facts[Math.floor(Math.random() * facts.length)];
198
+ } else {
199
+ response = "I'm sorry, I didn't understand that command. Please try again.";
 
 
200
  }
201
 
202
+ transcript.textContent = `You said: ${command}\nAssistant: ${response}`;
203
+ speak(response);
204
+ }
205
+
206
+ // Event listeners
207
+ startBtn.addEventListener('click', () => {
208
+ errorDiv.style.display = 'none';
209
+ try {
210
+ recognition.start();
211
+ startBtn.disabled = true;
212
+ stopBtn.disabled = false;
213
+ status.textContent = 'Status: Listening...';
214
+ console.log('Recognition started');
215
+ } catch (err) {
216
+ console.error('Recognition error:', err);
217
+ errorDiv.style.display = 'block';
218
+ errorDiv.textContent = `Error starting recognition: ${err.message}`;
 
 
 
 
 
219
  }
220
+ });
221
 
222
+ stopBtn.addEventListener('click', () => {
223
+ try {
224
+ recognition.stop();
225
+ startBtn.disabled = false;
226
+ stopBtn.disabled = true;
227
+ status.textContent = 'Status: Stopped';
228
+ } catch (err) {
229
+ console.error('Stop error:', err);
230
+ errorDiv.style.display = 'block';
231
+ errorDiv.textContent = `Error stopping recognition: ${err.message}`;
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
232
  }
233
+ });
234
+
235
+ recognition.onstart = () => {
236
+ console.log('Recognition started');
237
+ status.textContent = 'Status: Listening...';
238
+ };
239
+
240
+ recognition.onresult = (event) => {
241
+ console.log('Recognition result received');
242
+ const command = event.results[0][0].transcript;
243
+ handleCommand(command);
244
+ };
245
+
246
+ recognition.onend = () => {
247
+ console.log('Recognition ended');
248
+ startBtn.disabled = false;
249
+ stopBtn.disabled = true;
250
+ status.textContent = 'Status: Ready';
251
+ };
252
+
253
+ recognition.onerror = (event) => {
254
+ console.error('Recognition error:', event.error);
255
+ errorDiv.style.display = 'block';
256
+ errorDiv.textContent = `Error: ${event.error}. ${event.error === 'not-allowed' ? 'Please ensure microphone permissions are granted.' : ''}`;
257
+ status.textContent = 'Status: Error';
258
+ startBtn.disabled = false;
259
+ stopBtn.disabled = true;
260
+ };
261
+
262
+ // Voice setting controls
263
+ rate.addEventListener('input', () => {
264
+ rateValue.textContent = rate.value;
265
+ });
266
 
267
+ pitch.addEventListener('input', () => {
268
+ pitchValue.textContent = pitch.value;
 
269
  });
270
  </script>
271
  </body>