let transcriber;
let recording = false;
let mediaRecorder;
let audioChunks = [];
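
// NOTE (assumption): this script expects the host page to expose the
// Transformers.js pipeline factory as `window.Transformers.pipeline` before
// loadModel() runs. One way the page might do that (a sketch, not part of this
// file; adjust the CDN URL/version to whatever the page actually loads):
//
//   <script type="module">
//     import { pipeline } from 'https://cdn.jsdelivr.net/npm/@xenova/transformers';
//     window.Transformers = { pipeline };
//   </script>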

// Function to Load Model
async function loadModel() {
    document.getElementById("modelStatus").innerText = "⏳ Loading Whisper model...";
    try {
        const timeout = new Promise((_, reject) =>
            setTimeout(() => reject(new Error("Model took too long to load!")), 60000) // 60s timeout
        );

        transcriber = await Promise.race([
            window.Transformers.pipeline('automatic-speech-recognition', 'Xenova/whisper-tiny.en'),
            timeout
        ]);

        document.getElementById("modelStatus").innerText = "βœ… Model Loaded!";
        document.getElementById("recordButton").disabled = false;
        document.getElementById("recordButton").innerText = "🎀 Start Recording";
        document.getElementById("testModel").disabled = false;
    } catch (error) {
        document.getElementById("modelStatus").innerText = "❌ Model failed to load!";
        document.getElementById("error").innerText = error.message;
        console.error("Error loading model:", error);
    }
}

// Function to Test Model
async function testModel() {
    try {
        document.getElementById("status").innerText = "⏳ Running test...";
        let output = await transcriber("https://huggingface.co/datasets/Xenova/transformers.js-docs/resolve/main/jfk.wav");
        document.getElementById("output").innerText = "Test Passed: " + output.text;
        document.getElementById("status").innerText = "βœ… Model Test Passed!";
    } catch (error) {
        document.getElementById("status").innerText = "❌ Model Test Failed!";
        document.getElementById("error").innerText = error.message;
        console.error("Test Error:", error);
    }
}

// Function to Start Recording
async function startRecording() {
    let stream;
    try {
        // Ask for microphone access; surface a clear message if it is denied.
        stream = await navigator.mediaDevices.getUserMedia({ audio: true });
    } catch (error) {
        document.getElementById("status").innerText = "❌ Microphone access denied.";
        document.getElementById("error").innerText = error.message;
        console.error("getUserMedia error:", error);
        return;
    }
    mediaRecorder = new MediaRecorder(stream);

    mediaRecorder.ondataavailable = (event) => {
        audioChunks.push(event.data);
    };

    mediaRecorder.onstop = async () => {
        // MediaRecorder typically produces WebM/Ogg rather than WAV, so label the blob with its actual MIME type.
        let audioBlob = new Blob(audioChunks, { type: mediaRecorder.mimeType || 'audio/webm' });
        let reader = new FileReader();

        reader.onloadend = async () => {
            let audioURL = reader.result;
            document.getElementById("status").innerText = "⏳ Transcribing...";

            try {
                let output = await transcriber(audioURL);
                document.getElementById("output").innerText = output.text;
                document.getElementById("status").innerText = "βœ… Done!";
            } catch (error) {
                document.getElementById("status").innerText = "❌ Error during transcription.";
                document.getElementById("error").innerText = error.message;
                console.error(error);
            }
        };

        reader.readAsDataURL(audioBlob);
    };

    audioChunks = []; // reset the buffer before each new recording
    mediaRecorder.start();
    recording = true;
    document.getElementById("recordButton").innerText = "⏹ Stop Recording";
    document.getElementById("status").innerText = "πŸŽ™οΈ Recording...";
}

// Function to Stop Recording
function stopRecording() {
    if (mediaRecorder && recording) {
        mediaRecorder.stop();
        recording = false;
        document.getElementById("recordButton").innerText = "🎀 Start Recording";
        document.getElementById("status").innerText = "⏳ Processing audio...";
    }
}
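
// Optional refinement (a sketch, not wired into stopRecording above): to turn
// off the browser's microphone indicator once recording ends, the captured
// stream's tracks can be stopped, e.g.:
//
//   mediaRecorder.stream.getTracks().forEach((track) => track.stop());
//
// MediaRecorder exposes the original MediaStream via its `stream` property.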

// Attach event listeners
document.getElementById("recordButton").addEventListener("click", () => {
    if (!recording) {
        startRecording();
    } else {
        stopRecording();
    }
});

document.getElementById("testModel").addEventListener("click", testModel);

// Load model on page start
loadModel();