Update static/js/design.js
static/js/design.js  CHANGED  (+44 -27)
@@ -2,13 +2,8 @@ let isRecording = false;
 let mediaRecorder;
 let recordedChunks = [];
 
-
-
-const outputContainer = document.getElementById('output');
-const audioPlayerContainer = document.getElementById('audioPlayerContainer');
-const audioPlayer = document.getElementById('audioPlayer');
-
-micContainer.addEventListener('click', function () {
+// Handles the recording button click event
+document.getElementById('recordButton').addEventListener('click', function () {
     if (!isRecording) {
         startRecording();
     } else {
@@ -20,58 +15,80 @@ function startRecording() {
     navigator.mediaDevices.getUserMedia({ audio: true })
         .then(function (stream) {
             mediaRecorder = new MediaRecorder(stream, { mimeType: 'audio/webm' });
-            mediaRecorder.start();
 
             mediaRecorder.ondataavailable = function (e) {
-                recordedChunks.push(e.data);
+                if (e.data.size > 0) {
+                    recordedChunks.push(e.data);
+                }
             };
 
-
-
-
+            mediaRecorder.start();
+            console.log("Recording started");
+
+            // Update button text and style
+            document.getElementById('recordButton').textContent = 'Stop';
+            document.getElementById('recordButton').classList.add('recording');
+            document.getElementById('recordStatus').textContent = 'Recording...';
+            document.getElementById('transcribeContainer').style.display = 'none'; // Hide Transcribe button initially
             isRecording = true;
         })
         .catch(function (err) {
-            console.error('
+            console.error('Error accessing media devices: ', err);
         });
 }
 
 function stopRecording() {
     mediaRecorder.stop();
+    console.log("Recording stopped");
 
     mediaRecorder.onstop = function () {
         const blob = new Blob(recordedChunks, { type: 'audio/webm' });
         recordedChunks = [];
         const audioURL = window.URL.createObjectURL(blob);
 
-
-
-
-
+        console.log(`Blob size: ${blob.size} bytes`); // Debugging statement
+        console.log(`Audio URL: ${audioURL}`); // Debugging statement
+
+        // Optionally, add a debug audio player to check if recording is valid
+        const debugAudio = document.createElement('audio');
+        debugAudio.controls = true;
+        debugAudio.src = audioURL;
+        document.body.appendChild(debugAudio);
+
+        // Update button text and style
+        document.getElementById('recordButton').textContent = 'Start';
+        document.getElementById('recordButton').classList.remove('recording');
+        document.getElementById('recordStatus').textContent = 'Tap to Record';
+        document.getElementById('transcribeContainer').style.display = 'block'; // Show Transcribe button
+
+        // Set up the Transcribe button click event
+        document.getElementById('transcribeButton').addEventListener('click', function () {
+            transcribeAudio(blob);
+        });
 
-        outputContainer.addEventListener('click', handleTranscribeClick, { once: true });
         isRecording = false;
     };
 }
 
-function handleTranscribeClick() {
-    transcribeAudio();
-}
-
-function transcribeAudio() {
-    const blob = new Blob(recordedChunks, { type: 'audio/webm' });
+function transcribeAudio(blob) {
     const formData = new FormData();
     formData.append('audio', blob, 'audio.webm');
 
-
+    // Log FormData entries for debugging
+    for (const [key, value] of formData.entries()) {
+        console.log(`${key}: ${value}`);
+    }
+
+    fetch('https://jikoni-semabox.hf.space/transcribe', {
         method: 'POST',
         body: formData
     })
         .then(response => response.json())
         .then(data => {
-
+            console.log(data); // Debugging statement
+            document.getElementById('output').textContent = data.transcription || 'No transcription available';
        })
        .catch(error => {
-            console.error('Error:', error);
+            console.error('Error during transcription:', error);
        });
 }
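
The updated script wires listeners to elements with the IDs recordButton, recordStatus, transcribeContainer, transcribeButton, and output. A quick sanity check along these lines (hypothetical, not part of this commit) makes that DOM contract explicit; it can be pasted into the browser console on the Space:

// Hypothetical check, not in the commit: verify the element IDs design.js relies on.
const requiredIds = ['recordButton', 'recordStatus', 'transcribeContainer', 'transcribeButton', 'output'];
for (const id of requiredIds) {
    if (!document.getElementById(id)) {
        // A missing ID would make the updated design.js throw when it wires or updates that element.
        console.warn(`design.js expects an element with id="${id}", but none was found`);
    }
}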
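
The client posts a multipart field named audio to https://jikoni-semabox.hf.space/transcribe and reads a transcription key from the JSON response. A minimal sketch for exercising that endpoint outside the browser, assuming Node 18+ (built-in fetch, FormData, and Blob) and that the endpoint behaves as the client code implies, could look like this:

// Sketch only: calls the /transcribe endpoint from Node 18+ (run as an ES module).
// The file name 'sample.webm' and the response shape { transcription: "..." } are
// assumptions taken from how design.js builds its request and reads the reply.
import { readFile } from 'node:fs/promises';

async function transcribeFile(path) {
    const formData = new FormData();
    // design.js appends the recording as a field named 'audio' with filename 'audio.webm'.
    const blob = new Blob([await readFile(path)], { type: 'audio/webm' });
    formData.append('audio', blob, 'audio.webm');

    const response = await fetch('https://jikoni-semabox.hf.space/transcribe', {
        method: 'POST',
        body: formData
    });
    const data = await response.json();
    console.log(data.transcription || 'No transcription available');
}

transcribeFile('sample.webm').catch(console.error);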