Update static/js/design.js
static/js/design.js  +52 -79  CHANGED
@@ -1,94 +1,67 @@
 let mediaRecorder;
-let
-    if (!isRecording) {
-        startRecording();
-    } else {
         stopRecording();
     }
 });

-    navigator.mediaDevices.getUserMedia({ audio: true })
-            // Update button text and style
-            document.getElementById('recordButton').textContent = 'Stop';
-            document.getElementById('recordButton').classList.add('recording');
-            document.getElementById('recordStatus').textContent = 'Recording...';
-            document.getElementById('transcribeContainer').style.display = 'none'; // Hide Transcribe button initially
-            isRecording = true;
-        })
-        .catch(function (err) {
-            console.error('Error accessing media devices: ', err);
-        });
-}

     mediaRecorder.stop();

-        recordedChunks = [];
-        const audioURL = window.URL.createObjectURL(blob);
-        // Optionally, add a debug audio player to check if recording is valid
-        const debugAudio = document.createElement('audio');
-        debugAudio.controls = true;
-        debugAudio.src = audioURL;
-        document.body.appendChild(debugAudio);
-        document.getElementById('recordButton').classList.remove('recording');
-        document.getElementById('recordStatus').textContent = 'Tap to Record';
-        document.getElementById('transcribeContainer').style.display = 'block'; // Show Transcribe button
     });

-        console.log(`${key}: ${value}`);
     }
-    fetch('https://jikoni-semabox.hf.space/transcribe', {
-        method: 'POST',
-        body: formData
-    })
-    .then(response => response.json())
-    .then(data => {
-        console.log(data); // Debugging statement
-        document.getElementById('output').textContent = data.transcription || 'No transcription available';
-    })
-    .catch(error => {
-        console.error('Error during transcription:', error);
-    });
-}
+const micContainer = document.querySelector('.mic-container');
+const circle = document.querySelector('.circle');
+const audioPlayback = document.getElementById('audioPlayback');
+const transcribeButton = document.getElementById('transcribeButton');
+const transcriptionResult = document.getElementById('transcriptionResult');
+const loadingSpinner = document.getElementById('loadingSpinner');
+
 let mediaRecorder;
+let audioChunks = [];
+let audioBlob;
+let audioUrl;

+micContainer.addEventListener('click', async () => {
+    if (circle.classList.contains('active')) {
         stopRecording();
+    } else {
+        await startRecording();
     }
 });

+const startRecording = async () => {
+    const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
+    mediaRecorder = new MediaRecorder(stream);
+    mediaRecorder.ondataavailable = event => audioChunks.push(event.data);
+    mediaRecorder.onstop = () => {
+        audioBlob = new Blob(audioChunks, { type: 'audio/wav' });
+        audioUrl = URL.createObjectURL(audioBlob);
+        audioPlayback.src = audioUrl;
+        audioPlayback.style.display = 'block';
+        transcribeButton.style.display = 'block';
+    };
+    mediaRecorder.start();
+    circle.classList.add('active');
+};

+const stopRecording = () => {
     mediaRecorder.stop();
+    circle.classList.remove('active');
+};

+transcribeButton.addEventListener('click', async () => {
+    if (!audioBlob) return;

+    const formData = new FormData();
+    formData.append('audio', audioBlob, 'recording.wav');

+    loadingSpinner.style.display = 'block';
+    transcriptionResult.textContent = '';

+    try {
+        const response = await fetch('https://jikoni-semabox.hf.space/transcribe', {
+            method: 'POST',
+            body: formData
+        });

+        if (response.ok) {
+            const result = await response.json();
+            transcriptionResult.textContent = result.transcription || 'No transcription available.';
+        } else {
+            transcriptionResult.textContent = `Error: ${response.status}`;
+        }
+    } catch (error) {
+        transcriptionResult.textContent = `Request failed: ${error.message}`;
+    } finally {
+        loadingSpinner.style.display = 'none';
     }
+});
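One note for anyone reusing the new handler: `audioChunks` is only ever appended to, so a second recording in the same session would also contain the chunks from the first take. A minimal sketch of one way to reset the buffer at the start of each recording (not part of this commit; the variable and element names are taken from the code above):

// Sketch only: clear chunks left over from a previous take before recording again.
const startRecording = async () => {
    audioChunks = [];
    const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
    mediaRecorder = new MediaRecorder(stream);
    mediaRecorder.ondataavailable = event => audioChunks.push(event.data);
    mediaRecorder.onstop = () => {
        audioBlob = new Blob(audioChunks, { type: 'audio/wav' });
        audioUrl = URL.createObjectURL(audioBlob);
        audioPlayback.src = audioUrl;
        audioPlayback.style.display = 'block';
        transcribeButton.style.display = 'block';
    };
    mediaRecorder.start();
    circle.classList.add('active');
};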