Update static/js/design.js
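Replaces the MediaRecorder/WebM capture flow with a Recorder-based WAV export, stops the microphone stream once recording ends, shows an inline audio player for the recorded clip, and writes the transcription returned by the /transcribe endpoint (or an error message) into the output element.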
static/js/design.js CHANGED (+29 -20)
@@ -1,11 +1,14 @@
 let isRecording = false;
-let mediaRecorder;
+let recorder;
+let audioStream;
 let recordedChunks = [];

 // Get the elements
 const micContainer = document.getElementsByClassName('mic-container')[0];
 const circle = micContainer.getElementsByClassName('circle')[0];
 const outputContainer = document.getElementById('output');
+const audioPlayer = document.getElementById('audioPlayer');
+const audioPlayerContainer = document.getElementById('audioPlayerContainer');

 // Handle the click event for the microphone container
 micContainer.addEventListener('click', function () {
@@ -19,12 +22,9 @@ micContainer.addEventListener('click', function () {
 function startRecording() {
     navigator.mediaDevices.getUserMedia({ audio: true })
         .then(function (stream) {
-
-
-
-            mediaRecorder.ondataavailable = function (e) {
-                recordedChunks.push(e.data);
-            };
+            audioStream = stream;
+            recorder = new Recorder(stream, { mimeType: 'audio/wav' });
+            recorder.record();

             // Update UI for recording
             circle.classList.add('active');
@@ -37,44 +37,53 @@ function startRecording() {
 }

 function stopRecording() {
-
+    recorder.stop();
+    audioStream.getTracks().forEach(track => track.stop()); // Stop the audio stream

-
-    const blob = new Blob(recordedChunks, { type: 'audio/webm' });
-    recordedChunks = [];
+    recorder.exportWAV(function (blob) {
     const audioURL = window.URL.createObjectURL(blob);

     // Update UI after recording
     circle.classList.remove('active');
     outputContainer.textContent = 'Click to Transcribe';

-    //
+    // Show audio player
+    audioPlayer.src = audioURL;
+    audioPlayerContainer.style.display = 'block';
+
+    // Set up transcription
     outputContainer.removeEventListener('click', handleTranscribeClick);
-    outputContainer.addEventListener('click', handleTranscribeClick);
+    outputContainer.addEventListener('click', function() {
+        transcribeAudio(blob);
+    });

     isRecording = false;
-    };
+    });
 }

 function handleTranscribeClick() {
     transcribeAudio();
 }

-function transcribeAudio() {
-    const blob = new Blob(recordedChunks, { type: 'audio/webm' });
+function transcribeAudio(blob) {
     const formData = new FormData();
-    formData.append('audio', blob, 'audio.webm');
+    formData.append('audio', blob, 'audio.wav');

-
-    fetch('https://jikoni-semabox.hf.space/transcribe', { // https://tri4-semalab.hf.space/transcribe
+    fetch('https://jikoni-semabox.hf.space/transcribe', { // Update the URL as needed
         method: 'POST',
         body: formData
     })
     .then(response => response.json())
     .then(data => {
-
+        console.log('API response:', data); // Log the API response
+        if (data && data.transcription) {
+            outputContainer.textContent = `Transcription: ${data.transcription}`;
+        } else {
+            outputContainer.textContent = 'Transcription failed. Check console for details.';
+        }
     })
     .catch(error => {
         console.error('Error:', error);
+        outputContainer.textContent = 'An error occurred. Check console for details.';
     });
 }
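Note on assumptions: the updated script depends on a global Recorder constructor providing record(), stop(), and exportWAV() (a Recorder.js-style wrapper), and on page elements with the ids audioPlayer and audioPlayerContainer; both must be present in the Space's HTML for this change to work. If no such recorder library were bundled, a rough equivalent could be sketched with the browser's built-in MediaRecorder API, which the previous version of this file already used. The sketch below is illustrative only: the function names are hypothetical, and it produces WebM/Ogg audio rather than WAV, so the /transcribe endpoint would need to accept that format (or the 'audio.wav' filename in transcribeAudio would be misleading).

// Illustrative only: a MediaRecorder-based variant of the recording flow.
// Function names here are hypothetical; the element variables (circle, audioPlayer,
// audioPlayerContainer) and transcribeAudio() come from design.js above.
let mediaRecorder;
let chunks = [];

function startRecordingWithMediaRecorder() {
    navigator.mediaDevices.getUserMedia({ audio: true })
        .then(function (stream) {
            mediaRecorder = new MediaRecorder(stream);
            mediaRecorder.ondataavailable = function (e) {
                chunks.push(e.data);
            };
            mediaRecorder.onstop = function () {
                // MediaRecorder yields WebM/Ogg, not WAV.
                const blob = new Blob(chunks, { type: mediaRecorder.mimeType });
                chunks = [];
                audioPlayer.src = window.URL.createObjectURL(blob);
                audioPlayerContainer.style.display = 'block';
                transcribeAudio(blob); // or wire up the 'Click to Transcribe' step as above
            };
            mediaRecorder.start();
            circle.classList.add('active');
        });
}

function stopRecordingWithMediaRecorder() {
    mediaRecorder.stop(); // triggers onstop above
    // Release the microphone, mirroring audioStream.getTracks(...) in the new code.
    mediaRecorder.stream.getTracks().forEach(track => track.stop());
    circle.classList.remove('active');
    isRecording = false;
}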
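A further hedged note on the transcription wiring: because the click listener registered inside exportWAV is an anonymous function, the preceding removeEventListener('click', handleTranscribeClick) no longer matches anything, so each finished recording can stack an extra handler, and handleTranscribeClick itself now calls transcribeAudio() without a blob. A minimal sketch of keeping one named handler, where latestBlob is a hypothetical new variable and the other names come from design.js:

// Hedged sketch: a single named click handler fed the most recent recording.
let latestBlob = null;

function handleTranscribeClick() {
    // Transcribe whatever was recorded most recently.
    if (latestBlob) {
        transcribeAudio(latestBlob);
    }
}

// Inside recorder.exportWAV(function (blob) { ... }):
//     latestBlob = blob;
//     outputContainer.removeEventListener('click', handleTranscribeClick);
//     outputContainer.addEventListener('click', handleTranscribeClick);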