Tri4 committed on
Commit
c4dd4fa
·
verified ·
1 Parent(s): be16eb7

Update static/js/design.js

Browse files
Files changed (1) hide show
  1. static/js/design.js +52 -79
static/js/design.js CHANGED
@@ -1,94 +1,67 @@
1
- let isRecording = false;
 
 
 
 
 
 
2
  let mediaRecorder;
3
- let recordedChunks = [];
 
 
4
 
5
- // Handles the recording button click event
6
- document.getElementById('recordButton').addEventListener('click', function () {
7
- if (!isRecording) {
8
- startRecording();
9
- } else {
10
  stopRecording();
 
 
11
  }
12
  });
13
 
14
- function startRecording() {
15
- navigator.mediaDevices.getUserMedia({ audio: true })
16
- .then(function (stream) {
17
- mediaRecorder = new MediaRecorder(stream, { mimeType: 'audio/webm' });
18
-
19
- mediaRecorder.ondataavailable = function (e) {
20
- if (e.data.size > 0) {
21
- recordedChunks.push(e.data);
22
- }
23
- };
24
-
25
- mediaRecorder.start();
26
- console.log("Recording started");
27
-
28
- // Update button text and style
29
- document.getElementById('recordButton').textContent = 'Stop';
30
- document.getElementById('recordButton').classList.add('recording');
31
- document.getElementById('recordStatus').textContent = 'Recording...';
32
- document.getElementById('transcribeContainer').style.display = 'none'; // Hide Transcribe button initially
33
- isRecording = true;
34
- })
35
- .catch(function (err) {
36
- console.error('Error accessing media devices: ', err);
37
- });
38
- }
39
 
40
- function stopRecording() {
41
  mediaRecorder.stop();
42
- console.log("Recording stopped");
 
43
 
44
- mediaRecorder.onstop = function () {
45
- const blob = new Blob(recordedChunks, { type: 'audio/webm' });
46
- recordedChunks = [];
47
- const audioURL = window.URL.createObjectURL(blob);
48
 
49
- console.log(`Blob size: ${blob.size} bytes`); // Debugging statement
50
- console.log(`Audio URL: ${audioURL}`); // Debugging statement
51
-
52
- // Optionally, add a debug audio player to check if recording is valid
53
- const debugAudio = document.createElement('audio');
54
- debugAudio.controls = true;
55
- debugAudio.src = audioURL;
56
- document.body.appendChild(debugAudio);
57
 
58
- // Update button text and style
59
- document.getElementById('recordButton').textContent = 'Start';
60
- document.getElementById('recordButton').classList.remove('recording');
61
- document.getElementById('recordStatus').textContent = 'Tap to Record';
62
- document.getElementById('transcribeContainer').style.display = 'block'; // Show Transcribe button
63
 
64
- // Set up the Transcribe button click event
65
- document.getElementById('transcribeButton').addEventListener('click', function () {
66
- transcribeAudio(blob);
 
67
  });
68
 
69
- isRecording = false;
70
- };
71
- }
72
-
73
- function transcribeAudio(blob) {
74
- const formData = new FormData();
75
- formData.append('audio', blob, 'audio.webm');
76
-
77
- // Log FormData entries for debugging
78
- for (const [key, value] of formData.entries()) {
79
- console.log(`${key}: ${value}`);
80
  }
81
-
82
- fetch('https://jikoni-semabox.hf.space/transcribe', {
83
- method: 'POST',
84
- body: formData
85
- })
86
- .then(response => response.json())
87
- .then(data => {
88
- console.log(data); // Debugging statement
89
- document.getElementById('output').textContent = data.transcription || 'No transcription available';
90
- })
91
- .catch(error => {
92
- console.error('Error during transcription:', error);
93
- });
94
- }
 
// Cached DOM references for the recorder UI.
const micContainer = document.querySelector('.mic-container');
const circle = document.querySelector('.circle');
const audioPlayback = document.getElementById('audioPlayback');
const transcribeButton = document.getElementById('transcribeButton');
const transcriptionResult = document.getElementById('transcriptionResult');
const loadingSpinner = document.getElementById('loadingSpinner');

// Recording state shared by the click handlers below.
let mediaRecorder;    // active MediaRecorder instance (set in startRecording)
let audioChunks = []; // data chunks of the in-progress recording
let audioBlob;        // most recently completed recording, built in onstop
let audioUrl;         // object URL backing the playback element
 
// Toggle recording: the `.active` class on the circle doubles as the
// "currently recording" flag.
micContainer.addEventListener('click', async () => {
  const isRecordingNow = circle.classList.contains('active');
  if (!isRecordingNow) {
    await startRecording();
  } else {
    stopRecording();
  }
});
 
// Request microphone access and begin capturing audio.
// On stop, builds the blob, points the playback element at it, and reveals
// the transcribe button.
const startRecording = async () => {
  let stream;
  try {
    stream = await navigator.mediaDevices.getUserMedia({ audio: true });
  } catch (err) {
    // Permission denied or no input device — report it instead of leaving an
    // unhandled rejection in the click handler.
    console.error('Could not access microphone:', err);
    return;
  }

  // Fix: start each session with an empty chunk list; otherwise chunks from
  // the previous recording leak into the next blob.
  audioChunks = [];

  mediaRecorder = new MediaRecorder(stream);
  mediaRecorder.ondataavailable = (event) => audioChunks.push(event.data);
  mediaRecorder.onstop = () => {
    // Release the microphone so the browser's recording indicator turns off.
    stream.getTracks().forEach((track) => track.stop());

    // NOTE(review): MediaRecorder usually emits webm/ogg, not WAV — the
    // 'audio/wav' label matches the filename the server expects, but confirm
    // the backend handles the real container format.
    audioBlob = new Blob(audioChunks, { type: 'audio/wav' });

    // Revoke the previous object URL before replacing it so stale blob
    // references don't accumulate across recordings.
    if (audioUrl) URL.revokeObjectURL(audioUrl);
    audioUrl = URL.createObjectURL(audioBlob);

    audioPlayback.src = audioUrl;
    audioPlayback.style.display = 'block';
    transcribeButton.style.display = 'block';
  };

  mediaRecorder.start();
  circle.classList.add('active');
};
// Stop the active recording session and clear the recording indicator.
const stopRecording = () => {
  // Guard: calling stop() on a missing or already-inactive recorder throws
  // InvalidStateError (e.g. a stray click before any recording started).
  if (mediaRecorder && mediaRecorder.state !== 'inactive') {
    mediaRecorder.stop();
  }
  circle.classList.remove('active');
};
 
// Upload the captured audio to the transcription service and render the
// returned text (or an error message) into the result element.
transcribeButton.addEventListener('click', async () => {
  // Nothing recorded yet — ignore the click.
  if (!audioBlob) return;

  const formData = new FormData();
  formData.append('audio', audioBlob, 'recording.wav');

  // Fix: prevent duplicate submissions while a request is in flight.
  transcribeButton.disabled = true;
  loadingSpinner.style.display = 'block';
  transcriptionResult.textContent = '';

  try {
    const response = await fetch('https://jikoni-semabox.hf.space/transcribe', {
      method: 'POST',
      body: formData
    });

    if (response.ok) {
      const result = await response.json();
      // Empty transcription intentionally falls back to the placeholder text.
      transcriptionResult.textContent = result.transcription || 'No transcription available.';
    } else {
      transcriptionResult.textContent = `Error: ${response.status}`;
    }
  } catch (error) {
    transcriptionResult.textContent = `Request failed: ${error.message}`;
  } finally {
    loadingSpinner.style.display = 'none';
    transcribeButton.disabled = false;
  }
});