Tri4 committed on
Commit 123aebe · verified · 1 Parent(s): 0c8153a

Update static/js/design.js

Files changed (1)
  1. static/js/design.js +23 -28
static/js/design.js CHANGED
@@ -1,16 +1,15 @@
 let isRecording = false;
-let recorder;
-let audioStream;
+let mediaRecorder;
 let recordedChunks = [];
 
-// Get the elements
+// Get elements
 const micContainer = document.getElementsByClassName('mic-container')[0];
 const circle = micContainer.getElementsByClassName('circle')[0];
 const outputContainer = document.getElementById('output');
-const audioPlayer = document.getElementById('audioPlayer');
 const audioPlayerContainer = document.getElementById('audioPlayerContainer');
+const audioPlayer = document.getElementById('audioPlayer');
 
-// Handle the click event for the microphone container
+// Handle click event for the microphone container
 micContainer.addEventListener('click', function () {
     if (!isRecording) {
         startRecording();
@@ -22,13 +21,17 @@ micContainer.addEventListener('click', function () {
 function startRecording() {
     navigator.mediaDevices.getUserMedia({ audio: true })
         .then(function (stream) {
-            audioStream = stream;
-            recorder = new Recorder(stream, { mimeType: 'audio/wav' });
-            recorder.record();
+            mediaRecorder = new MediaRecorder(stream);
+            mediaRecorder.start();
+
+            mediaRecorder.ondataavailable = function (e) {
+                recordedChunks.push(e.data);
+            };
 
             // Update UI for recording
             circle.classList.add('active');
             outputContainer.textContent = 'Recording...';
+            audioPlayerContainer.style.display = 'none'; // Hide audio player initially
             isRecording = true;
         })
         .catch(function (err) {
@@ -37,53 +40,45 @@ function startRecording() {
 }
 
 function stopRecording() {
-    recorder.stop();
-    audioStream.getTracks().forEach(track => track.stop()); // Stop the audio stream
+    mediaRecorder.stop();
 
-    recorder.exportWAV(function (blob) {
+    mediaRecorder.onstop = function () {
+        const blob = new Blob(recordedChunks, { type: 'audio/wav' });
+        recordedChunks = [];
         const audioURL = window.URL.createObjectURL(blob);
 
         // Update UI after recording
         circle.classList.remove('active');
         outputContainer.textContent = 'Click to Transcribe';
 
-        // Show audio player
+        // Display audio player
         audioPlayer.src = audioURL;
         audioPlayerContainer.style.display = 'block';
 
-        // Set up transcription
-        outputContainer.removeEventListener('click', handleTranscribeClick);
-        outputContainer.addEventListener('click', function() {
-            transcribeAudio(blob);
-        });
-
+        // Handle transcription
+        document.getElementById('output').addEventListener('click', handleTranscribeClick, { once: true });
         isRecording = false;
-    });
+    };
 }
 
 function handleTranscribeClick() {
     transcribeAudio();
 }
 
-function transcribeAudio(blob) {
+function transcribeAudio() {
+    const blob = new Blob(recordedChunks, { type: 'audio/wav' });
     const formData = new FormData();
     formData.append('audio', blob, 'audio.wav');
 
-    fetch('https://jikoni-semabox.hf.space/transcribe', { // Update the URL as needed
+    fetch('https://jikoni-semabox.hf.space/transcribe', { // Replace with your API endpoint
         method: 'POST',
         body: formData
     })
     .then(response => response.json())
     .then(data => {
-        console.log('API response:', data); // Log the API response
-        if (data && data.transcription) {
-            outputContainer.textContent = `Transcription: ${data.transcription}`;
-        } else {
-            outputContainer.textContent = 'Transcription failed. Check console for details.';
-        }
+        outputContainer.textContent = `Transcription: ${data.transcription || 'No transcription available'}`;
     })
     .catch(error => {
         console.error('Error:', error);
-        outputContainer.textContent = 'An error occurred. Check console for details.';
     });
 }
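
Review note: the old stopRecording() explicitly released the microphone with audioStream.getTracks().forEach(track => track.stop()); the new version only calls mediaRecorder.stop(), so the capture stream stays live and the browser's recording indicator can remain on. Below is a minimal sketch of how the tracks could still be released inside the onstop handler via the recorder's standard stream property; the function name stopRecordingAndReleaseMic is illustrative and not part of the commit, and the sketch assumes the mediaRecorder, recordedChunks, and UI elements declared in design.js.

// Sketch only: stop recording, then release the microphone.
function stopRecordingAndReleaseMic() {
    mediaRecorder.onstop = function () {
        const blob = new Blob(recordedChunks, { type: mediaRecorder.mimeType });

        // Release the capture device; this restores the behaviour of the removed
        // audioStream.getTracks().forEach(track => track.stop()) line.
        mediaRecorder.stream.getTracks().forEach(track => track.stop());

        // ...create the object URL and update the UI as stopRecording() does...
    };
    mediaRecorder.stop();
}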
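
Review note: as committed, the onstop handler clears recordedChunks immediately after building its blob, but transcribeAudio() later rebuilds a blob from that same array, so the 'audio' form field posted to /transcribe is likely to be empty by the time the user clicks. Also, MediaRecorder encodes into whatever container the browser chooses (commonly audio/webm or audio/ogg); labelling the blob audio/wav does not make it WAV, and whether the jikoni-semabox.hf.space endpoint accepts those containers is not visible from this diff. The sketch below keeps the finished blob around and posts it directly; lastRecordingBlob and the recording.webm filename are assumptions for illustration, and the globals (mediaRecorder, recordedChunks, outputContainer) are the ones declared in design.js.

// Sketch only: hold on to the finished blob instead of rebuilding it
// from the already-cleared recordedChunks array.
let lastRecordingBlob = null;

mediaRecorder.onstop = function () {
    lastRecordingBlob = new Blob(recordedChunks, { type: mediaRecorder.mimeType || 'audio/webm' });
    recordedChunks = [];
    // ...update the UI as stopRecording() does...
};

function transcribeAudio() {
    if (!lastRecordingBlob) {
        outputContainer.textContent = 'No recording available yet.';
        return;
    }
    const formData = new FormData();
    // The filename extension is a guess; it should match the blob's actual type.
    formData.append('audio', lastRecordingBlob, 'recording.webm');

    fetch('https://jikoni-semabox.hf.space/transcribe', { method: 'POST', body: formData })
        .then(response => response.json())
        .then(data => {
            outputContainer.textContent = `Transcription: ${data.transcription || 'No transcription available'}`;
        })
        .catch(error => console.error('Error:', error));
}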