const micContainer = document.querySelector('.mic-container');
const circle = document.querySelector('.circle');
const audioPlayback = document.getElementById('audioPlayback');
const transcribeButton = document.getElementById('transcribeButton');
const transcriptionResult = document.getElementById('transcriptionResult');
const loadingSpinner = document.getElementById('loadingSpinner');

let mediaRecorder;
let audioChunks = [];
let audioBlob;
let audioUrl;

// Toggle recording when the mic is clicked.
micContainer.addEventListener('click', async () => {
    if (circle.classList.contains('active')) {
        stopRecording();
    } else {
        await startRecording();
    }
});

const startRecording = async () => {
    try {
        const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
        audioChunks = []; // Reset so a previous recording doesn't leak into this one.
        mediaRecorder = new MediaRecorder(stream);

        mediaRecorder.ondataavailable = event => audioChunks.push(event.data);

        mediaRecorder.onstop = () => {
            // Note: MediaRecorder usually encodes WebM/Ogg; the 'audio/wav' type here
            // only labels the Blob and does not convert the audio format.
            audioBlob = new Blob(audioChunks, { type: 'audio/wav' });
            audioUrl = URL.createObjectURL(audioBlob);
            audioPlayback.src = audioUrl;
            audioPlayback.style.display = 'block';
            transcribeButton.style.display = 'block';
            // Release the microphone once recording has stopped.
            stream.getTracks().forEach(track => track.stop());
        };

        mediaRecorder.start();
        circle.classList.add('active');
        transcribeButton.style.display = 'none'; // Hide the transcribe button while recording
    } catch (error) {
        transcriptionResult.textContent = `Microphone access failed: ${error.message}`;
    }
};

const stopRecording = () => {
    if (mediaRecorder && mediaRecorder.state !== 'inactive') {
        mediaRecorder.stop();
    }
    circle.classList.remove('active');
};

// Upload the recorded audio to the transcription endpoint and show the result.
transcribeButton.addEventListener('click', async () => {
    if (!audioBlob) return;

    const formData = new FormData();
    formData.append('audio', audioBlob, 'recording.wav');

    loadingSpinner.style.display = 'block';
    transcriptionResult.textContent = '';

    try {
        const response = await fetch('https://jikoni-semabox.hf.space/transcribe', {
            method: 'POST',
            body: formData
        });

        if (response.ok) {
            const result = await response.json();
            transcriptionResult.textContent = result.transcription || 'No transcription available.';
        } else {
            transcriptionResult.textContent = `Error: ${response.status}`;
        }
    } catch (error) {
        transcriptionResult.textContent = `Request failed: ${error.message}`;
    } finally {
        loadingSpinner.style.display = 'none';
    }
});