Commit 24cbf9c · 1 Parent(s): 673aba3
Add tabs
app.py CHANGED
@@ -137,15 +137,20 @@ with gr.Blocks(css = css) as demo:
 
     with gr.Column():
         #gr.Markdown(""" ### Record audio """)
-
-
+        with gr.Tab("Record Audio"):
+            audio_input_r = gr.Audio(label = 'Record Audio Input',source="microphone",type="filepath")
+            transcribe_audio_r = gr.Button('Transcribe')
+
+        with gr.Tab("Upload Audio as File"):
+            audio_input_u = gr.Audio(label = 'Upload Audio',source="upload",type="filepath")
+            transcribe_audio_u = gr.Button('Transcribe')
 
         with gr.Row():
             transcript_output = gr.Textbox(label="Transcription in the language you spoke", lines = 3)
             emotion_output = gr.Textbox(label = "Detected Emotion")
 
+        transcribe_audio_r.click(translate_and_classify, inputs = audio_input_r, outputs = [transcript_output,emotion_output])
+        transcribe_audio_u.click(translate_and_classify, inputs = audio_input_u, outputs = [transcript_output,emotion_output])
         gr.HTML('''
         <div class="footer">
             <p>Whisper Model by <a href="https://github.com/openai/whisper" style="text-decoration: underline;" target="_blank">OpenAI</a> -
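
For context, the commit introduces a tabbed input area inside the existing gr.Column(): a "Record Audio" tab with a microphone input and an "Upload Audio as File" tab with a file-upload input, each with its own Transcribe button, and both buttons wired to the same translate_and_classify handler and the shared output textboxes. The following is a minimal standalone sketch of that layout, not the full app.py: it assumes Gradio 3.x (where gr.Audio still accepts a source argument) and uses a stub translate_and_classify, since the real function is defined elsewhere in the file.

# Minimal sketch of the tabbed layout added by this commit (assumes Gradio 3.x).
import gradio as gr

def translate_and_classify(audio_path):
    # Stub standing in for the real handler in app.py, which transcribes the
    # audio with Whisper and classifies the emotion of the transcript.
    return "transcript goes here", "neutral"

with gr.Blocks() as demo:
    with gr.Column():
        with gr.Tab("Record Audio"):
            audio_input_r = gr.Audio(label='Record Audio Input', source="microphone", type="filepath")
            transcribe_audio_r = gr.Button('Transcribe')

        with gr.Tab("Upload Audio as File"):
            audio_input_u = gr.Audio(label='Upload Audio', source="upload", type="filepath")
            transcribe_audio_u = gr.Button('Transcribe')

        with gr.Row():
            transcript_output = gr.Textbox(label="Transcription in the language you spoke", lines=3)
            emotion_output = gr.Textbox(label="Detected Emotion")

        # Each tab's button passes its own audio input, but both feed the same outputs.
        transcribe_audio_r.click(translate_and_classify, inputs=audio_input_r, outputs=[transcript_output, emotion_output])
        transcribe_audio_u.click(translate_and_classify, inputs=audio_input_u, outputs=[transcript_output, emotion_output])

demo.launch()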