Update app.py
app.py CHANGED
@@ -151,56 +151,66 @@ def use_gtts_for_text(text):
         print(f"gTTS error: {e}")
         yield None
 
-#
+# Enhanced WebRTC configuration with more STUN/TURN servers
 rtc_configuration = {
     "iceServers": [
-
+        # Google's public STUN servers
+        {"urls": ["stun:stun.l.google.com:19302", "stun:stun1.l.google.com:19302", "stun:stun2.l.google.com:19302", "stun:stun3.l.google.com:19302", "stun:stun4.l.google.com:19302"]},
+
+        # OpenRelay TURN servers (HTTP)
         {
             "urls": ["turn:openrelay.metered.ca:80"],
             "username": "openrelayproject",
             "credential": "openrelayproject"
         },
+
+        # OpenRelay TURN servers (HTTPS)
         {
             "urls": ["turn:openrelay.metered.ca:443"],
             "username": "openrelayproject",
             "credential": "openrelayproject"
         },
+
+        # OpenRelay TURN servers (TCP)
         {
             "urls": ["turn:openrelay.metered.ca:443?transport=tcp"],
             "username": "openrelayproject",
             "credential": "openrelayproject"
-        }
-    ]
+        },
+
+        # Additional public STUN servers
+        {"urls": ["stun:stun.stunprotocol.org:3478"]}
+    ],
+    "iceCandidatePoolSize": 10  # Increase the pool size
 }
 
-#
-
-stream = Stream(
-    modality="audio",
-    mode="send-receive",
-    handler=ReplyOnPause(response, input_sample_rate=16000),
-    additional_outputs_handler=lambda a, b: b,
-    additional_inputs=[chatbot],
-    additional_outputs=[chatbot],
-    rtc_configuration=rtc_configuration if get_space() else None,
-    concurrency_limit=5 if get_space() else None,
-    time_limit=90 if get_space() else None,
-    ui_args={"title": "LLM Voice Chat (Powered by DeepSeek & ElevenLabs)"}
-)
+# Set WebRTC logging level to "debug" for more information
+os.environ["WEBRTC_TRACE"] = "WEBRTC_TRACE_ALL"
 
-# Mount the Stream UI to the FastAPI app
-app = FastAPI()
-app = gr.mount_gradio_app(app, stream.ui, path="/")
+# Create Gradio interface with simple structure
+with gr.Blocks(title="LLM Voice Chat") as demo:
+    gr.Markdown("# LLM Voice Chat (Powered by DeepSeek & ElevenLabs)")
 
-#
-
-
+    with gr.Row():
+        chatbot = gr.Chatbot(type="messages", label="Chat History")
+
+    with gr.Row():
+        # Create the Stream component for handling audio
+        stream_comp = Stream(
+            modality="audio",
+            mode="send-receive",
+            handler=ReplyOnPause(response, input_sample_rate=16000),
+            additional_outputs_handler=lambda a, b: b,
+            additional_inputs=[chatbot],
+            additional_outputs=[chatbot],
+            rtc_configuration=rtc_configuration,
+            concurrency_limit=5 if get_space() else None,
+            time_limit=90 if get_space() else None,
+        )
+
+        # Make sure to render the Stream component
+        stream_comp.render()
 
-
-
-
-
-
-
-
+# For local development only
+if __name__ == "__main__" and not get_space():
+    demo.launch(share=True)