Spaces:
Build error
import os
import requests
import gradio as gr

# Retrieve the API key from the environment variable
groq_api_key = os.getenv("GROQ_API_KEY")
if not groq_api_key:
    raise ValueError("GROQ_API_KEY is missing! Set it in the Hugging Face Spaces 'Secrets'.")

# Define the API endpoint and headers
url = "https://api.groq.com/openai/v1/chat/completions"
headers = {"Authorization": f"Bearer {groq_api_key}"}

# Function to interact with the Groq API
def chat_with_groq(user_input):
    body = {
        # "model": "llama-3.1-8b-instant",
        "model": "deepseek-r1-distill-qwen-32b",
        "messages": [{"role": "user", "content": user_input}]
    }
    response = requests.post(url, headers=headers, json=body)
    if response.status_code == 200:
        return response.json()['choices'][0]['message']['content']
    else:
        return f"Error: {response.json()}"

# Create the Gradio interface
interface = gr.Interface(
    fn=chat_with_groq,
    inputs=gr.Textbox(lines=2, placeholder="Ask me anything..."),
    outputs=gr.Textbox(),
| title="Chat with Groq AI (Llama 3.1-8B)", | |
| description="Type your question below and get a response powered by Groq's Llama 3.1-8B model." | |
)

# Launch the Gradio app
if __name__ == "__main__":
    interface.launch()
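
Nothing in app.py itself should fail at build time, so a Spaces build error here usually points at the environment rather than the script. A common culprit is a missing or incomplete requirements.txt; this is an assumption, since the build log isn't shown, but a minimal one for this app would simply list the two third-party packages it imports:

# requirements.txt (assumed, not from the original post)
requests
gradio

If the build then succeeds but the Space fails at runtime instead, the usual suspects are the GROQ_API_KEY secret not being set (which raises the ValueError above) or the requested model id not being available on Groq's API.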