import os
import requests
import gradio as gr

# Get Groq API key from Hugging Face secrets
groq_api_key = os.environ.get("GROQ_API_KEY")
if not groq_api_key:
    raise ValueError("Please set the GROQ_API_KEY in the Hugging Face Space secrets.")

# Groq API configuration
url = "https://api.groq.com/openai/v1/chat/completions"
headers = {
    "Authorization": f"Bearer {groq_api_key}"
}

# Prompt template
template = """
You are a friendly and professional customer service assistant for a telecom company.
Respond to the customer's issue below with empathy and clear steps, especially for roaming support.

Customer Query:
{query}

Your Response:
"""

# Core function to query the API
def generate_response(user_query):
    structured_prompt = template.format(query=user_query)
    body = {
        "model": "llama-3.1-8b-instant",
        "messages": [{"role": "user", "content": structured_prompt}]
    }
    response = requests.post(url, headers=headers, json=body)
    if response.status_code == 200:
        return response.json()['choices'][0]['message']['content'], "✅ Success"
    else:
        return f"Error {response.status_code}: {response.text}", "❌ Failed"

# Gradio app using Blocks layout
with gr.Blocks() as demo:
    gr.Markdown("## 📡 Telecom Support Assistant powered by Groq API")
    gr.Markdown(
        "Enter your customer service query below. The AI assistant will respond "
        "with a helpful and empathetic reply, especially for **roaming issues**."
    )

    with gr.Row():
        with gr.Column(scale=2):
            user_input = gr.Textbox(
                lines=5,
                label="Customer Query",
                placeholder="e.g., My SIM card stopped working while traveling abroad."
            )
            submit_btn = gr.Button("Generate Response")
        with gr.Column(scale=3):
            status_output = gr.Textbox(label="Status", interactive=False)
            response_output = gr.Textbox(label="AI Response", lines=10)

    submit_btn.click(
        fn=generate_response,
        inputs=user_input,
        outputs=[response_output, status_output]
    )

demo.launch()