import os
import requests
import gradio as gr
# Get Groq API key from environment variable
groq_api_key = os.environ.get("GROQ_API_KEY")
if not groq_api_key:
    raise ValueError("Please set the GROQ_API_KEY in the Hugging Face Space secrets.")
# Groq API configuration
url = "https://api.groq.com/openai/v1/chat/completions"
headers = {
    "Authorization": f"Bearer {groq_api_key}"
}
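# Note: requests.post(..., json=body) sets the "Content-Type: application/json"
# header automatically, so only the Authorization header is needed here.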
# Prompt template
template = """
You are a friendly and professional customer service assistant for a telecom company.
Respond to the customer's issue below with empathy and clear steps, especially for roaming support.
Customer Query: {query}
Your Response:
"""
# Function to call Groq API
def generate_response(user_query):
    structured_prompt = template.format(query=user_query)
    body = {
        "model": "llama-3.1-8b-instant",
        "messages": [
            {
                "role": "user",
                "content": structured_prompt
            }
        ]
    }
    response = requests.post(url, headers=headers, json=body)
    if response.status_code == 200:
        return response.json()['choices'][0]['message']['content']
    else:
        return f"Error {response.status_code}: {response.text}"
# Gradio interface
gr.Interface(
    fn=generate_response,
    inputs=gr.Textbox(lines=4, placeholder="Describe your telecom issue..."),
    outputs=gr.Textbox(label="Groq API Response"),
    title="Zain Customer Care Support Assistant",
    description="Ask your question and get a helpful reply from our AI-powered support assistant."
).launch()
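# On a Hugging Face Space the bare .launch() above is sufficient; for local debugging,
# one possible variant is .launch(server_port=7860) to pin the port.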