# Install required packages (in a Colab cell: !pip install -q gradio)
import os
import requests
import gradio as gr
from google.colab import userdata  # Secure storage for API keys in Colab

# Read the Groq API key stored under Colab's "Secrets" tab
groq_api_key = userdata.get("GROQ_API_KEY")
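# Note: userdata.get() only works inside Colab. If you run this script elsewhere,
# one alternative (an assumption, not part of the original flow) is to read the key
# from an environment variable instead:
# groq_api_key = os.environ.get("GROQ_API_KEY")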
# Define the URL for the Groq API endpoint
url = "https://api.groq.com/openai/v1/chat/completions"

# Set the headers for the API request
headers = {
    "Authorization": f"Bearer {groq_api_key}"
}
# Function to interact with Groq API
def chat_with_groq(user_input):
    body = {
        "model": "llama-3.1-8b-instant",
        "messages": [
            {"role": "user", "content": user_input}
        ]
    }
    response = requests.post(url, headers=headers, json=body)
    if response.status_code == 200:
        return response.json()['choices'][0]['message']['content']
    else:
        return f"Error: {response.json()}"
# Create Gradio interface
interface = gr.Interface(
    fn=chat_with_groq,
    inputs=gr.Textbox(lines=2, placeholder="Ask me anything..."),
    outputs=gr.Textbox(),
    title="DDS Chat with Groq AI (Llama 3.1-8B)",
    description="Type your question below and get a response powered by Groq's Llama 3.1-8B model."
)

# Launch Gradio app
interface.launch()
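# In Colab, interface.launch(share=True) generates a temporary public URL,
# which is useful if the inline preview does not render in the notebook.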