import os

import requests
import gradio as gr
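
# The Groq API key is read from the environment; export GROQ_API_KEY before
# launching the app.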
groq_api_key = os.getenv("GROQ_API_KEY")

url, headers = (
    "https://api.groq.com/openai/v1/chat/completions",
    {"Authorization": f"Bearer {groq_api_key}"}
)
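
# chat_with_groq() makes a single-turn request to Groq's OpenAI-compatible
# chat completions endpoint; conversation history is kept only on the Gradio
# side, so each call sends just the latest user message.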
def chat_with_groq(user_input):
    body = {
        "model": "llama-3.1-8b-instant",
        "messages": [{"role": "user", "content": user_input}]
    }
    r = requests.post(url, headers=headers, json=body)
    return (r.json()['choices'][0]['message']['content']
            if r.status_code == 200 else f"Error {r.status_code}: {r.text}")
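
# Styling hooks: the CSS ids below correspond to the elem_id values set on the
# Gradio components further down.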
CSS = """
#header-row {align-items: center; gap: 0.75rem;}
#logo {max-width: 60px; border-radius: 8px;}
#title {font-size: 2rem; font-weight: 600; margin: 0;}
#left-col {width: 64%;}
#right-col {width: 35%; border: 1px solid #e5e5e5; border-radius: 8px; padding: 1rem;}
"""

LOGO_URL = (
    "https://raw.githubusercontent.com/Decoding-Data-Science/"
    "airesidency/main/dds_logo.jpg"
)

LLM_QUESTIONS = [
    "What is Retrieval-Augmented Generation (RAG)?",
    "Explain Chain-of-Thought prompting in simple terms.",
    "How do I fine-tune an LLM on my own data?",
    "What are the security risks of LLM applications?",
    "Compare zero-shot vs few-shot prompting.",
    "What is the role of vector databases with LLMs?"
]
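
# UI layout: logo/title header, the chat column on the left, and a
# quick-question picker on the right.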
with gr.Blocks(css=CSS, title="DDS Chat with Groq AI") as demo:

    with gr.Row(elem_id="header-row"):
        gr.Image(value=LOGO_URL, elem_id="logo", show_label=False,
                 show_download_button=False)
        gr.Markdown("<div id='title'>DDS Chat with Groq AI (Llama 3.1-8B)</div>")

    gr.Markdown("Ask anything, or pick a quick question on the right.")

    with gr.Row():

        with gr.Column(elem_id="left-col"):
            chatbot = gr.Chatbot(height=450, label="Conversation")
            user_box = gr.Textbox(placeholder="Type your question…",
                                  show_label=False, lines=2)
            send_btn = gr.Button("Send", variant="primary")
            state = gr.State([])

        with gr.Column(elem_id="right-col"):
            gr.Markdown("**LLM Quick Questions**")
            question_dd = gr.Dropdown(choices=LLM_QUESTIONS,
                                      label="Select a question",
                                      interactive=True)
            gr.Markdown(
                "Pick a topic and it will populate the input box. "
                "Feel free to edit before sending."
            )
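
    # respond() appends the new (user, assistant) pair to the running history,
    # which backs both the Chatbot display and the session State.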
    def respond(user_msg, history):
        reply = chat_with_groq(user_msg)
        history = history + [(user_msg, reply)]
        return history, history
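
    # Event wiring: "Send" posts the message and updates the chat, then a
    # follow-up callback clears the textbox; choosing a dropdown question only
    # pre-fills the textbox so it can be edited before sending.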
    send_btn.click(
        fn=respond,
        inputs=[user_box, state],
        outputs=[chatbot, state],
        queue=False
    ).then(
        lambda: "", None, user_box, queue=False
    )

    question_dd.change(
        lambda q: gr.update(value=q),
        inputs=question_dd,
        outputs=user_box,
        queue=False
    )

demo.launch()
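# Optional: demo.launch(share=True) also creates a temporary public link.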