# NOTE(review): the lines originally here were non-code scraper residue
# (file size, commit hash, and a run of line numbers) — not part of the program.
import os
from groq import Groq
# Credentials: the Groq API key must be supplied via the environment.
GROQ_API_KEY = os.getenv("GROQ_API_KEY")

if not GROQ_API_KEY:
    # Fail fast at import time rather than on the first API call.
    raise RuntimeError(
        "GROQ_API_KEY environment variable not set. Please set it to your Groq API key."
    )

# Single module-wide client, reused by every request made from this module.
groq_client = Groq(api_key=GROQ_API_KEY)
def get_llm_response(user_text, context, emotion, tone_instruction):
    """Answer *user_text* via the Groq chat API, grounded in *context*.

    The caller's detected *emotion* and a free-form *tone_instruction* are
    folded into the prompt so the model can adapt its register.

    Returns the assistant's reply text (a string).
    """
    # Assemble the single-turn prompt. The join over these segments yields
    # exactly the original triple-quoted template, including its leading
    # and trailing newlines (the empty first/last elements).
    prompt_segments = [
        "",
        "You are a helpful and emotionally aware assistant.",
        f"The user's emotional state is: {emotion}.",
        tone_instruction,
        "Using the following context, answer the user's question:",
        "---",
        context,
        "---",
        f"Question: {user_text}",
        "",
    ]
    request_prompt = "\n".join(prompt_segments)

    # One-shot (non-streaming) completion against the module-level client.
    response = groq_client.chat.completions.create(
        model="meta-llama/llama-4-scout-17b-16e-instruct",
        messages=[{"role": "user", "content": request_prompt}],
        temperature=1,
        max_completion_tokens=1024,
        top_p=1,
        stream=False,
        stop=None,
    )
    return response.choices[0].message.content