welcometoFightclub committed on
Commit d83d3e2 · verified · 1 Parent(s): 303fc54

Update app.py

Files changed (1)
  1. app.py +3 -3
app.py CHANGED
@@ -32,7 +32,7 @@ def predict_text_emotion(text):
     prompt = f"The user has entered text '{text}' classify user's emotion as happy or sad or anxious or angry. Respond in only one word."
     try:
         completion = client.chat.completions.create(
-            model="llama-3.2-90b-vision-preview",
+            model="llama3-70b-8192",
             messages=[{"role": "user", "content": prompt}],
             temperature=1,
             max_completion_tokens=64,
@@ -78,7 +78,7 @@ def detect_facial_emotion():
         return "neutral"
     try:
         completion = client.chat.completions.create(
-            model="llama-3.2-90b-vision-preview",
+            model="llama3-70b-8192",
             messages=[
                 {
                     "role": "user",
@@ -106,7 +106,7 @@ def generate_response(user_input, emotion):
     prompt = f"The user is feeling {emotion}. They said: '{user_input}'. Respond in a friendly caring manner with the user so the user feels being loved."
     try:
         completion = client.chat.completions.create(
-            model="llama-3.2-90b-vision-preview",
+            model="llama3-70b-8192",
             messages=[{"role": "user", "content": prompt}],
             temperature=1,
             max_completion_tokens=64,
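
For reference, a minimal sketch of what one of the updated calls looks like in full after this commit. The diff does not show how `client` is constructed, so the `groq` import, the `Groq(api_key=...)` setup, the response post-processing, and the fallback return below are assumptions for illustration; only the `create(...)` arguments are taken from app.py.

# Minimal sketch of the text-emotion call after this commit.
# Assumptions (not shown in the diff): app.py calls Groq through the `groq`
# SDK and reads the API key from an environment variable.
import os

from groq import Groq

client = Groq(api_key=os.environ.get("GROQ_API_KEY"))  # illustrative setup

def predict_text_emotion(text: str) -> str:
    prompt = (
        f"The user has entered text '{text}' classify user's emotion as happy "
        "or sad or anxious or angry. Respond in only one word."
    )
    try:
        # Arguments mirror the call updated in this commit; only the model id changed.
        completion = client.chat.completions.create(
            model="llama3-70b-8192",
            messages=[{"role": "user", "content": prompt}],
            temperature=1,
            max_completion_tokens=64,
        )
        return completion.choices[0].message.content.strip().lower()
    except Exception:
        # Fallback if the API call fails; "neutral" matches the default string
        # seen in the facial-emotion path of this diff.
        return "neutral"

The call signature stays identical across predict_text_emotion, detect_facial_emotion, and generate_response; the commit only swaps the model string from llama-3.2-90b-vision-preview to llama3-70b-8192 in all three places.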