richardkimsm89 committed on
Commit
4505847
·
verified ·
1 Parent(s): 45de1a4

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +5 -1
app.py CHANGED
@@ -52,10 +52,12 @@ client = InferenceClient(api_key=hf_token)
52
 
53
  # Function to handle user inputs and fetch model responses
54
  def chatbot(input_text, history=[]):
55
- messages = [{"role": "user", "content": input_text}]
 
56
  for user_input, bot_response in history:
57
  messages.append({"role": "user", "content": user_input})
58
  messages.append({"role": "assistant", "content": bot_response})
 
59
 
60
  stream = client.chat.completions.create(
61
  model="google/gemma-2-2b-it",
@@ -68,6 +70,8 @@ def chatbot(input_text, history=[]):
68
 
69
  # Concatenate streamed response
70
  bot_response = "".join(chunk.choices[0].delta.content for chunk in stream)
 
 
71
  history.append((input_text, bot_response))
72
  return bot_response, history
73
 
 
52
 
53
  # Function to handle user inputs and fetch model responses
54
  def chatbot(input_text, history=[]):
55
+ #messages = [{"role": "user", "content": input_text}]
56
+ messages = []
57
  for user_input, bot_response in history:
58
  messages.append({"role": "user", "content": user_input})
59
  messages.append({"role": "assistant", "content": bot_response})
60
+ messages.append({"role": "user", "content": input_text})
61
 
62
  stream = client.chat.completions.create(
63
  model="google/gemma-2-2b-it",
 
70
 
71
  # Concatenate streamed response
72
  bot_response = "".join(chunk.choices[0].delta.content for chunk in stream)
73
+
74
+ # Update conversation history
75
  history.append((input_text, bot_response))
76
  return bot_response, history
77