charanhu committed
Commit ec6430d · 1 Parent(s): b92c5c3

Update app.py

Files changed (1)
  1. app.py +32 -24
app.py CHANGED
@@ -1,5 +1,7 @@
 import gradio as gr
 from transformers import AutoTokenizer, AutoModelForCausalLM
+import os
+import time

 # Load model and tokenizer
 tokenizer = AutoTokenizer.from_pretrained("TinyLlama/TinyLlama-1.1B-intermediate-step-1431k-3T")
@@ -23,38 +25,44 @@ def generate_text(prompt, max_length=100, min_length=20, temperature=1.0):

     return generated_text

-with gr.Blocks() as demo:
-    chat_history = []
+def print_like_dislike(x: gr.LikeData):
+    print(x.index, x.value, x.liked)
+
+def add_text(history, text):
+    history = history + [(text, None)]
+    return history, gr.Textbox(value="", interactive=False)
+
+def bot(history):
+    user_input = history[-1][0]
+    generated_response = generate_text(user_input)
+    history[-1][1] = generated_response
+    time.sleep(0.5)
+    yield history
+

+with gr.Blocks() as demo:
     chatbot = gr.Chatbot(
-        chat_history,
+        [],
         elem_id="chatbot",
         bubble_full_width=False,
-        avatar_images=(None, None),  # You can add avatars if needed
-    )
-
-    user_input = gr.Textbox(
-        scale=4,
-        show_label=False,
-        placeholder="Type your message and press enter...",
-        container=False,
+        avatar_images=(None, (os.path.join(os.path.dirname(__file__), "avatar.png"))),
     )

-    def process_user_input(history, text):
-        history.append(("User", text))
-        return history, gr.Textbox(value="", interactive=False)
+    with gr.Row():
+        txt = gr.Textbox(
+            scale=4,
+            show_label=False,
+            placeholder="Enter text and press enter, or upload an image",
+            container=False,
+        )

-    user_input_msg = user_input.submit(
-        process_user_input, [chat_history, user_input], [chatbot, user_input], queue=False
+    txt_msg = txt.submit(add_text, [chatbot, txt], [chatbot, txt], queue=False).then(
+        bot, chatbot, chatbot, api_name="bot_response"
     )
+    txt_msg.then(lambda: gr.Textbox(interactive=True), None, [txt], queue=False)

-    def generate_response(history):
-        last_user_input = [msg for msg in history if msg[0] == "User"][-1][1]
-        generated_response = generate_text(last_user_input)
-        history.append(("Chatbot", generated_response))
-        return history
-
-    user_input_msg.then(generate_response, [chat_history], queue=False)
+    chatbot.like(print_like_dislike, None, None)

+demo.queue()
 if __name__ == "__main__":
-    demo.launch()
+    demo.launch()
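
In the updated wiring, add_text appends the user turn and disables the textbox, bot fills in history[-1][1] with the model's reply and yields the updated history back to the Chatbot, and the trailing .then(lambda: gr.Textbox(interactive=True), ...) re-enables input once the response has been rendered.

The diff's context elides the body of generate_text: only its signature appears in the hunk header and its return statement in the surrounding context. A minimal sketch of what such a helper might look like, assuming a plain model.generate call against the same TinyLlama checkpoint loaded above; the model variable and every generation argument below are assumptions for illustration, not part of this commit:

# Hypothetical sketch only -- the actual generate_text body is not shown in this diff.
import torch

model = AutoModelForCausalLM.from_pretrained(
    "TinyLlama/TinyLlama-1.1B-intermediate-step-1431k-3T"
)

def generate_text(prompt, max_length=100, min_length=20, temperature=1.0):
    # Tokenize the prompt and sample a continuation from the model.
    inputs = tokenizer(prompt, return_tensors="pt")
    with torch.no_grad():
        output_ids = model.generate(
            **inputs,
            max_length=max_length,
            min_length=min_length,
            temperature=temperature,
            do_sample=True,
            pad_token_id=tokenizer.eos_token_id,
        )
    generated_text = tokenizer.decode(output_ids[0], skip_special_tokens=True)
    return generated_text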