charanhu committed
Commit 67c43a5 · 1 Parent(s): 4e65e92

Update app.py

Files changed (1): app.py (+10, -12)
app.py CHANGED
@@ -1,5 +1,6 @@
 import gradio as gr
 from transformers import AutoTokenizer, AutoModelForCausalLM
+import os
 import time
 
 # Load model and tokenizer
@@ -31,21 +32,18 @@ def add_text(history, text):
     history = history + [(text, None)]
     return history, gr.Textbox(value="", interactive=False)
 
-def bot(history, max_len, min_len, temp):
+def bot(history, max_length, min_length, temperature):
     user_input = history[-1][0]
-    response = generate_text(user_input, max_length=max_len, min_length=min_len, temperature=temp)
-    history[-1][1] = response
-    for character in response:
-        history[-1][1] += character
-        time.sleep(0.05)
-        yield history
+    generated_response = generate_text(user_input, max_length=max_length, min_length=min_length, temperature=temperature)
+    history[-1][1] = generated_response
+    time.sleep(0.5)
+    yield history
 
 with gr.Blocks() as demo:
     chatbot = gr.Chatbot(
         [],
         elem_id="chatbot",
         bubble_full_width=False,
-        avatar_images=(None, None),
     )
 
     with gr.Row():
@@ -56,12 +54,12 @@ with gr.Blocks() as demo:
             container=False,
         )
 
-    max_len_slider = gr.Slider(0, 2048, 100, label="Max Length")
-    min_len_slider = gr.Slider(0, 2048, 20, label="Min Length")
-    temp_slider = gr.Slider(0.1, 2.0, 1.0, label="Temperature")
+    max_length_slider = gr.Slider(minimum=0, maximum=2048, value=10, label="Max Length")
+    min_length_slider = gr.Slider(minimum=0, maximum=2048, value=1, label="Min Length")
+    temperature_slider = gr.Slider(minimum=0.1, maximum=2.0, value=1.0, label="Temperature")
 
     txt_msg = txt.submit(add_text, [chatbot, txt], [chatbot, txt], queue=False).then(
-        bot, chatbot, max_len_slider, min_len_slider, temp_slider
+        bot, chatbot, chatbot, api_name="bot_response", max_length=max_length_slider, min_length=min_length_slider, temperature=temperature_slider
     )
     txt_msg.then(lambda: gr.Textbox(interactive=True), None, [txt], queue=False)
 
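
The rewritten bot drops the character-by-character streaming loop and yields the full reply once after a 0.5 s pause. Both versions delegate generation to a generate_text helper that sits near the "# Load model and tokenizer" context shown above but is not part of this diff. A minimal sketch of how such a helper is typically written with the AutoTokenizer / AutoModelForCausalLM imports already at the top of app.py, using a placeholder model id rather than the checkpoint this Space actually loads:

# Sketch only: the real checkpoint and generation settings are not shown in this commit.
model_name = "gpt2"  # placeholder model id
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name)

def generate_text(prompt, max_length=100, min_length=20, temperature=1.0):
    # Encode the prompt, sample a continuation within the requested length bounds,
    # and return the decoded text.
    inputs = tokenizer(prompt, return_tensors="pt")
    output_ids = model.generate(
        **inputs,
        max_length=max_length,
        min_length=min_length,
        temperature=temperature,
        do_sample=True,
    )
    return tokenizer.decode(output_ids[0], skip_special_tokens=True)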
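One caveat on the new event wiring: Gradio's .then() forwards component values to the callback through its inputs argument rather than through arbitrary keyword arguments, so max_length=max_length_slider and friends would not reach bot as written. A sketch of the usual pattern, passing the sliders in the inputs list so their values map onto bot's positional parameters:

# Sketch: slider values are supplied via the inputs list and arrive as the
# max_length, min_length and temperature arguments of bot().
txt_msg = txt.submit(add_text, [chatbot, txt], [chatbot, txt], queue=False).then(
    bot,
    [chatbot, max_length_slider, min_length_slider, temperature_slider],
    chatbot,
    api_name="bot_response",
)
txt_msg.then(lambda: gr.Textbox(interactive=True), None, [txt], queue=False)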