ahmeds26 committed on
Commit a63edec · 1 Parent(s): 6723dc1

Add application files

Files changed (4)
  1. app.py +54 -0
  2. bot.png +0 -0
  3. requirements.txt +5 -0
  4. user.png +0 -0
app.py ADDED
@@ -0,0 +1,54 @@
+ import gradio as gr
+ from transformers import pipeline
+ from transformers import T5Tokenizer, T5ForConditionalGeneration
+ from transformers import AutoTokenizer, AutoModelForSeq2SeqLM
+ import tensorflow
+ import torch
+ import random
+ import time
+ import os
+
+
+ global default_model_name
+ default_model_name = "google/flan-t5-base"
+
+
+ def predict(input_text, model_name):
+     if model_name == "":
+         model_name = default_model_name
+
+     pipe = pipeline("text2text-generation", model=model_name)
+     generated_text = pipe(input_text, max_new_tokens=1000)
+
+     return generated_text[0]['generated_text']
+
+
+ with gr.Blocks() as demo:
+     gr.Markdown(
+         """
+         # Chatbot to interact with different Large Language Models (LLMs)
+         [Here](https://huggingface.co/models?pipeline_tag=text2text-generation) are some popular text2text large language models.
+         Or use the default model **"google/flan-t5-base"**
+         """)
+     input_model = gr.Textbox(label="Enter a custom Large Language Model (LLM) name:")
+     chatbot = gr.Chatbot(height=300, label="A chatbot to interact with LLMs", avatar_images=((os.path.join(os.path.dirname(__file__), "user.png")), (os.path.join(os.path.dirname(__file__), "bot.png"))))
+     user_input = gr.Textbox()
+     clear = gr.ClearButton([user_input, chatbot, input_model])
+
+     def user(user_message, chat_history):
+         return "", chat_history + [[user_message, None]]
+
+     def respond(chat_history, input_model):
+         bot_message = predict(chat_history[-1][0], input_model)
+         chat_history[-1][1] = bot_message
+         time.sleep(2)
+         return chat_history
+
+     user_input.submit(user, [user_input, chatbot], [user_input, chatbot], queue=False).then(
+         respond, [chatbot, input_model], chatbot
+     )
+
+     clear.click(lambda: None, None, chatbot, queue=False)
+
+ demo.queue()
+ demo.launch()
bot.png ADDED
requirements.txt ADDED
@@ -0,0 +1,5 @@
+ gradio
+ tensorflow
+ tensorflow_intel
+ torch
+ transformers
user.png ADDED