import gradio as gr
from transformers import AutoTokenizer, AutoModelForCausalLM
import time
# Load the TinyLlama model and tokenizer
tokenizer = AutoTokenizer.from_pretrained("TinyLlama/TinyLlama-1.1B-intermediate-step-1431k-3T")
model = AutoModelForCausalLM.from_pretrained("TinyLlama/TinyLlama-1.1B-intermediate-step-1431k-3T")

def generate_text(prompt, max_length=100, min_length=20, temperature=1.0):
    # Tokenize the prompt
    input_ids = tokenizer.encode(prompt, return_tensors="pt")

    # Generate text; do_sample=True is required for temperature to have any effect
    output = model.generate(
        input_ids,
        max_length=max_length,
        min_length=min_length,
        num_return_sequences=1,
        do_sample=True,
        temperature=temperature,
        pad_token_id=tokenizer.eos_token_id,
    )

    # Decode the generated tokens back into a string
    generated_text = tokenizer.decode(output[0], skip_special_tokens=True)
    return generated_text
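
# Example usage (illustrative; the prompt and settings below are arbitrary):
#   generate_text("The quick brown fox", max_length=60, temperature=0.8)
# returns the decoded sequence (prompt plus sampled continuation), capped at max_length tokens.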

def print_like_dislike(x: gr.LikeData):
    # Log which chatbot message was liked or disliked
    print(x.index, x.value, x.liked)

def add_text(history, text):
    # Append the user message with an empty bot slot that bot() will fill in
    history = history + [[text, None]]
    return history, gr.Textbox(value="", interactive=False)

def bot(history, max_length=100, min_length=20, temperature=1.0):
    # Use the latest user message as the prompt
    prompt = history[-1][0]
    response = generate_text(prompt, max_length=max_length, min_length=min_length, temperature=temperature)

    # Stream the response back one character at a time
    history[-1][1] = ""
    for character in response:
        history[-1][1] += character
        time.sleep(0.05)
        yield history

with gr.Blocks() as demo:
    chatbot = gr.Chatbot(
        [],
        elem_id="chatbot",
        bubble_full_width=False,
        avatar_images=(None, None),  # Set avatar image paths or URLs here
    )

    with gr.Row():
        txt = gr.Textbox(
            scale=4,
            show_label=False,
            placeholder="Enter text and press enter",
            container=False,
        )

    # Generation controls
    max_len_slider = gr.Slider(0, 2048, 100, label="Max Length")
    min_len_slider = gr.Slider(0, 2048, 20, label="Min Length")
    temp_slider = gr.Slider(0.1, 2.0, 1.0, label="Temperature")

    # Submit: append the user message, then stream the model response
    txt_msg = txt.submit(add_text, [chatbot, txt], [chatbot, txt], queue=False).then(
        bot, [chatbot, max_len_slider, min_len_slider, temp_slider], chatbot
    )
    # Re-enable the textbox once the response has finished streaming
    txt_msg.then(lambda: gr.Textbox(interactive=True), None, [txt], queue=False)

    chatbot.like(print_like_dislike, None, None)

demo.queue()

if __name__ == "__main__":
    demo.launch()
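
# To run this Space locally (assumed dependencies; pin versions as needed):
#   pip install gradio transformers torch
#   python app.py  # assuming the script is saved as app.py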