import gradio as gr
from transformers import AutoTokenizer, AutoModelForCausalLM

# Load model and tokenizer
tokenizer = AutoTokenizer.from_pretrained("TinyLlama/TinyLlama-1.1B-intermediate-step-1431k-3T")
model = AutoModelForCausalLM.from_pretrained("TinyLlama/TinyLlama-1.1B-intermediate-step-1431k-3T")

def generate_text(prompt, max_length=100, min_length=20, temperature=1.0):
    # Tokenize the prompt
    input_ids = tokenizer.encode(prompt, return_tensors="pt")

    # Generate text; sampling must be enabled for temperature to take effect
    output = model.generate(
        input_ids,
        max_length=max_length,
        min_length=min_length,
        num_return_sequences=1,
        do_sample=True,
        temperature=temperature,
        pad_token_id=tokenizer.eos_token_id,
    )

    # Decode the generated output (includes the prompt followed by the completion)
    generated_text = tokenizer.decode(output[0], skip_special_tokens=True)

    return generated_text

with gr.Blocks() as demo:
    gr.Markdown("## TinyLlama Text Generator")

    with gr.Row():
        # Left sidebar: prompt and generation settings
        with gr.Column(scale=1):
            prompt_txt = gr.Textbox(label="User:", lines=2)
            max_len_slider = gr.Slider(1, 2048, 100, label="Max Length")
            min_len_slider = gr.Slider(0, 2048, 20, label="Min Length")
            temp_slider = gr.Slider(0.1, 2.0, 1.0, label="Temperature")
            submit_btn = gr.Button(value="Submit")

        # Right panel: conversation history
        with gr.Column(scale=2):
            chatbot = gr.Chatbot(label="Conversation")

    def respond(message, chat_history, max_length, min_length, temperature):
        # Slider values are passed in as event inputs; reading .value on the
        # components would only return their initial values, not the UI state.
        bot_message = generate_text(
            message,
            max_length=max_length,
            min_length=min_length,
            temperature=temperature,
        )
        chat_history.append((message, bot_message))
        return "", chat_history

    submit_btn.click(
        respond,
        inputs=[prompt_txt, chatbot, max_len_slider, min_len_slider, temp_slider],
        outputs=[prompt_txt, chatbot],
    )

if __name__ == "__main__":
    demo.launch()