from transformers import pipeline, AutoTokenizer, AutoModelForSeq2SeqLM, AutoModelForCausalLM
from transformers.utils import logging
import gradio as gr

# Define the logger instance for the transformers library
logger = logging.get_logger("transformers")

# Load the model and tokenizer
model_name = "TheBloke/zephyr-7B-beta-GPTQ"  # alternative: "openai-community/gpt2"
tokenizer = AutoTokenizer.from_pretrained(model_name)
#model = AutoModelForSeq2SeqLM.from_pretrained(model_name) 
model = AutoModelForCausalLM.from_pretrained(model_name)
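# Note: a GPTQ-quantized checkpoint like TheBloke/zephyr-7B-beta-GPTQ normally needs a GPU
# and a GPTQ backend (e.g. the optimum/auto-gptq integration); device_map="auto" is a common
# way to place the weights, but the exact requirements depend on the installed versions.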

# Generate text from a prompt
def generate_text(prompt):
    # Padding a single prompt to max_length is unnecessary and would confuse generate()
    inputs = tokenizer(prompt, return_tensors="pt", truncation=True, max_length=512)
    # max_new_tokens/min_new_tokens bound the generated continuation, not prompt + continuation
    output_ids = model.generate(inputs["input_ids"], attention_mask=inputs["attention_mask"],
                                max_new_tokens=150, min_new_tokens=40, length_penalty=2.0,
                                num_beams=4, early_stopping=True)
    return tokenizer.decode(output_ids[0], skip_special_tokens=True)
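
# The beam-search settings above are typical of summarization examples; for open-ended
# story generation, sampling is usually a better fit. A minimal sketch (the decoding
# parameters below are illustrative assumptions, not values tuned for this model):
def generate_text_sampling(prompt):
    inputs = tokenizer(prompt, return_tensors="pt", truncation=True, max_length=512)
    output_ids = model.generate(
        inputs["input_ids"],
        attention_mask=inputs["attention_mask"],
        max_new_tokens=200,
        do_sample=True,          # sample from the distribution instead of beam search
        temperature=0.8,
        top_p=0.95,
        pad_token_id=tokenizer.eos_token_id,  # silences the missing-pad-token warning
    )
    return tokenizer.decode(output_ids[0], skip_special_tokens=True)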

# # For saving the model and tokenizer after training, once data has been collected
# #model.save_pretrained("model")
# #tokenizer.save_pretrained("model")

# # Helper functions for the chat-style app
# def clear_save_textbox(message):
#     return " ", message

# def show_input_text(message,history:list[tuple[str,str]]):
#     history.append((message,""))
#     story = generate_text(message)
#     history[-1] = (message,story)
#     return history

# def delete_previous_text(history:list[tuple[str,str]]):
#     try:
#         message, _ = history.pop()
#     except IndexError:
#         message = " "
#     return history, message

# Create a simple text-in / text-out interface with Gradio
interface = gr.Interface(fn=generate_text, inputs="text", outputs="text", title="TeLLMyStory",
                         description="Enter your story idea and the model will generate the story based on it.")
# with gr.Blocks() as demo:
#     gr.Markdown("TeLLMyStory chatbot")
#     #input_text = blocks.text(name="input_text", label="Enter your story idea here", default="Once upon a time, there was")
#     with gr.Row():
#         input_text = gr.Textbox(label="Enter your story idea here")
#         #clear_button = gr.Button("Clear",variant="secondary")
#         #clear_button.click(fn=clear_save_textbox, inputs=[input_text])
#         #retry_button = gr.Button("Retry", fn=delete_previous_text, inputs=[input_text],variants=["secondary"])

#     with gr.Row():
#         gr.Markdown("History of your story ideas")
#         gen_story = gr.Textbox(label="History")
    
#     #send_button = gr.Button(name="send_button", label="Send", fn=show_input_text, inputs=[input_text],outputs=[gen_story],variants=["primary"])
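
# # A sketch of the Blocks layout above, rewired with the current Gradio event API
# # (Button.click with fn/inputs/outputs). Widget names are illustrative; to use it,
# # launch demo instead of interface below.
# with gr.Blocks() as demo:
#     gr.Markdown("TeLLMyStory chatbot")
#     with gr.Row():
#         input_text = gr.Textbox(label="Enter your story idea here")
#         send_button = gr.Button("Send", variant="primary")
#     with gr.Row():
#         gen_story = gr.Textbox(label="Generated story")
#     send_button.click(fn=generate_text, inputs=[input_text], outputs=[gen_story])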

# Launch the Gradio interface
interface.launch()