import gradio as gr
from transformers import pipeline

# Load the 42dot PLM 1.3B model as a text-generation pipeline.
pipe = pipeline("text-generation", model="42dot/42dot_LLM-PLM-1.3B")

def chat_with_model(prompt):
    # The pipeline returns a list of dicts; use the generated text of the first result.
    response = pipe(prompt)
    return response[0]['generated_text']

# Simple text-in/text-out Gradio interface around the model.
interface = gr.Interface(fn=chat_with_model, inputs="text", outputs="text")
interface.launch()
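
For a quick check outside the Gradio UI, the same pipeline can be called directly. The snippet below is a minimal sketch, assuming the model weights have downloaded; the prompt and the max_new_tokens value are illustrative choices, not part of the original Space.

# Sketch: call the pipeline directly and cap the output length.
# max_new_tokens=64 is an arbitrary value chosen for illustration.
sample = pipe("Tell me about Seoul.", max_new_tokens=64)
print(sample[0]['generated_text'])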