rashedalhuniti committed · verified
Commit f042d67 · 1 Parent(s): 9f16574

Update app.py

Files changed (1)
  1. app.py +9 -10
app.py CHANGED
@@ -1,16 +1,15 @@
  import gradio as gr
- from transformers import AutoModelForCausalLM, AutoTokenizer
+ from transformers import pipeline

- # Load model and tokenizer
- model = AutoModelForCausalLM.from_pretrained("inceptionai/jais-13b", trust_remote_code=True)
- tokenizer = AutoTokenizer.from_pretrained("inceptionai/jais-13b", trust_remote_code=True)
+ # Create a pipeline using the model you want
+ pipe = pipeline("text-generation", model="inceptionai/jais-13b", trust_remote_code=True)

+ # Function to generate text
  def generate_text(prompt):
-     inputs = tokenizer(prompt, return_tensors="pt")
-     outputs = model.generate(**inputs)
-     return tokenizer.decode(outputs[0], skip_special_tokens=True)
+     return pipe(prompt)[0]['generated_text']

- # Create Gradio interface
- interface = gr.Interface(fn=generate_text, inputs="text", outputs="text")
+ # Set up the Gradio interface
+ iface = gr.Interface(fn=generate_text, inputs="text", outputs="text")

- interface.launch()
+ # Launch the interface
+ iface.launch()
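
For reference, a minimal sketch (not part of the commit) of exercising the refactored generate_text outside Gradio, assuming the jais-13b weights can be loaded locally; the max_new_tokens argument is an optional illustration and does not appear in the committed app.py.

from transformers import pipeline

# Same pipeline call as in the updated app.py; jais-13b needs trust_remote_code
# and enough memory for the 13B-parameter weights.
pipe = pipeline("text-generation", model="inceptionai/jais-13b", trust_remote_code=True)

def generate_text(prompt):
    # The pipeline returns a list of dicts; keep the generated_text of the first result.
    # max_new_tokens is added here for illustration only.
    return pipe(prompt, max_new_tokens=64)[0]["generated_text"]

print(generate_text("Hello"))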