""" # Inference import gradio as gr app = gr.load( "google/gemma-2-2b-it", src = "models", inputs = [gr.Textbox(label = "Input")], outputs = [gr.Textbox(label = "Output")], title = "Google Gemma", description = "Inference", examples = [ ["Hello, World."] ] ).launch() """ # Pipeline import gradio as gr from transformers import pipeline pipe = pipeline(model = "google/gemma-1.1-7b-it") def fn(input): output = pipe( input, #max_new_tokens = 100 ) return output[0]["generated_text"]#[len(input):] app = gr.Interface( fn = fn, inputs = [gr.Textbox(label = "Input")], outputs = [gr.Textbox(label = "Output")], title = "Google Gemma", description = "Pipeline", examples = [ ["Hello, World."] ] ).launch()