# Qwentest12 / app.py
import gradio as gr
from huggingface_hub import InferenceClient  # imported but not used below

# Use a pipeline as a high-level helper
from transformers import pipeline


def greet(name):
    return "Hello " + name + "!!"


# Chat-style prompt for the instruct model
messages = [
    {"role": "user", "content": "Who are you?"},
]

# Load Llama 3.1 8B Instruct for text generation (gated model; requires access)
pipe = pipeline("text-generation", model="meta-llama/Llama-3.1-8B-Instruct")
print(pipe(messages))

#demo = gr.Interface(fn=greet, inputs="text", outputs="text")
#demo.launch()
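
# A minimal sketch (not part of the original app) of how the Llama pipeline above
# could be wired into the Gradio interface in place of greet; the `generate`
# function, its parameter, and the `chat_demo` name are illustrative assumptions.
#
# def generate(prompt):
#     # With a chat-style input, the text-generation pipeline returns the full
#     # message list; the model's reply is assumed to be the last entry.
#     outputs = pipe([{"role": "user", "content": prompt}])
#     return outputs[0]["generated_text"][-1]["content"]
#
# chat_demo = gr.Interface(fn=generate, inputs="text", outputs="text")
# chat_demo.launch()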