import gradio as gr
from transformers import pipeline

# Load CodeLlama-7b-hf as a local text-generation pipeline
text_generation_pipeline = pipeline("text-generation", model="codellama/CodeLlama-7b-hf")

def respond(user_input):
    # Generate a response using the pipeline and return only the generated text
    return text_generation_pipeline(user_input, max_length=200)[0]["generated_text"]

# Wrap the pipeline in a simple Gradio interface and launch the app
gr.Interface(fn=respond, inputs=gr.Textbox(label="Me", placeholder="Ask me anything"),
             outputs=gr.Textbox(label="AI")).launch()