noahabebe committed on
Commit 87c5f71 · verified · 1 Parent(s): e2ed786

Update app.py

Files changed (1)
  1. app.py +15 -6
app.py CHANGED
@@ -1,12 +1,21 @@
 import gradio as gr
 from transformers import pipeline
 
-gr.load("models/codellama/CodeLlama-7b-hf").launch()
+# Load the text generation pipeline with the CodeLlama model
+text_generation_pipeline = pipeline("text-generation", model="codellama/CodeLlama-70b-Instruct-hf")
 
-# Prompt the user for input
-user_input = input("AI: Ask me anything\nMe: ")
+# Define a function to generate responses based on user input
+def generate_response(input_text):
+    # Generate a response using the pipeline
+    generated_response = text_generation_pipeline(input_text, max_length=200)[0]['generated_text']
+    return generated_response
 
-# Generate a response using the pipeline
-generated_response = text_generation_pipeline(user_input, max_length=200)[0]['generated_text']
+# Create Gradio interface
+gr.Interface(
+    fn=generate_response,
+    inputs="text",
+    outputs="text",
+    title="CodeLlama Assistant",
+    description="Ask me anything and I will respond!",
+).launch()
 
-print("AI:", generated_response)
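Note for reviewers: the sketch below re-creates the generation path added in this commit so it can be exercised without starting the Gradio UI (launch() blocks the process). The model name and max_length are copied from the diff; the file name smoke_test.py and the sample prompt are hypothetical, and the 70B checkpoint requires correspondingly large hardware to download and run.

# smoke_test.py - hypothetical reviewer aid, not part of this commit.
from transformers import pipeline

# Same task and model as the updated app.py (assumes the checkpoint can be
# downloaded and fits on the available hardware).
text_generation_pipeline = pipeline(
    "text-generation",
    model="codellama/CodeLlama-70b-Instruct-hf",
)

def generate_response(input_text):
    # Same call shape as app.py: keep the first returned sequence's text.
    return text_generation_pipeline(input_text, max_length=200)[0]["generated_text"]

if __name__ == "__main__":
    # Hypothetical sample prompt.
    print(generate_response("Write a Python function that reverses a string."))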