danielritchie committed on
Commit b9c512e · verified · 1 Parent(s): c5f5a18

Update app.py

Files changed (1)
  1. app.py +11 -6
app.py CHANGED
@@ -1,17 +1,22 @@
 import gradio as gr
 
-# Function to interact with the model
+# Load your LLaMA model using gr.load
+model = gr.load("models/meta-llama/Llama-3.2-1B")
+
+# Function to generate responses from the model
 def chatbot_response(input_text):
-    # Here you would use your model for inference
-    # This is a placeholder for your actual model's prediction logic
-    response = f"Model Response: {input_text}"
+    # Call the model directly through gr.load, assuming the model accepts simple text input
+    response = model(input_text)
     return response
 
 # Create the Gradio interface
 with gr.Blocks() as demo:
-    chatbot = gr.Chatbot()
+    chatbot = gr.Chatbot()  # This is for displaying the conversation
     with gr.Row():
-        txt = gr.Textbox(show_label=False, placeholder="Enter text and press Enter").style(container=False)
+        txt = gr.Textbox(show_label=False, placeholder="Enter your message and press Enter").style(container=False)
+
+    # When the user submits input, call the chatbot response function
    txt.submit(chatbot_response, txt, chatbot)
 
+# Launch the Gradio interface
 demo.launch()
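
For reference, a minimal sketch of how the updated app.py might look on a current Gradio 4.x release, not part of this commit: the .style(container=False) call was removed in Gradio 4 (container is now a constructor argument), and gr.Chatbot displays a list of (user, assistant) pairs rather than a bare string, so the handler below threads the conversation history through the submit event. The history-passing pattern and the assumption that the object returned by gr.load can be called with a single text prompt are mine, not the author's.

import gradio as gr

# Load the hosted model through Gradio's Hugging Face integration
model = gr.load("models/meta-llama/Llama-3.2-1B")

def chatbot_response(input_text, history):
    # Assumption: the interface returned by gr.load can be called with a single text prompt
    reply = model(input_text)
    # gr.Chatbot expects the conversation as a list of (user, assistant) pairs
    history = history + [(input_text, reply)]
    # Clear the textbox and return the updated conversation
    return "", history

with gr.Blocks() as demo:
    chatbot = gr.Chatbot()  # Displays the conversation
    with gr.Row():
        # In Gradio 4.x, container is a constructor argument; .style() no longer exists
        txt = gr.Textbox(show_label=False, placeholder="Enter your message and press Enter", container=False)

    # On submit, pass the textbox contents and current history in, and update both
    txt.submit(chatbot_response, [txt, chatbot], [txt, chatbot])

demo.launch()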