Allahbux committed on
Commit 932a3aa · verified · 1 Parent(s): 7e70937

Update app.py

Files changed (1)
  1. app.py +28 -20
app.py CHANGED
@@ -1,29 +1,37 @@
-
 import streamlit as st
-from transformers import pipeline, AutoTokenizer, AutoModelForCausalLM
+from transformers import pipeline
 
-# Load the pipeline
-model_name = "Orenguteng/Llama-3.1-8B-Lexi-Uncensored-V2"
-pipe = pipeline("text-generation", model=model_name)
+# Load the model pipeline
+@st.cache_resource  # Cache the model to avoid reloading
+def load_pipeline():
+    model_name = "Orenguteng/Llama-3.1-8B-Lexi-Uncensored-V2"
+    return pipeline("text-generation", model=model_name)
 
-# Optionally load the tokenizer and model directly (not used directly in this example)
-tokenizer = AutoTokenizer.from_pretrained(model_name)
-model = AutoModelForCausalLM.from_pretrained(model_name)
+pipe = load_pipeline()
 
-def generate_response(prompt):
-    """Generate a response from the model given a prompt."""
-    response = pipe(prompt, max_length=100, num_return_sequences=1)
-    return response[0]['generated_text']
-
-# Streamlit Interface
-st.title("AI Chatbot using Hugging Face")
-st.markdown("This app uses the Llama-3.1-8B-Lexi-Uncensored-V2 model to generate responses.")
+# App UI
+st.set_page_config(page_title="Hugging Face Chatbot", layout="centered")
+st.title("🤖 AI Chatbot")
+st.markdown(
+    """
+    Welcome to the **AI Chatbot** powered by Hugging Face's Llama-3.1-8B-Lexi-Uncensored-V2 model.
+    Enter your message below to get started!
+    """
+)
 
-user_input = st.text_input("Enter your message:", placeholder="Type something here...")
+# Input Box
+user_input = st.text_area(
+    "Your Message",
+    placeholder="Type your message here...",
+    height=100
+)
 
+# Generate Button
 if st.button("Generate Response"):
-    if user_input:
-        response = generate_response(user_input)
-        st.text_area("Response:", value=response, height=200)
+    if user_input.strip():
+        with st.spinner("Generating response..."):
+            response = pipe(user_input, max_length=150, num_return_sequences=1)
+            st.text_area("Response", value=response[0]['generated_text'], height=200)
     else:
         st.warning("Please enter a message before clicking the button.")
+
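The main change in this commit is that the text-generation pipeline is now built inside a function decorated with st.cache_resource, so the model is loaded once per Streamlit server process instead of on every rerun of the script. The sketch below illustrates that caching pattern in isolation; it is not part of the commit, and it substitutes the small distilgpt2 checkpoint for the 8B model purely to keep the example light.

import streamlit as st
from transformers import pipeline

@st.cache_resource  # built once per server process, reused across reruns
def load_pipeline(model_name: str):
    # The pipeline is constructed only the first time this function is
    # called with a given model_name; later reruns reuse the cached object.
    return pipeline("text-generation", model=model_name)

# "distilgpt2" is a stand-in chosen for this sketch; the commit itself
# loads Orenguteng/Llama-3.1-8B-Lexi-Uncensored-V2.
pipe = load_pipeline("distilgpt2")

prompt = st.text_input("Prompt", value="Hello, world")
if prompt:
    result = pipe(prompt, max_length=30, num_return_sequences=1)
    st.write(result[0]["generated_text"])

As with app.py itself, this would be started with streamlit run; the spinner and larger max_length from the commit are omitted here to keep the caching pattern in focus.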