johntheajs commited on
Commit
b954f7c
·
verified ·
1 Parent(s): c73b05f

Create app.py

Browse files
Files changed (1) hide show
  1. app.py +37 -0
app.py ADDED
@@ -0,0 +1,37 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import streamlit as st
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

# Model checkpoint to serve.
model_id = "mistralai/Mixtral-8x7B-Instruct-v0.1"


@st.cache_resource
def _load_model_and_tokenizer():
    """Load the tokenizer and model exactly once per server process.

    Streamlit re-executes this script on every user interaction; without
    caching, the (very large) model would be re-downloaded/re-loaded on
    each rerun. ``st.cache_resource`` memoizes the loaded objects for the
    lifetime of the process.
    """
    tok = AutoTokenizer.from_pretrained(model_id)
    mdl = AutoModelForCausalLM.from_pretrained(model_id)
    return tok, mdl


# Module-level names preserved for the rest of the script.
tokenizer, model = _load_model_and_tokenizer()
11
# Function to generate responses based on user messages
def generate_response(messages, max_new_tokens=100):
    """Generate the model's reply to a chat history.

    Args:
        messages: List of ``{"role": ..., "content": ...}`` dicts in the
            format expected by ``tokenizer.apply_chat_template``.
        max_new_tokens: Generation budget; defaults to the original
            hard-coded value of 100.

    Returns:
        The decoded text of the full generated sequence (special tokens
        stripped). NOTE(review): this includes the prompt text as well as
        the new completion, since the whole output sequence is decoded —
        confirm whether callers want only the newly generated portion.
    """
    # Render the chat history into model input ids on the model's device.
    input_ids = tokenizer.apply_chat_template(messages, return_tensors="pt").to(model.device)
    # inference_mode disables autograd bookkeeping — generation never needs gradients.
    with torch.inference_mode():
        outputs = model.generate(input_ids, max_new_tokens=max_new_tokens)
    return tokenizer.decode(outputs[0], skip_special_tokens=True)
# Streamlit app
st.title("Mixtral Chatbot")

# Streamlit reruns the whole script on every interaction, so a plain
# ``messages = []`` would wipe the conversation each time. Persist the
# history in session_state instead.
if "messages" not in st.session_state:
    st.session_state.messages = []
messages = st.session_state.messages

user_input = st.text_input("You:", "")

if st.button("Send"):
    if user_input:
        messages.append({"role": "user", "content": user_input})
        bot_response = generate_response(messages)
        messages.append({"role": "assistant", "content": bot_response})
    else:
        st.warning("Please enter a message.")

# Display conversation. Explicit unique keys prevent DuplicateWidgetID
# errors once the history contains more than one widget per label.
for i, message in enumerate(messages):
    if message["role"] == "user":
        st.text_input("You:", value=message["content"], disabled=True, key=f"user_{i}")
    elif message["role"] == "assistant":
        st.text_area("Mixtral:", value=message["content"], disabled=True, key=f"bot_{i}")