Rafii committed on
Commit
9e67127
·
1 Parent(s): 95abe3b
Files changed (1) hide show
  1. streamlit_app.py +29 -0
streamlit_app.py ADDED
@@ -0,0 +1,29 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
"""Minimal Streamlit chat front-end for the Rafii/f1llama model (via mlx_lm)."""

import streamlit as st
from mlx_lm import load, generate


@st.cache_resource
def _load_model():
    """Load model + tokenizer exactly once.

    Streamlit re-executes the whole script on every widget interaction;
    without st.cache_resource the multi-GB model would be reloaded on
    each button click.
    """
    return load("Rafii/f1llama")


def _build_prompt(tokenizer, user_text: str) -> str:
    """Wrap *user_text* in the tokenizer's chat template when one exists.

    Falls back to the raw text for tokenizers without a chat template.
    (The original script applied the template to a hard-coded "hello"
    and then discarded the result — generate() received the untemplated
    user input.)
    """
    if hasattr(tokenizer, "apply_chat_template") and tokenizer.chat_template is not None:
        messages = [{"role": "user", "content": user_text}]
        return tokenizer.apply_chat_template(
            messages, tokenize=False, add_generation_prompt=True
        )
    return user_text


model, tokenizer = _load_model()

st.title("Your F1 Bro")

# User input
user_input = st.text_input("Enter text:")

if st.button("Submit"):
    if not user_input.strip():
        # Guard: generating on an empty prompt is never useful.
        st.warning("Please enter some text first.")
    else:
        prompt = _build_prompt(tokenizer, user_input)
        response = generate(model, tokenizer, prompt=prompt, verbose=True)
        st.write(response)