Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
@@ -1,27 +1,16 @@
|
|
1 |
import streamlit as st
|
2 |
-
from transformers import AutoTokenizer, AutoModelForCausalLM
|
3 |
import torch
|
4 |
|
5 |
# Load the Phi 2 model and tokenizer
|
6 |
-
tokenizer = AutoTokenizer.from_pretrained(
|
7 |
-
|
8 |
-
trust_remote_code=True
|
9 |
-
)
|
10 |
-
|
11 |
-
model = AutoModelForCausalLM.from_pretrained(
|
12 |
-
"microsoft/phi-2",
|
13 |
-
device_map="auto",
|
14 |
-
trust_remote_code=True
|
15 |
-
)
|
16 |
-
|
17 |
-
# Offload the model to disk
|
18 |
-
model, tokenizer = disk_offload(model, tokenizer)
|
19 |
|
20 |
# Streamlit UI
|
21 |
st.title("Microsoft Phi 2 Streamlit App")
|
22 |
|
23 |
# User input prompt
|
24 |
-
prompt = st.text_area("Enter your prompt:", "
|
25 |
|
26 |
# Generate output based on user input
|
27 |
if st.button("Generate Output"):
|
|
|
1 |
import streamlit as st
|
2 |
+
from transformers import AutoTokenizer, AutoModelForCausalLM
|
3 |
import torch
|
4 |
|
5 |
# Load the Phi-2 model and tokenizer.
# NOTE: Streamlit re-executes this whole script on every user interaction,
# so an uncached module-level load would re-instantiate the multi-GB model
# on each rerun. st.cache_resource runs the loader once per process and
# shares the objects across sessions/reruns.
@st.cache_resource
def _load_phi2():
    """Return the (tokenizer, model) pair for microsoft/phi-2.

    trust_remote_code=True is needed because Phi-2 ships its own modeling
    code on the Hub; device_map="auto" lets the weights be placed on the
    available device(s) automatically.
    """
    tok = AutoTokenizer.from_pretrained("microsoft/phi-2", trust_remote_code=True)
    mdl = AutoModelForCausalLM.from_pretrained(
        "microsoft/phi-2", device_map="auto", trust_remote_code=True
    )
    return tok, mdl


tokenizer, model = _load_phi2()
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
8 |
|
9 |
# --- Streamlit UI ---------------------------------------------------------
st.title("Microsoft Phi 2 Streamlit App")

# Prompt entry box, pre-filled with an example so first-time users see
# the expected kind of input.
_default_prompt = "Write a story about Nasa"
prompt = st.text_area("Enter your prompt:", _default_prompt)
|
14 |
|
15 |
# Generate output based on user input
|
16 |
if st.button("Generate Output"):
|