Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
@@ -1,12 +1,18 @@
|
|
"""Streamlit app: load a quantized Llama-2 GGUF model (pre-login version)."""
import streamlit as st
# Load model directly
from transformers import AutoModel

# GGUF file to fetch from the hub repo, and GPU-offload settings.
file = 'llama-2-7b.Q4_K_M.gguf'
NO_GPU = 0        # gpu_layers=0 -> run entirely on CPU
GPU_LAYERS = 50   # intended GPU-offload layer count (unused in this call)

# BUG FIX: the original concatenated the repo id and the filename
# ("TheBloke/Llama-2-7B-GGUF" + file), yielding the invalid repo id
# "TheBloke/Llama-2-7B-GGUFllama-2-7b.Q4_K_M.gguf". The filename is
# already supplied separately via model_file=.
# NOTE(review): model_file/model_type/gpu_layers are ctransformers-style
# kwargs; transformers' AutoModel does not load GGUF this way — confirm
# the intended library (likely ctransformers.AutoModelForCausalLM).
model = AutoModel.from_pretrained("TheBloke/Llama-2-7B-GGUF",
                                  model_file=file, model_type="llama", gpu_layers=NO_GPU)

prompt = "AI is going to"
12 |
with st.container():
|
|
|
"""Streamlit app: authenticate with the HF Hub, then load a quantized Llama-2 GGUF model."""
import streamlit as st
# Load model directly
from transformers import AutoModel
from huggingface_hub import login
import os

# BUG FIX: authenticate BEFORE downloading the model. The original called
# login() after from_pretrained(), so the HF_TOKEN could never be used for
# the download it was presumably added to authorize.
access_token = os.getenv('HF_TOKEN')  # may be None if the secret is unset
login(token = access_token)

# GGUF file to fetch from the hub repo, and GPU-offload settings.
file = 'llama-2-7b.Q4_K_M.gguf'
NO_GPU = 0        # gpu_layers=0 -> run entirely on CPU
GPU_LAYERS = 50   # intended GPU-offload layer count (unused in this call)

# BUG FIX: the original concatenated the repo id and the filename
# ("TheBloke/Llama-2-7B-GGUF" + file), yielding the invalid repo id
# "TheBloke/Llama-2-7B-GGUFllama-2-7b.Q4_K_M.gguf". The filename is
# already supplied separately via model_file=.
# NOTE(review): model_file/model_type/gpu_layers are ctransformers-style
# kwargs; transformers' AutoModel does not load GGUF this way — confirm
# the intended library (likely ctransformers.AutoModelForCausalLM).
model = AutoModel.from_pretrained("TheBloke/Llama-2-7B-GGUF",
                                  model_file=file, model_type="llama", gpu_layers=NO_GPU)

prompt = "AI is going to"
18 |
with st.container():
|