Spaces: Runtime error
Commit 1f8012b
1 Parent(s): 23f3344
notfix
app.py
CHANGED
@@ -5,23 +5,19 @@ import transformers
 import streamlit as st
 
 # Import a Petals model
-from src.client.remote_model import DistributedBloomForCausalLM
+# from src.client.remote_model import DistributedBloomForCausalLM
 
 
 MODEL_NAME = "bigscience/test-bloomd-6b3" # select model you like
-INITIAL_PEERS = ["/ip4/193.106.95.184/tcp/31000/p2p/QmSg7izCDtowVTACbUmWvEiQZNY4wgCQ9T9Doo66K59X6q"]
+# INITIAL_PEERS = ["/ip4/193.106.95.184/tcp/31000/p2p/QmSg7izCDtowVTACbUmWvEiQZNY4wgCQ9T9Doo66K59X6q"]
 
 tokenizer = transformers.BloomTokenizerFast.from_pretrained(MODEL_NAME)
-model = DistributedBloomForCausalLM.from_pretrained(
-    MODEL_NAME,
-    initial_peers=INITIAL_PEERS,
-).to("cpu")
+model = transformers.BloomForCausalLM.from_pretrained(MODEL_NAME)
 
 text = st.text_input('Enter some text')
 max_new_tokens = st.slider('Select a value', min_value=1, max_value=100)
 
 if text:
-    model = DistributedBloomForCausalLM(MODEL_NAME, INITIAL_PEERS)
     input_ids = tokenizer([text], return_tensors="pt").input_ids
     output = model.generate(input_ids, max_new_tokens=max_new_tokens)
     output_text = tokenizer.batch_decode(output)
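
For reference, a sketch of app.py as it reads after this commit, assembled from the new side of the hunk. Lines 1-4 are outside the hunk and are assumed here to contain only the transformers import named in the hunk header; likewise, anything after line 23 (for example, code that displays output_text) is not shown in the diff, so it is left as a comment. Running `streamlit run app.py` launches the app locally.

import transformers  # assumed: lines 1-4 are outside the hunk, which only names this import

import streamlit as st

# Import a Petals model
# from src.client.remote_model import DistributedBloomForCausalLM


MODEL_NAME = "bigscience/test-bloomd-6b3" # select model you like
# INITIAL_PEERS = ["/ip4/193.106.95.184/tcp/31000/p2p/QmSg7izCDtowVTACbUmWvEiQZNY4wgCQ9T9Doo66K59X6q"]

tokenizer = transformers.BloomTokenizerFast.from_pretrained(MODEL_NAME)
model = transformers.BloomForCausalLM.from_pretrained(MODEL_NAME)

text = st.text_input('Enter some text')
max_new_tokens = st.slider('Select a value', min_value=1, max_value=100)

if text:
    input_ids = tokenizer([text], return_tensors="pt").input_ids
    output = model.generate(input_ids, max_new_tokens=max_new_tokens)
    output_text = tokenizer.batch_decode(output)
    # Any further lines (e.g. rendering output_text in the Streamlit UI) fall outside the hunk and are not shown here.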