Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
@@ -12,18 +12,25 @@ from huggingface_hub import HfFolder
|
|
# Persist the Hugging Face token so downstream hub calls are authenticated.
HfFolder.save_token(HF_TOKEN)

from transformers import pipeline

# Pick the device for inference: CUDA GPU (index 0) when present, else CPU (-1).
inference_device = 0 if torch.cuda.is_available() else -1
generator = pipeline(
    'text-generation',
    model='EleutherAI/gpt-neo-2.7B',
    device=inference_device,
)

# Simple Streamlit UI: free-text input, greedy generation, raw JSON output.
text = st.text_area("your input")

if text:
    out = generator(text, do_sample=False)
    st.json(out)
|
|
# Persist the Hugging Face token so downstream hub calls are authenticated.
HfFolder.save_token(HF_TOKEN)

from transformers import pipeline

# Load the model, specifying the use of GPU if available
#device = 0 if torch.cuda.is_available() else -1  # use GPU if available
#generator = pipeline('text-generation', model='EleutherAI/gpt-neo-2.7B', device=device)

# BUG FIX: "gpt-3" is not a model id hosted on the Hugging Face Hub, so
# pipeline(...) raised an OSError at startup. Use the hosted "gpt2"
# checkpoint, which supports the text-generation task.
generator = pipeline("text-generation", model="gpt2")

# Streamlit UI: title, short instruction, and a free-text input box.
st.title("text class")
st.write("your text.")
text = st.text_area("your input")

if text:
    # Greedy (deterministic) generation; `out` is a list of generated dicts.
    out = generator(text, do_sample=False)
    st.json(out)
    # BUG FIX: kept inside the `if` block — `out` is only bound when `text`
    # is non-empty, so referencing it unconditionally raised NameError on
    # the first render (empty text area).
    st.write(f"reply: {out}")