import streamlit as st
import os
import torch  # used to detect whether a GPU is available
import transformers
HF_TOKEN = os.getenv('HF_TOKEN')
from huggingface_hub import HfFolder
# Set the token using HfFolder (this persists the token)
if HF_TOKEN:
    HfFolder.save_token(HF_TOKEN)
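# Persisting the token lets the pipeline below download gated or private models without re-authenticating.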
from transformers import pipeline
# Load the model, using the GPU if one is available
device = 0 if torch.cuda.is_available() else -1
#generator = pipeline('text-generation', model='EleutherAI/gpt-neo-2.7B', device=device)
generator = pipeline("text-generation", model="gpt2", device=device)
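# generator(prompt) returns a list of dicts, e.g. [{'generated_text': '<prompt plus its continuation>'}]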
st.title("Text generation")
st.write("Enter a prompt and the model will continue your text.")
text = st.text_area("Your prompt")
if text:
    out = generator(text, do_sample=False)
    st.json(out)
    st.write(f"reply: {out[0]['generated_text']}")
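# To try the app locally (a sketch, assuming this file is saved as app.py and HF_TOKEN is set in the environment):
#   streamlit run app.py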