File size: 1,108 Bytes
a2320ed
b0a2a59
6ed43b1
4c1fe3c
9c084a1
79e7eb6
 
 
 
4c1fe3c
 
 
 
 
 
d1117d8
6ed43b1
79e7eb6
6ed43b1
79e7eb6
6ed43b1
d1117d8
4c1fe3c
 
 
a2320ed
6ed43b1
2f9e402
 
1fb8db8
 
 
 
 
2f9e402
 
 
1fb8db8
2f9e402
 
1fb8db8
2f9e402
6ed43b1
 
 
 
 
 
a2320ed
4c1fe3c
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
import streamlit as st
import os
from transformers import pipeline, set_seed
from huggingface_hub import HfFolder

import transformers
import torch


# Ensure the HF_TOKEN environment variable is set correctly; the app can
# still run without a token for public models, so only warn when it's absent.
HF_TOKEN = os.getenv('HF_TOKEN')
if HF_TOKEN:
    HfFolder.save_token(HF_TOKEN)
else:
    st.warning("HF_TOKEN is not set. Proceeding without a token.")


@st.cache_resource
def _load_generator():
    """Build the text-generation pipeline exactly once.

    Streamlit re-executes the whole script on every widget interaction;
    without caching, the multi-gigabyte gpt2-large checkpoint would be
    reloaded on each rerun. ``st.cache_resource`` keeps one shared instance
    for the process lifetime.
    """
    # Use a valid model identifier from the Hugging Face hub.
    return pipeline('text-generation', model='gpt2-large')


generator = _load_generator()

st.title("Text Generation")
st.write("Enter your text below.")
text = st.text_area("Your input")

st.write("Enter seed.")
seed_input = st.text_area("Set seed")

st.write("Enter max length.")
max_length_input = st.text_area("max length")

# Parse the numeric inputs; on any invalid/empty value fall back to None so
# generation is simply skipped instead of crashing the app.
try:
    seed = int(seed_input)
    maxLength = int(max_length_input)
except ValueError:
    seed = None
    maxLength = None

# Only seed the RNG once we actually have an integer: transformers'
# set_seed(None) raises, which previously crashed the app on every rerun
# before the user finished typing a seed.
if seed is not None:
    set_seed(seed)

# Compare against None rather than relying on truthiness so that a seed of 0
# is accepted; additionally require a positive max length for generation.
if text and seed is not None and maxLength is not None and maxLength > 0:
    out = generator(text, max_length=maxLength, num_return_sequences=5)
    st.json(out)
    st.write(f"Reply: {out[0]['generated_text']}")