# Svensk GPT text-generation demo (flax-community/swe-gpt-wiki)
# Streamlit app: generates Swedish text from a user-supplied prompt.
import streamlit as st
import torch
from transformers import (
    AutoModelForCausalLM,
    AutoModelWithLMHead,
    AutoTokenizer,
    FlaxGPT2LMHeadModel,
    GPT2LMHeadModel,
    GPT2Model,
    GPT2Tokenizer,
    pipeline,
    set_seed,
)
#===========================================#
#        Loads Model and Pipeline           #
#===========================================#

@st.cache_resource
def _load_generator():
    """Load the Swedish GPT tokenizer/model and build a text-generation pipeline.

    Cached with ``st.cache_resource`` because Streamlit re-executes the whole
    script on every widget interaction; without caching, the model weights
    would be re-loaded on every button click.

    Returns:
        tuple: (tokenizer, model, generator pipeline).
    """
    # AutoModelWithLMHead is deprecated (removed in recent transformers);
    # AutoModelForCausalLM is the supported auto class for GPT-2-style models.
    tok = AutoTokenizer.from_pretrained("flax-community/swe-gpt-wiki")
    mdl = AutoModelForCausalLM.from_pretrained("flax-community/swe-gpt-wiki")
    gen = pipeline('text-generation', model=mdl, tokenizer=tok)
    return tok, mdl, gen


# Keep the original module-level names so any external references still work.
tokenizer, model, generator = _load_generator()
# set_seed(42)  # uncomment for reproducible sampling

#===========================================#
#             Streamlit Code                #
#===========================================#
st.title('Svensk GPT text generering')
desc = "En svensk GPT-modell tränad på wikipedia. Fyll i text i fältet nedanför för generering."
st.write(desc)
# NOTE(review): max_length counts *tokens*, not characters, despite the widget
# label — consider relabeling; label kept unchanged here to preserve the UI.
num_sentences = st.number_input('Number of Characters', min_value=1, max_value=150, value=75)
user_input = st.text_input('Fyll i text att generera ifrån')
if st.button('Generate Text'):
    # Single sampled continuation of the user's prompt, truncated at
    # num_sentences tokens (prompt included in the count).
    generated_text = generator(user_input, max_length=num_sentences, num_return_sequences=1)
    st.write(generated_text[0]["generated_text"])