import streamlit as st
import torch
# AutoModelWithLMHead still works but is deprecated in newer transformers releases;
# AutoModelForCausalLM is the drop-in replacement.
from transformers import AutoTokenizer, AutoModelWithLMHead


st.title('Informal to Formal')
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")

st.text('''How To Make Prompt:

informal english: space is huge and needs to be explored.
Translated into the Style of Abraham Lincoln: space awaits traversal, a new world whose boundaries are endless.
Translated into the Style of Abraham Lincoln: space is a boundless expanse, a vast virgin domain awaiting exploration.

informal english: i am very ready to do that just that.
Translated into the Style of Abraham Lincoln: you can assure yourself of my readiness to work toward this end.
Translated into the Style of Abraham Lincoln: please be assured that i am most ready to undertake this laborious task.

informal english: meteors are much harder to see, because they are only there for a fraction of a second.
Translated into the Style of Abraham Lincoln: meteors are not readily detectable, lasting for mere fractions of a second.

informal english:''')

st.text('''To See Other Prompts You Can Use, Check: https://huggingface.co/BigSalmon/MrLincoln10''')


# Load the GPT-2 tokenizer and the fine-tuned informal-to-formal model,
# move the model to the selected device, and switch to inference mode.
tokenizer = AutoTokenizer.from_pretrained("gpt2")
model = AutoModelWithLMHead.from_pretrained("BigSalmon/MrLincoln5")
model.to(device)
model.eval()

with st.form(key='my_form'):
    prompt = st.text_area(label='Enter sentence')
    submit_button = st.form_submit_button(label='Submit')

    if submit_button:
        with torch.no_grad():
            # Encode the prompt and run a single forward pass through the model.
            text = tokenizer.encode(prompt)
            myinput = torch.tensor([text]).to(device)
            logits, past_key_values = model(myinput, past_key_values=None, return_dict=False)
            # Keep only the logits for the last position in the sequence.
            logits = logits[0, -1]
            probabilities = torch.nn.functional.softmax(logits, dim=-1)
            # Take the 60 most likely next tokens, decode them, and display them.
            best_logits, best_indices = logits.topk(60)
            best_words = [tokenizer.decode([idx.item()]) for idx in best_indices]
            best_probabilities = probabilities[best_indices].tolist()
            st.write(best_words)
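

# ------------------------------------------------------------------------------
# Optional sketch (not part of the original app): the few-shot prompt format shown
# in the "How To Make Prompt" text above can also be completed in one pass with
# model.generate, instead of only listing the top-60 next-token candidates. The
# function name, the example few-shot block, and the 60-token budget are
# illustrative assumptions; the app itself never calls this.
def generate_formal(informal_sentence):
    """Greedily complete the few-shot prompt and return the generated continuation."""
    few_shot = (
        "informal english: space is huge and needs to be explored.\n"
        "Translated into the Style of Abraham Lincoln: space awaits traversal, "
        "a new world whose boundaries are endless.\n\n"
        f"informal english: {informal_sentence}\n"
        "Translated into the Style of Abraham Lincoln:"
    )
    input_ids = tokenizer.encode(few_shot, return_tensors="pt").to(device)
    with torch.no_grad():
        output_ids = model.generate(
            input_ids,
            max_length=input_ids.shape[1] + 60,   # room for roughly one generated line
            do_sample=False,                      # greedy decoding
            pad_token_id=tokenizer.eos_token_id,  # GPT-2 has no pad token; reuse EOS
        )
    # Return only the newly generated tokens, decoded back to text.
    return tokenizer.decode(output_ids[0, input_ids.shape[1]:], skip_special_tokens=True)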