BigSalmon committed
Commit cd84923 · Parent: 694b539

Update app.py

Files changed (1): app.py (+6, -2)
app.py CHANGED

@@ -6,6 +6,7 @@ import torch
 import torch.nn as nn
 from transformers.activations import get_activation
 from transformers import AutoTokenizer, AutoModelForCausalLM
+from transformers import GPTNeoXForCausalLM, GPTNeoXTokenizerFast
 
 
 st.title('GPT2: To see all prompt outlines: https://huggingface.co/BigSalmon/InformalToFormalLincoln64Paraphrase')
@@ -15,8 +16,11 @@ device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
 @st.cache(allow_output_mutation=True)
 def get_model():
 
-  tokenizer = AutoTokenizer.from_pretrained("BigSalmon/InformalToFormalLincoln84Paraphrase")
-  model = AutoModelForCausalLM.from_pretrained("BigSalmon/InformalToFormalLincoln84Paraphrase")
+  tokenizer = GPTNeoXTokenizerFast.from_pretrained("CarperAI/FIM-NeoX-1.3B")
+  model = GPTNeoXForCausalLM.from_pretrained("BigSalmon/FormalInformalConcise2-FIM-NeoX-1.3B")
+
+  #tokenizer = AutoTokenizer.from_pretrained("BigSalmon/InformalToFormalLincoln84Paraphrase")
+  #model = AutoModelForCausalLM.from_pretrained("BigSalmon/InformalToFormalLincoln84Paraphrase")
 
   #tokenizer = AutoTokenizer.from_pretrained("BigSalmon/InformalToFormalLincoln76ParaphraseXL")
   #model = AutoModelForCausalLM.from_pretrained("BigSalmon/InformalToFormalLincoln76ParaphraseXL", low_cpu_mem_usage=True)
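
For context, a minimal sketch of how a cached loader like the one in this diff is typically consumed in a Streamlit app. Only get_model(), the @st.cache decorator, the device line, and the two checkpoint names come from the diff above; the prompt widget, generation settings, and the return/unpack pattern are illustrative assumptions, not the actual contents of app.py.

import streamlit as st
import torch
from transformers import GPTNeoXForCausalLM, GPTNeoXTokenizerFast

device = torch.device("cuda" if torch.cuda.is_available() else "cpu")

@st.cache(allow_output_mutation=True)
def get_model():
    # Tokenizer from the base FIM-NeoX checkpoint; weights from the fine-tuned model (as in the diff).
    tokenizer = GPTNeoXTokenizerFast.from_pretrained("CarperAI/FIM-NeoX-1.3B")
    model = GPTNeoXForCausalLM.from_pretrained("BigSalmon/FormalInformalConcise2-FIM-NeoX-1.3B")
    return model, tokenizer  # assumption: the real get_model() may return these in a different form

model, tokenizer = get_model()
model.to(device)

# Illustrative assumption: generate a continuation for a user-supplied prompt.
prompt = st.text_area("Prompt", "informal english: i am very tired.\nTranslated into the Style of Abraham Lincoln:")
if st.button("Generate"):
    inputs = tokenizer(prompt, return_tensors="pt").to(device)
    outputs = model.generate(**inputs, max_new_tokens=50, do_sample=True, top_p=0.95)
    st.write(tokenizer.decode(outputs[0], skip_special_tokens=True))

Caching the loader with @st.cache(allow_output_mutation=True) keeps the 1.3B-parameter model in memory across Streamlit reruns instead of reloading it on every interaction.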