d1ef committed
Commit a0f9f54 · 1 Parent(s): 6f7a741

Update app.py

Files changed (1)
  1. app.py +5 -1
app.py CHANGED
@@ -2,7 +2,7 @@ import gradio as gr
 from transformers import ByT5Tokenizer
 import json
 
-ACTIONS = ["text2ids", "text2tokens", "ids2tokens", "tokens2ids and JSON requires double quotes"]
+ACTIONS = ["text2ids", "text2tokens", "ids2tokens", "tokens2ids and JSON requires double quotes", "ids2text"]
 
 def translate(model, action, inputs):
     tokenizer = ByT5Tokenizer.from_pretrained(model)
@@ -26,6 +26,10 @@ def translate(model, action, inputs):
         list = json.loads(input)
         tokens = tokenizer.convert_tokens_to_ids(list)
         output = tokens
+    if action == ACTIONS[4]:
+        list = json.loads(input)
+        text = tokenizer.decode(list)
+        output = text
 
 
     return f'{output}\n\n\n\nother infos:\njson:{json.dumps(output)} \nvocab_size: {vocab_size}\nlen(tokenizer): {len_tokenizer}'
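
For context, the added ids2text branch simply JSON-parses a list of token IDs and passes it to tokenizer.decode. A minimal standalone sketch of that path, assuming "google/byt5-small" as the checkpoint and a made-up ID list (the app itself takes the model name and input from the Gradio UI):

import json

from transformers import ByT5Tokenizer

# Assumed checkpoint; the Space lets the user supply any ByT5 model name.
tokenizer = ByT5Tokenizer.from_pretrained("google/byt5-small")

# ids2text expects a JSON array of integer IDs. ByT5 IDs are byte values
# offset by the 3 special tokens, so these are the bytes of "hello" plus EOS.
ids = json.loads("[107, 104, 111, 111, 114, 1]")

# Decode the IDs back into text, mirroring the new `if action == ACTIONS[4]` branch.
text = tokenizer.decode(ids)
print(text)  # e.g. "hello</s>" with default decode settings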