broskicodes committed
Commit 058eed9 · 1 Parent(s): 96722fb

lower text generation limit

Files changed (1): app.py (+2 -3)
app.py CHANGED
@@ -148,10 +148,9 @@ decode = lambda l: ''.join([itos[i] for i in l]) # decoder: take a list of integ
 model = torch.load('complete-model.pt', map_location=device)
 
 # inference
-slider_value = st.slider('Amount of text to generate', min_value=100, max_value=2000, value=500, step=5)
+slider_value = st.slider('Amount of text to generate', min_value=20, max_value=200, value=50, step=5)
 if st.button('Generat text'):
     context = torch.zeros((1, 1), dtype=torch.long, device=device)
     text = model.generate(context, max_new_tokens=slider_value)[0].tolist()
-    st.json(decode(text))
-    #
+    st.text(decode(text))
 
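For context, a hedged sketch of how the inference block in app.py reads after this commit: the slider range drops from 100–2000 (default 500) to 20–200 (default 50) generated tokens, and the decoded output is rendered with st.text instead of st.json. The character vocabulary, itos mapping, device setup, and the model's generate(idx, max_new_tokens) signature are assumptions inferred from the surrounding lines, not part of this diff.

import streamlit as st
import torch

# Assumed setup (not part of this commit): a character-level vocabulary and
# index-to-character mapping; the placeholder corpus below is hypothetical.
chars = sorted(set("example corpus"))
itos = {i: ch for i, ch in enumerate(chars)}
decode = lambda l: ''.join([itos[i] for i in l])  # list of ints -> string

device = 'cuda' if torch.cuda.is_available() else 'cpu'
model = torch.load('complete-model.pt', map_location=device)

# Inference UI: generation is now capped at 200 tokens, defaulting to 50.
slider_value = st.slider('Amount of text to generate',
                         min_value=20, max_value=200, value=50, step=5)
if st.button('Generat text'):
    # Start generation from a single zero token (an empty prompt).
    context = torch.zeros((1, 1), dtype=torch.long, device=device)
    text = model.generate(context, max_new_tokens=slider_value)[0].tolist()
    st.text(decode(text))  # render as plain text rather than st.json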