bkoz committed 5567d63 (unverified) · Parent(s): 0b7e612

added model download

Files changed (1): app.py +1 -0
app.py CHANGED
@@ -48,6 +48,7 @@ def load_model(fp):
     ]

     for prompt in prompts:
+        print('Making inference...')
         output = llm(
             prompt,
             max_tokens=512,
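
For context, the hunk sits inside load_model's prompt loop, and the llm(prompt, max_tokens=...) call follows the llama-cpp-python call style. Below is a minimal sketch of how the downloaded weights and that loop might fit together; the repo id, filename, prompt text, and the hf_hub_download/Llama wiring are assumptions for illustration, not taken from this commit.

# Minimal sketch of the assumed app.py flow (llama-cpp-python + huggingface_hub).
# Repo id, filename, and prompts below are placeholders, not from this commit.
from huggingface_hub import hf_hub_download
from llama_cpp import Llama

def load_model(fp):
    llm = Llama(model_path=fp)  # load the GGUF weights from the local path
    prompts = [
        "Q: What is the capital of France? A:",  # placeholder prompt
    ]

    for prompt in prompts:
        print('Making inference...')  # the line added in this commit
        output = llm(
            prompt,
            max_tokens=512,
        )
        print(output["choices"][0]["text"])

# "added model download": fetch the weights first, then load and run them.
model_path = hf_hub_download(
    repo_id="someuser/some-gguf-model",  # placeholder repo id
    filename="model.gguf",               # placeholder filename
)
load_model(model_path)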