com3dian committed on
Commit e514b11 · verified · 1 Parent(s): 972e898

Update app.py

Files changed (1):
  1. app.py +14 -12
app.py CHANGED
@@ -49,19 +49,21 @@ if uploaded_file is not None:
     essay = monkeyReader.readEssay(saved_file_path)
     for key, values in essay.items():
         st.write(f"{key}: {', '.join(values)}")
-
-    Barttokenizer = BartTokenizer.from_pretrained('facebook/bart-large-cnn')
-    summ_model_path = 'com3dian/Bart-large-paper2slides-summarizer'
-    summarizor = BartForConditionalGeneration.from_pretrained(summ_model_path)
-    exp_model_path = 'com3dian/Bart-large-paper2slides-expander'
-    expandor = BartForConditionalGeneration.from_pretrained(exp_model_path)
-    device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
-    BartSE = BARTAutoEncoder(summarizor, summarizor, device)
-    del summarizor, expandor
 
-    document = Document(essay, Barttokenizer)
-    del Barttokenizer
-    length = document.merge(10, 30, BartSE, device)
+    with st.status("Generating slides..."):
+
+        Barttokenizer = BartTokenizer.from_pretrained('facebook/bart-large-cnn')
+        summ_model_path = 'com3dian/Bart-large-paper2slides-summarizer'
+        summarizor = BartForConditionalGeneration.from_pretrained(summ_model_path)
+        exp_model_path = 'com3dian/Bart-large-paper2slides-expander'
+        expandor = BartForConditionalGeneration.from_pretrained(exp_model_path)
+        device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
+        BartSE = BARTAutoEncoder(summarizor, summarizor, device)
+        del summarizor, expandor
+
+        document = Document(essay, Barttokenizer)
+        del Barttokenizer
+        length = document.merge(25, 30, BartSE, device)
 
     summarizor = pipeline("summarization", model=summ_model_path, device = 0)
     summ_text = summarizor(document.segmentation['text'], max_length=100, min_length=10, do_sample=False)
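
Note on the change: the new `with st.status(...)` block is Streamlit's status-container context manager (available in recent Streamlit releases), which groups the slow model loading and merging under a collapsible progress indicator while the slides are generated. Below is a minimal, self-contained sketch of the same pattern; the model name comes from the diff above, while the uploader label, placeholder text, and the status.update() call are illustrative assumptions rather than the app's exact code.

# Minimal sketch of the st.status pattern introduced in this commit.
# The model name is taken from the diff; everything else (uploader label,
# placeholder text, status.update call) is an illustrative assumption.
import streamlit as st
import torch
from transformers import pipeline

uploaded_file = st.file_uploader("Upload a paper")

if uploaded_file is not None:
    # Heavy work happens inside the status container so the UI shows a
    # spinner and a collapsible log while the model downloads and runs.
    with st.status("Generating slides...") as status:
        summ_model_path = 'com3dian/Bart-large-paper2slides-summarizer'
        device = 0 if torch.cuda.is_available() else -1

        summarizor = pipeline("summarization", model=summ_model_path, device=device)
        summ_text = summarizor(
            "Placeholder section text extracted from the uploaded paper...",
            max_length=100, min_length=10, do_sample=False,
        )

        # Mark the container as finished once generation is done.
        status.update(label="Slides generated", state="complete")

    st.write(summ_text[0]['summary_text'])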