Spaces: Runtime error
Commit · 6566a4a
1 Parent(s): 2937f03
Update app.py

app.py CHANGED
@@ -22,11 +22,20 @@ from pydantic import BaseModel
 from typing import Annotated
 from transformers import BertTokenizerFast, EncoderDecoderModel
 import torch
+import threading
+import random
+import string
+import time
 
 from fastapi import Form
 
 class Query(BaseModel):
     text: str
+
+
+class Query2(BaseModel):
+    text: str
+    host:str
 
 # device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
 # tokenizer = BertTokenizerFast.from_pretrained('mrm8488/bert-small2bert-small-finetuned-cnn_daily_mail-summarization')
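
Editor's note: the new Query2 model pairs the text to summarize with a host callback URL. As a rough illustration only (the Space URL and callback endpoint below are placeholders, not values from this commit), a client of the /large route added in the next hunk might send something like:

    import requests

    payload = {
        "text": "Long article text to be summarized ...",
        # Placeholder: the caller's own endpoint that will receive the result.
        "host": "https://example.com/summary-callback",
    }

    # Placeholder Space URL; the real one depends on where this app is deployed.
    resp = requests.post("https://<your-space>.hf.space/large", json=payload)
    print(resp.json())  # expected shape: {"id": "<job id>"}
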
@@ -103,4 +112,41 @@ async def get_answer(q: Query ):
 
 
     return "hello"
-
+
+
+@app.post("/large")
+async def get_answer(q: Query2 ):
+
+    N = 20
+    res = ''.join(random.choices(string.ascii_uppercase +
+                                 string.digits, k=N))
+    res= res+ str(time.time())
+
+    id= res
+    text = q.text
+    host= q.host
+    t = threading.Thread(target=do_ML, args=(id,text,host))
+    t.start()
+
+
+
+    return JSONResponse({"id":id})
+
+
+    return "hello"
+
+
+import requests
+
+def do_ML(id:str,long_text:str,host:str):
+    try:
+
+        r= generate_summary(long_text)
+        data={"id":id,"result":r}
+        x=requests.post(host,data= data)
+        print(x.text)
+
+
+    except:
+        print("Error occured id= "+id)
+
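
Editor's note: taken together, the two hunks add a fire-and-forget summarization flow: /large generates a random job id, starts a worker thread, returns the id immediately, and the worker later POSTs the finished summary to the caller-supplied host. The following is a hedged, self-contained sketch of that flow, not the committed code: it assumes generate_summary and the app instance are defined elsewhere in app.py, imports JSONResponse from fastapi.responses, and tidies a few issues visible in the diff (the bare except, the unreachable second return "hello", the mid-file import requests, id shadowing the builtin, and the reused get_answer function name).

    import random
    import string
    import threading
    import time

    import requests
    from fastapi import FastAPI
    from fastapi.responses import JSONResponse
    from pydantic import BaseModel

    app = FastAPI()

    class Query2(BaseModel):
        text: str
        host: str  # callback URL that will receive the finished summary

    def generate_summary(long_text: str) -> str:
        # Placeholder: in app.py this presumably wraps the EncoderDecoderModel
        # summarizer referenced near the top of the file.
        raise NotImplementedError

    def do_ML(job_id: str, long_text: str, host: str) -> None:
        # Run the summarizer, then POST {"id", "result"} back to the caller's host.
        try:
            result = generate_summary(long_text)
            resp = requests.post(host, data={"id": job_id, "result": result})
            print(resp.text)
        except Exception as exc:  # avoid a bare except; report which job failed
            print(f"Error occurred, id={job_id}: {exc}")

    @app.post("/large")
    async def get_answer_large(q: Query2):
        # Random 20-character job id plus a timestamp, as in the commit.
        job_id = "".join(random.choices(string.ascii_uppercase + string.digits, k=20))
        job_id += str(time.time())
        # Fire-and-forget: the response only carries the id; the result arrives
        # later at q.host via the worker's callback POST.
        t = threading.Thread(target=do_ML, args=(job_id, q.text, q.host))
        t.start()
        return JSONResponse({"id": job_id})

FastAPI's built-in BackgroundTasks would be a lighter-weight alternative to a raw threading.Thread here, but the sketch keeps the thread to stay close to what the commit does.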