Update handler.py

Changed file: handler.py (+4 lines, −4 lines)

@@ -71,10 +71,10 @@ def predict(text, model, tokenizer):
|
|
#print("summary:", sorted(result_text, key=len)[3])


class EndpointHandler():
    """Custom inference-endpoint handler for a GPT-2 text-summarization model.

    Loads the tokenizer and model once at startup, then serves requests via
    __call__, delegating generation to the module-level predict() helper.
    """

    def __init__(self, path="Lin0He/text-summary-gpt2-short"):
        # Load model and tokenizer from `path` (a local dir or a Hub repo id;
        # defaults to the published summarization checkpoint).
        self.tokenizer = AutoTokenizer.from_pretrained(path)
        self.model = AutoModel.from_pretrained(path)

    def __call__(self, data: Dict[str, Any]) -> Dict[str, Any]:
        # Accept either {"inputs": {...}} or a bare payload dict.
        inputs = data.pop("inputs", data)
        # Run summarization on the request text.
        # NOTE(review): assumes `inputs` is a dict with a 'text' key — a bare
        # string payload would raise here; confirm against the caller contract.
        prediction = predict(inputs['text'], self.model, self.tokenizer)
        return {"generated_text": prediction}