Spaces:
Runtime error
upd: pyttsx3 removed
- app.py +0 -7
- requirements.txt +1 -3
app.py
CHANGED
@@ -6,7 +6,6 @@ from transformers import (
     AutoModelForDocumentQuestionAnswering,
 )
 import torch
-import pyttsx3
 
 
 tokenizer_ru2en = AutoTokenizer.from_pretrained("Helsinki-NLP/opus-mt-ru-en")
@@ -27,9 +26,6 @@ device = "cuda" if torch.cuda.is_available() else "cpu"
 git_model_base.to(device)
 
 
-engine = pyttsx3.init()
-
-
 def translate_ru2en(text):
     inputs = tokenizer_ru2en(text, return_tensors="pt")
     outputs = model_ru2en.generate(**inputs)
@@ -68,9 +64,6 @@ def generate_answer(image, question):
 
     answer_ru = translate_en2ru(answer_en)
 
-    engine.say(answer_ru)
-    engine.runAndWait()
-
     return answer_ru
 
 
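For readers following the hunks above, a self-contained sketch of how the translate_ru2en helper can work end to end. The diff only shows its first two lines, so the model class used here (AutoModelForSeq2SeqLM) and the final decode/return step are assumptions for illustration, not necessarily the repository's exact code:

from transformers import AutoTokenizer, AutoModelForSeq2SeqLM  # assumption: seq2seq class for the MarianMT checkpoint

# Same checkpoint as in the hunk above
tokenizer_ru2en = AutoTokenizer.from_pretrained("Helsinki-NLP/opus-mt-ru-en")
model_ru2en = AutoModelForSeq2SeqLM.from_pretrained("Helsinki-NLP/opus-mt-ru-en")

def translate_ru2en(text):
    # Tokenize the Russian input for the ru->en model
    inputs = tokenizer_ru2en(text, return_tensors="pt")
    # Generate English translation token ids
    outputs = model_ru2en.generate(**inputs)
    # Assumed completion: decode the first generated sequence back to plain text
    return tokenizer_ru2en.decode(outputs[0], skip_special_tokens=True)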
requirements.txt
CHANGED
@@ -1,6 +1,4 @@
 git+https://github.com/facebookresearch/detectron2.git
 pytesseract
-pyttsx3
 sentencepiece
-torchvision
-sacremoses
+torchvision