LAP-DEV committed on
Commit
cfab588
·
verified ·
1 Parent(s): 7883908

Update modules/translation/translation_base.py

Browse files
modules/translation/translation_base.py CHANGED
@@ -129,6 +129,56 @@ class TranslationBase(ABC):
129
  finally:
130
  self.release_cuda_memory()
131
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
132
  def offload(self):
133
  """Offload the model and free up the memory"""
134
  if self.model is not None:
 
129
  finally:
130
  self.release_cuda_memory()
131
 
132
def translate_text(self,
                   input_list_dict: list,
                   model_size: str,
                   src_lang: str,
                   tgt_lang: str,
                   max_length: int = 200,
                   add_timestamp: bool = True,
                   progress=gr.Progress()) -> list:
    """
    Translate a list of segment dicts from a source language to a target language.

    Parameters
    ----------
    input_list_dict: list
        List[dict] of segments to translate. Each dict must contain a "text"
        key; its value is overwritten in place with the translation.
    model_size: str
        Translation model size from gr.Dropdown()
    src_lang: str
        Source language of the file to translate from gr.Dropdown()
    tgt_lang: str
        Target language of the file to translate from gr.Dropdown()
    max_length: int
        Max length per line to translate
    add_timestamp: bool
        Boolean value that determines whether to add a timestamp
    progress: gr.Progress
        Indicator to show progress directly in gradio.
        I use a forked version of whisper for this. To see more info : https://github.com/jhj0517/jhj0517-whisper/tree/add-progress-callback

    Returns
    ----------
    list
        The same List[dict], with each "text" value replaced by its translation.
    """
    try:
        # Persist the UI selections, then (re)load the model if the
        # requested size/language pair differs from the current one.
        self.cache_parameters(model_size=model_size,
                              src_lang=src_lang,
                              tgt_lang=tgt_lang,
                              max_length=max_length,
                              add_timestamp=add_timestamp)
        self.update_model(model_size=model_size,
                          src_lang=src_lang,
                          tgt_lang=tgt_lang,
                          progress=progress)

        total_progress = len(input_list_dict)
        for index, dic in enumerate(input_list_dict):
            progress(index / total_progress, desc="Translating..")
            # Mutates the caller's dicts in place (and also returns the list).
            dic["text"] = self.translate(dic["text"], max_length=max_length)

        return input_list_dict

    except Exception as e:
        print(f"Error translating file: {e}")
        raise
    finally:
        # Always release GPU memory, whether translation succeeded or failed.
        self.release_cuda_memory()
182
  def offload(self):
183
  """Offload the model and free up the memory"""
184
  if self.model is not None: