Sanzana Lora
committed on
Update app.py
app.py CHANGED
@@ -15,6 +15,7 @@ get_lang_id = lambda lang: tokenizer._convert_token_to_id(
 
 # Function for cross-lingual summarization
 def cross_lingual_summarization(article_text, target_language):
+    target_language = ""
     input_ids = tokenizer(
         [WHITESPACE_HANDLER(article_text)],
         return_tensors="pt",
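For context, a minimal sketch of how the truncated function in this hunk typically continues, assuming a standard mT5-based cross-lingual summarization setup. The checkpoint name, the WHITESPACE_HANDLER and get_lang_id definitions, and the generation parameters below are assumptions for illustration, not part of this commit; the sketch shows the function body as it presumably reads beyond the lines visible in the diff.

# Sketch only: assumed setup for an mT5-style cross-lingual summarizer.
# Checkpoint name, langid_map lookup, and generation settings are assumptions.
import re
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM

# Collapse newlines and repeated whitespace before tokenization.
WHITESPACE_HANDLER = lambda text: re.sub(r"\s+", " ", re.sub(r"\n+", " ", text.strip()))

model_name = "csebuetnlp/mT5_m2m_crossSum"  # assumed checkpoint
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForSeq2SeqLM.from_pretrained(model_name)

# Map a language name to the token id the decoder should start with.
get_lang_id = lambda lang: tokenizer._convert_token_to_id(
    model.config.task_specific_params["langid_map"][lang][1]
)

def cross_lingual_summarization(article_text, target_language):
    # Tokenize the cleaned article text.
    input_ids = tokenizer(
        [WHITESPACE_HANDLER(article_text)],
        return_tensors="pt",
        padding="max_length",
        truncation=True,
        max_length=512,
    )["input_ids"]

    # Force decoding to start with the target-language id so the summary
    # is produced in the requested language.
    output_ids = model.generate(
        input_ids=input_ids,
        decoder_start_token_id=get_lang_id(target_language),
        max_length=84,
        no_repeat_ngram_size=2,
        num_beams=4,
    )[0]

    return tokenizer.decode(
        output_ids,
        skip_special_tokens=True,
        clean_up_tokenization_spaces=False,
    )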