Sanzana Lora
committed on
Update app.py
app.py CHANGED
@@ -14,7 +14,7 @@ get_lang_id = lambda lang: tokenizer._convert_token_to_id(
 )
 
 # Function for cross-lingual summarization
-def cross_lingual_summarization(
+def cross_lingual_summarization(target_language, article_text):
     input_ids = tokenizer(
         [WHITESPACE_HANDLER(article_text)],
         return_tensors="pt",
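
For context, the identifiers visible around this hunk (get_lang_id, WHITESPACE_HANDLER, the tokenizer call) match the standard CrossSum-style mT5 cross-lingual summarization recipe. Below is a minimal, self-contained sketch of what the completed cross_lingual_summarization(target_language, article_text) function typically looks like under that assumption; the checkpoint name and generation parameters are illustrative and are not taken from this commit.

# A minimal sketch assuming the app follows the CrossSum-style mT5 recipe.
# The checkpoint name and generation parameters are assumptions for
# illustration, not part of this diff.
import re

from transformers import AutoTokenizer, AutoModelForSeq2SeqLM

# Collapse newlines and repeated whitespace before tokenization.
WHITESPACE_HANDLER = lambda k: re.sub(r"\s+", " ", re.sub(r"\n+", " ", k.strip()))

# Hypothetical checkpoint; the actual app.py may load a different model.
model_name = "csebuetnlp/mT5_m2m_crossSum"
tokenizer = AutoTokenizer.from_pretrained(model_name, use_fast=False)
model = AutoModelForSeq2SeqLM.from_pretrained(model_name)

# Map a language name (e.g. "english") to its decoder-start token id.
get_lang_id = lambda lang: tokenizer._convert_token_to_id(
    model.config.task_specific_params["langid_map"][lang][1]
)

# Function for cross-lingual summarization
def cross_lingual_summarization(target_language, article_text):
    # Tokenize the cleaned article text.
    input_ids = tokenizer(
        [WHITESPACE_HANDLER(article_text)],
        return_tensors="pt",
        padding="max_length",
        truncation=True,
        max_length=512,
    )["input_ids"]

    # Force decoding to start with the target language's token id.
    output_ids = model.generate(
        input_ids=input_ids,
        decoder_start_token_id=get_lang_id(target_language),
        max_length=84,
        no_repeat_ngram_size=2,
        num_beams=4,
    )[0]

    # Decode the generated summary back to plain text.
    return tokenizer.decode(
        output_ids,
        skip_special_tokens=True,
        clean_up_tokenization_spaces=False,
    )

Passing the target language explicitly, as the new signature does, lets the same function serve summaries in any language supported by the model's langid_map rather than hard-coding one output language.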