Update app.py
app.py CHANGED
@@ -1,7 +1,10 @@
 from gradio import Interface
 import gradio as gr
 import aranizer
-from aranizer import
+from aranizer import (
+    aranizer_bpe50k, aranizer_bpe64k, aranizer_bpe86k,
+    aranizer_sp32k, aranizer_sp50k, aranizer_sp64k, aranizer_sp86k
+)
 from transformers import AutoTokenizer, logging
 from huggingface_hub import login
 import os
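Note: the removed `from aranizer import` line was left truncated, which is a SyntaxError before the app can start; the added parenthesized import lists each pre-built aranizer tokenizer module. A minimal sketch of how one of these modules is typically used (the `get_tokenizer()` accessor is an assumption and is not shown in this diff):

    from aranizer import aranizer_sp32k

    tok = aranizer_sp32k.get_tokenizer()      # assumed accessor returning the loaded tokenizer
    print(tok.tokenize("مرحبا بالعالم"))       # tokenize a short Arabic sample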
@@ -26,19 +29,14 @@ except Exception as e:
     meta_llama_tokenizer = None
     logging.warning(f"Could not load meta-llama/Meta-Llama-3-8B tokenizer: {e}")
 
-cohere_command_r_v01_tokenizer = AutoTokenizer.from_pretrained("CohereForAI/c4ai-command-r-v01")
-cohere_command_r_plus_tokenizer = AutoTokenizer.from_pretrained("CohereForAI/c4ai-command-r-plus")
-
 # List of available tokenizers and a dictionary to load them
 tokenizer_options = [
     "aranizer_bpe50k", "aranizer_bpe64k", "aranizer_bpe86k",
     "aranizer_sp32k", "aranizer_sp50k", "aranizer_sp64k", "aranizer_sp86k",
-    "FreedomIntelligence/AceGPT-13B",
-    "FreedomIntelligence/AceGPT-7B",
+    "FreedomIntelligence/AceGPT-13B",
+    "FreedomIntelligence/AceGPT-7B",
     "inception-mbzuai/jais-13b",
-    "aubmindlab/bert-base-arabertv2"
-    "CohereForAI/c4ai-command-r-v01",
-    "CohereForAI/c4ai-command-r-plus"
+    "aubmindlab/bert-base-arabertv2"
 ]
 
 if meta_llama_tokenizer:
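Note: in the removed version, `"aubmindlab/bert-base-arabertv2"` had no trailing comma, so Python's implicit string concatenation silently merged it with the next literal instead of keeping two list entries. A tiny illustration of the pitfall:

    # Adjacent string literals without a comma are concatenated at parse time:
    options = ["a", "b" "c"]
    print(options)   # ['a', 'bc'], i.e. two items, not three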
@@ -55,9 +53,7 @@ tokenizers = {
     "FreedomIntelligence/AceGPT-13B": lambda: gpt_13b_tokenizer,
     "FreedomIntelligence/AceGPT-7B": lambda: gpt_7b_tokenizer,
     "inception-mbzuai/jais-13b": lambda: jais_13b_tokenizer,
-    "aubmindlab/bert-base-arabertv2": lambda: arabert_tokenizer
-    "CohereForAI/c4ai-command-r-v01": lambda: cohere_command_r_v01_tokenizer,
-    "CohereForAI/c4ai-command-r-plus": lambda: cohere_command_r_plus_tokenizer
+    "aubmindlab/bert-base-arabertv2": lambda: arabert_tokenizer
 }
 
 if meta_llama_tokenizer:
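Note: here the missing comma was fatal rather than silent; unlike adjacent string literals in a list, dict entries without a separating comma are a SyntaxError, so the old `tokenizers` dict could not even be parsed. The fixed dict of lazy-loading lambdas is presumably consumed by a lookup helper elsewhere in app.py; a hypothetical sketch (the name `get_tokens` and its signature are not part of this diff):

    def get_tokens(text, tokenizer_name):
        # Call the stored lambda to obtain the already-loaded tokenizer, then tokenize.
        tokenizer = tokenizers[tokenizer_name]()
        return tokenizer.tokenize(text)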
@@ -112,4 +108,4 @@ iface = Interface(
 )
 
 # Launching the Gradio app
-iface.launch()
+iface.launch()
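Note: the final hunk re-adds `iface.launch()` unchanged (likely a whitespace-only edit). For context, a minimal sketch of how an Interface like this is typically wired to the dropdown of `tokenizer_options`; the real argument list sits above this hunk and is not shown, and `get_tokens` is the hypothetical helper from the note above:

    # Illustrative only: the actual Interface arguments are outside this diff.
    iface = Interface(
        fn=get_tokens,
        inputs=[gr.Textbox(label="Arabic text"),
                gr.Dropdown(choices=tokenizer_options, label="Tokenizer")],
        outputs=gr.JSON(label="Tokens"),
    )
    iface.launch()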