Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
@@ -5,14 +5,16 @@ import torch
|
|
5 |
from transformers import AutoModelForCausalLM, AutoTokenizer
|
6 |
|
7 |
DESCRIPTION = """\
|
8 |
-
# EvaByte
|
9 |
|
10 |
-
EvaByte is a byte-level language model
|
11 |
-
This
|
12 |
For full details on architecture, training recipe, and benchmarks, see their blog post and the project repository:
|
13 |
|
14 |
- Blog: <https://hkunlp.github.io/blog/2025/evabyte>
|
15 |
- GitHub: <https://github.com/OpenEvaByte/evabyte>
|
|
|
|
|
16 |
"""
|
17 |
|
18 |
MAX_MAX_NEW_TOKENS = 2048
|
|
|
5 |
from transformers import AutoModelForCausalLM, AutoTokenizer
|
6 |
|
7 |
DESCRIPTION = """\
|
8 |
+
# EvaByte [Byte-Level LLM]
|
9 |
|
10 |
+
EvaByte is an efficient byte-level language model with multibyte prediction and EVA attention, built by the University of Hong Kong and SambaNova Systems.
|
11 |
+
This Space is an unofficial demo of the instruction-tuned version [EvaByte/EvaByte-SFT](https://huggingface.co/EvaByte/EvaByte-SFT).
|
12 |
For full details on architecture, training recipe, and benchmarks, see their blog post and the project repository:
|
13 |
|
14 |
- Blog: <https://hkunlp.github.io/blog/2025/evabyte>
|
15 |
- GitHub: <https://github.com/OpenEvaByte/evabyte>
|
16 |
+
|
17 |
+
If you liked this Space, follow me on Twitter: [@KantaHayashiAI](https://x.com/KantaHayashiAI)
|
18 |
"""
|
19 |
|
20 |
MAX_MAX_NEW_TOKENS = 2048
|