Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
@@ -32,7 +32,7 @@ hf_hub_download(
|
|
32 |
)
|
33 |
|
34 |
# Set the title and description
|
35 |
-
title = "madlad400-3b-mt
|
36 |
description = """
|
37 |
I'm using [fairydreaming/T5-branch](https://github.com/fairydreaming/llama-cpp-python/tree/t5), I'm not sure current llama-cpp-python support t5
|
38 |
|
@@ -85,7 +85,7 @@ def respond(
|
|
85 |
n_batch=16,
|
86 |
n_ctx=512,
|
87 |
n_threads=2,
|
88 |
-
n_threads_batch=
|
89 |
|
90 |
tokens = llama.tokenize(f"<2ja>{message}".encode("utf-8"))
|
91 |
llama.encode(tokens)
|
|
|
32 |
)
|
33 |
|
34 |
# Set the title and description
|
35 |
+
title = "madlad400-3b-mt llama.cpp"
|
36 |
description = """
|
37 |
I'm using [fairydreaming/T5-branch](https://github.com/fairydreaming/llama-cpp-python/tree/t5), I'm not sure current llama-cpp-python support t5
|
38 |
|
|
|
85 |
n_batch=16,
|
86 |
n_ctx=512,
|
87 |
n_threads=2,
|
88 |
+
n_threads_batch=2, verbose=False)
|
89 |
|
90 |
tokens = llama.tokenize(f"<2ja>{message}".encode("utf-8"))
|
91 |
llama.encode(tokens)
|