Update app.py
app.py
@@ -72,7 +72,7 @@ def test():
 llama_cpp.llama_backend_init(numa=False)
 
 N_THREADS = multiprocessing.cpu_count()
-MODEL_PATH =
+MODEL_PATH = "models/madlad400-3b-mt-q8_0.gguf"
 
 prompt = b"translate English to German: The house is wonderful."
 
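For context, a minimal sketch of how the newly set MODEL_PATH would presumably be consumed further down in app.py, using the llama_cpp low-level bindings already visible in the hunk (llama_backend_init(numa=False)). The loading calls below (llama_load_model_from_file, llama_new_context_with_model) are assumptions about the older llama-cpp-python low-level API and are not part of this commit:

# Sketch only: assumed downstream usage, not part of this commit.
import multiprocessing
import llama_cpp

llama_cpp.llama_backend_init(numa=False)

N_THREADS = multiprocessing.cpu_count()
MODEL_PATH = "models/madlad400-3b-mt-q8_0.gguf"

# Load the GGUF model from the path set by this commit (assumed API of the
# older llama-cpp-python low-level bindings matching the numa=False signature).
model_params = llama_cpp.llama_model_default_params()
model = llama_cpp.llama_load_model_from_file(MODEL_PATH.encode("utf-8"), model_params)

# Create an inference context that runs on all available CPU threads.
ctx_params = llama_cpp.llama_context_default_params()
ctx_params.n_threads = N_THREADS
ctx = llama_cpp.llama_new_context_with_model(model, ctx_params)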