Update app.py

app.py CHANGED

```diff
@@ -14,6 +14,7 @@ llm = Llama(
     n_ctx=2048,
     chat_format="llama-3",
     n_gpu_layers=-1,  # ensure all layers are on GPU
+    n_threads=1,  # no CPU multi-threading
 )

 # Placeholder responses for when context is empty
```