Update app.py
app.py CHANGED

@@ -77,7 +77,7 @@ def respond(
     if llama == None:
         llama = Llama("models/madlad400-3b-mt-q8_0.gguf",flash_attn=False,
         n_gpu_layers=0,
-        n_batch=
+        n_batch=32,
         n_ctx=512,
         n_threads=2,
         n_threads_batch=2,verbose=False)
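For reference, a minimal sketch of how this lazily initialized Llama instance might be driven with llama-cpp-python. Only the constructor arguments come from the diff; the `translate` helper, the `target` parameter, and the `<2xx>` MADLAD-400 target-language prefix are assumptions for illustration.

from llama_cpp import Llama  # llama-cpp-python

llama = None

def translate(text, target="es"):
    """Hypothetical helper: lazily load the GGUF model, then translate `text`."""
    global llama
    if llama is None:
        llama = Llama(
            "models/madlad400-3b-mt-q8_0.gguf",
            flash_attn=False,
            n_gpu_layers=0,    # CPU only
            n_batch=32,        # value introduced by this commit
            n_ctx=512,
            n_threads=2,
            n_threads_batch=2,
            verbose=False,
        )
    # MADLAD-400 is prompted with a "<2xx>" target-language tag before the
    # source text (assumed prompt format).
    out = llama(f"<2{target}> {text}", max_tokens=256)
    return out["choices"][0]["text"].strip()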