change model
model_params.cfg
CHANGED
@@ -1,9 +1,9 @@
 [generator]
 PROVIDER = huggingface
-MODEL = meta-llama/Llama-3
-MAX_TOKENS =
+MODEL = meta-llama/Meta-Llama-3-8B-Instruct
+MAX_TOKENS = 768
 TEMPERATURE = 0.2
-INFERENCE_PROVIDER =
+INFERENCE_PROVIDER = novita
 ORGANIZATION = GIZ
 
 [reader]
utils/__pycache__/generator.cpython-310.pyc
CHANGED
Binary files a/utils/__pycache__/generator.cpython-310.pyc and b/utils/__pycache__/generator.cpython-310.pyc differ
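For context, a minimal sketch of how the updated [generator] settings might be consumed, assuming the app reads model_params.cfg with configparser and calls the model through huggingface_hub's InferenceClient with provider routing (available in recent huggingface_hub releases). The function names (load_generator_config, generate_answer) and the hf_token argument are illustrative assumptions, not taken from the repository's generator module.

    # Hypothetical sketch, not the repository's actual generator code.
    import configparser
    from huggingface_hub import InferenceClient

    def load_generator_config(path: str = "model_params.cfg") -> dict:
        """Parse the [generator] section shown in the diff above."""
        cfg = configparser.ConfigParser()
        cfg.read(path)
        gen = cfg["generator"]
        return {
            "model": gen.get("MODEL"),                       # meta-llama/Meta-Llama-3-8B-Instruct
            "max_tokens": gen.getint("MAX_TOKENS"),          # 768
            "temperature": gen.getfloat("TEMPERATURE"),      # 0.2
            "inference_provider": gen.get("INFERENCE_PROVIDER"),  # novita
        }

    def generate_answer(prompt: str, hf_token: str) -> str:
        """Send one chat request using the configured model and provider."""
        params = load_generator_config()
        # Provider routing (e.g. "novita") requires a recent huggingface_hub version.
        client = InferenceClient(provider=params["inference_provider"], api_key=hf_token)
        response = client.chat_completion(
            model=params["model"],
            messages=[{"role": "user", "content": prompt}],
            max_tokens=params["max_tokens"],
            temperature=params["temperature"],
        )
        return response.choices[0].message.content

With this layout, switching to a larger Llama variant or a different hosted inference provider only requires editing model_params.cfg, not the application code.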