Spaces:
GIZ
/
Running on CPU Upgrade

Asistente_EUDR / model_params.cfg
leavoigt's picture
add generator
773f59c
raw
history blame
544 Bytes
# [generator] — settings for the answer-generation model.
[generator]
# Inference backend identifier; "huggingface" presumably selects the HF
# Inference API — confirm against the code that reads this section.
PROVIDER = huggingface
# Model repo id on the Hugging Face Hub.
MODEL = meta-llama/Meta-Llama-3-8B-Instruct
# Token budget for generation (likely max new tokens — verify in consumer code).
MAX_TOKENS = 512
# Sampling temperature; 0.2 keeps output mostly deterministic.
TEMPERATURE = 0.2
# [reader] — retrieval/answer-reading model. Three alternative backends are
# configured below (inference providers, dedicated endpoint, NVIDIA); TYPE
# appears to select which one is active — confirm in the reader code.
[reader]
# Active backend selector; INF_PROVIDERS presumably routes to the
# INF_PROVIDER_MODEL / INF_PROVIDER keys below.
TYPE = INF_PROVIDERS
# Backend A: HF inference-providers model (used with INF_PROVIDER further down).
INF_PROVIDER_MODEL = meta-llama/Llama-3.1-8B-Instruct
# Backend B: dedicated Hugging Face Inference Endpoint.
DEDICATED_MODEL = meta-llama/Llama-3.1-8B-Instruct
DEDICATED_ENDPOINT = https://qu2d8m6dmsollhly.us-east-1.aws.endpoints.huggingface.cloud
# Backend C: NVIDIA DGX integration exposed through the Hugging Face API.
NVIDIA_MODEL = meta-llama/Llama-3.1-8B-Instruct
NVIDIA_ENDPOINT = https://huggingface.co/api/integrations/dgx/v1
# Token budget for reader responses (note: larger than the generator's 512).
MAX_TOKENS = 768
# Provider name for backend A; "nebius" is presumably the Nebius inference
# provider on the HF providers network — verify against supported providers.
INF_PROVIDER = nebius
# [app] — application/UI settings.
[app]
# Default value pre-selected in the app's document dropdown; must match one of
# the dropdown's option labels exactly — TODO confirm against the UI code.
dropdown_default = Annual Consolidated OAG 2024