File size: 430 Bytes
b339871
539065a
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
b339871
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
{
  "host": "0.0.0.0",
  "port": 8080,
  "models": [
    {
      "model": "llava-v1.6-mistral-7b.Q3_K_XS.gguf",
      "model_alias": "llava-1.6",
      "chat_format": "llava-1-5",
      "clip_model_path": "mmproj-model-f16.gguf",
      "n_gpu_layers": -1,
      "offload_kqv": false,
      "n_threads": 12,
      "n_batch": 1,
      "n_ctx": 2048,
      "logits_all": true
    }
  ]
}