kevin009 committed · Commit e4b3010 · verified · 1 Parent(s): f314dd3

MistralForCausalLM

Files changed (1): config.json (+1, -1)
config.json CHANGED
@@ -15,7 +15,7 @@
   "num_attention_heads": 32,
   "num_hidden_layers": 32,
   "num_key_value_heads": 8,
-  "rms_norm_eps": 1e-05,
+  "rms_norm_eps": 2e-06,
   "rope_theta": 10000.0,
   "sliding_window": 4096,
   "tie_word_embeddings": false,