if001 committed on
Commit
d0a6395
·
verified ·
1 Parent(s): 87a3c94

Upload GemmaForCausalLM

Browse files
Files changed (2) hide show
  1. config.json +1 -1
  2. model.safetensors +2 -2
config.json CHANGED
@@ -16,7 +16,7 @@
16
  "max_position_embeddings": 1024,
17
  "model_type": "gemma",
18
  "num_attention_heads": 4,
19
- "num_hidden_layers": 2,
20
  "num_key_value_heads": 1,
21
  "pad_token_id": 0,
22
  "rms_norm_eps": 1e-05,
 
16
  "max_position_embeddings": 1024,
17
  "model_type": "gemma",
18
  "num_attention_heads": 4,
19
+ "num_hidden_layers": 4,
20
  "num_key_value_heads": 1,
21
  "pad_token_id": 0,
22
  "rms_norm_eps": 1e-05,
model.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:9d079245fdfcfa7a7ea008ad1a1d2906efa67c3f4877f62dc7ca5454d8ec7b45
3
- size 915429664
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:df30b39f3e08d98edbcf45be0314a0daf25c748fb70c6c3da1b52b93ac356927
3
+ size 1037082896