Commit 9f67944 by BoyaWu10
Parent(s): ef3869e

Update model
adapter_config.json CHANGED
@@ -1,7 +1,7 @@
 {
   "alpha_pattern": {},
   "auto_mapping": null,
-  "base_model_name_or_path": "Isaachhe/phi-2_dev",
+  "base_model_name_or_path": "microsoft/phi-2",
   "bias": "none",
   "fan_in_fan_out": false,
   "inference_mode": true,
@@ -19,8 +19,10 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "query_key_value",
     "fc1",
+    "q_proj",
+    "k_proj",
+    "v_proj",
     "dense",
     "fc2"
   ],
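The retargeting from a fused "query_key_value" module to separate "q_proj"/"k_proj"/"v_proj" projections appears to track the base-model switch: the Transformers port of phi-2 splits the fused QKV projection into per-tensor linear layers, so the adapter now carries three attention A/B pairs per layer instead of one (consistent with the larger adapter_model.safetensors below). A minimal sketch, not the authors' loader, of attaching this adapter to the new base; the adapter repo id is a placeholder:

```python
# Minimal sketch: attach the LoRA adapter to the new base model.
# "BoyaWu10/bunny-lora-phi-2" is a placeholder repo id, not from this commit.
from transformers import AutoModelForCausalLM, AutoTokenizer
from peft import PeftModel

base = AutoModelForCausalLM.from_pretrained(
    "microsoft/phi-2",       # new base_model_name_or_path from this commit
    trust_remote_code=True,  # the adapter repo ships custom Phi modeling code
)
model = PeftModel.from_pretrained(base, "BoyaWu10/bunny-lora-phi-2")  # placeholder
tokenizer = AutoTokenizer.from_pretrained("microsoft/phi-2")
```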
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:29d0e6e9a71dd29aece673043fd727fb772dd00815cb82f0a4f0f7e503797942
-size 335579120
+oid sha256:38ba97037e32366eb3a5b831aa0807a533b4ba56975b49d314eaf793665f768a
+size 377539280
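Git LFS stores only this pointer in the repo; the blob itself is content-addressed by its SHA-256. The roughly 42 MB growth (335,579,120 to 377,539,280 bytes) is what you would expect from LoRA now wrapping three attention projections instead of one fused module. A quick sketch, assuming the blob has been downloaded to a local path, to check it against the new pointer:

```python
# Sketch: verify a downloaded LFS object against the pointer's sha256 and size.
# The local path is an assumption, not something defined by this commit.
import hashlib
import os

path = "adapter_model.safetensors"
expected_oid = "38ba97037e32366eb3a5b831aa0807a533b4ba56975b49d314eaf793665f768a"
expected_size = 377539280

assert os.path.getsize(path) == expected_size
h = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
        h.update(chunk)
assert h.hexdigest() == expected_oid
```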
config.json CHANGED
@@ -4,6 +4,10 @@
4
  "PhiForCausalLM"
5
  ],
6
  "attention_dropout": 0.0,
 
 
 
 
7
  "bos_token_id": 50256,
8
  "embd_pdrop": 0.0,
9
  "eos_token_id": 50256,
@@ -22,8 +26,8 @@
22
  "model_type": "bunny-phi",
23
  "num_attention_heads": 32,
24
  "num_hidden_layers": 32,
 
25
  "partial_rotary_factor": 0.4,
26
- "pretraining_tp": 1,
27
  "qk_layernorm": false,
28
  "resid_pdrop": 0.1,
29
  "rope_scaling": null,
 
4
  "PhiForCausalLM"
5
  ],
6
  "attention_dropout": 0.0,
7
+ "auto_map": {
8
+ "AutoConfig": "configuration_phi.PhiConfig",
9
+ "AutoModelForCausalLM": "modeling_phi.PhiForCausalLM"
10
+ },
11
  "bos_token_id": 50256,
12
  "embd_pdrop": 0.0,
13
  "eos_token_id": 50256,
 
26
  "model_type": "bunny-phi",
27
  "num_attention_heads": 32,
28
  "num_hidden_layers": 32,
29
+ "num_key_value_heads": 32,
30
  "partial_rotary_factor": 0.4,
 
31
  "qk_layernorm": false,
32
  "resid_pdrop": 0.1,
33
  "rope_scaling": null,
non_lora_trainables.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:fd0abfe41d1ffce799130123cfda6aad65743cf6cb9973f4dddfa111a3196588
+oid sha256:17ba166919e7778444751a48b78efae3278f7320da8d50b7e325adf0ac153548
 size 18362544
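non_lora_trainables.bin holds the weights trained outside the LoRA adapter; in Bunny-style multimodal tuning this is typically the vision-to-language projector, though that is an assumption here. Its byte size is unchanged, so only the values were updated, not the tensor shapes. A sketch for inspecting a local copy:

```python
# Sketch: list the tensors stored in non_lora_trainables.bin. Same byte size
# before and after this commit, so shapes should match across versions.
import torch

state = torch.load("non_lora_trainables.bin", map_location="cpu")
for name, tensor in state.items():
    print(name, tuple(tensor.shape), tensor.dtype)
```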
trainer_state.json CHANGED
The diff for this file is too large to render here; see the raw file in the repository.