umerghafoor committed
Commit 41f3f05 · verified · 1 Parent(s): 3a77bc8

Upload GPTBigCodeForCausalLM

Files changed (3):
  1. README.md +3 -1
  2. config.json +1 -3
  3. model.safetensors +1 -1
README.md CHANGED
@@ -1,6 +1,8 @@
 ---
 library_name: transformers
-tags: []
+tags:
+- trl
+- reward-trainer
 ---
 
 # Model Card for Model ID
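
The new `trl` and `reward-trainer` tags are the model-card metadata that TRL's `RewardTrainer` stamps on a checkpoint when it is pushed to the Hub. A minimal sketch of that kind of run follows; the base checkpoint, preference dataset, and output directory are placeholders rather than details taken from this commit (note, too, that reward models are conventionally sequence classifiers, while this commit uploads a causal-LM head).

```python
# Hypothetical reward-training run of the sort that writes the "trl" and
# "reward-trainer" tags into README.md on push_to_hub(). All names below
# are placeholders, not taken from this repository.
from datasets import load_dataset
from transformers import AutoModelForSequenceClassification, AutoTokenizer
from trl import RewardConfig, RewardTrainer

base = "bigcode/gpt_bigcode-santacoder"  # placeholder GPTBigCode checkpoint

tokenizer = AutoTokenizer.from_pretrained(base)
tokenizer.pad_token = tokenizer.eos_token  # GPT-style tokenizers lack a pad token

# Reward models are usually loaded as single-label sequence classifiers.
model = AutoModelForSequenceClassification.from_pretrained(base, num_labels=1)
model.config.pad_token_id = tokenizer.pad_token_id

# RewardTrainer expects preference pairs ("chosen" / "rejected" columns).
dataset = load_dataset("trl-lib/ultrafeedback_binarized", split="train")

trainer = RewardTrainer(
    model=model,
    args=RewardConfig(output_dir="reward-model"),
    processing_class=tokenizer,  # older TRL releases use tokenizer= instead
    train_dataset=dataset,
)
trainer.train()
trainer.push_to_hub()  # writes the tagged model card seen in this diff
```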
config.json CHANGED
@@ -7,7 +7,6 @@
   "attn_pdrop": 0.1,
   "bos_token_id": 0,
   "embd_pdrop": 0.1,
-  "end_token_id": 0,
   "eos_token_id": 0,
   "inference_runner": 0,
   "initializer_range": 0.02,
@@ -22,7 +21,6 @@
   "n_layer": 20,
   "n_positions": 8192,
   "pad_key_length": true,
-  "pad_token_id": 0,
   "pre_allocate_kv_cache": false,
   "resid_pdrop": 0.1,
   "scale_attention_softmax_in_fp32": true,
@@ -34,7 +32,7 @@
   "summary_use_proj": true,
   "torch_dtype": "float32",
   "transformers_version": "4.50.0",
-  "use_cache": false,
+  "use_cache": true,
   "validate_runner_input": true,
   "vocab_size": 49152
 }
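
Net effect of the config changes: the non-standard `end_token_id` and the `pad_token_id` entries are dropped (leaving `eos_token_id: 0` as the stop token), and `use_cache` is flipped from `false`, typical during gradient-checkpointed training, back to `true`, so `generate()` can reuse cached key/value states at inference. A hedged loading sketch, with a placeholder repo id since the commit page does not name the repository:

```python
# Sketch of running the uploaded GPTBigCodeForCausalLM checkpoint; the repo
# id is a placeholder, so substitute the actual repository name.
from transformers import AutoModelForCausalLM, AutoTokenizer

repo_id = "umerghafoor/your-model"  # hypothetical

tokenizer = AutoTokenizer.from_pretrained(repo_id)
model = AutoModelForCausalLM.from_pretrained(repo_id)  # GPTBigCodeForCausalLM

inputs = tokenizer("def fibonacci(n):", return_tensors="pt")
# With "use_cache": true, generate() reuses cached key/value states instead
# of recomputing attention over the whole prefix at every decoding step.
outputs = model.generate(**inputs, max_new_tokens=32)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```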
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:f11078341768fe97bd260cfdddb565d7a4d7dec5831e96953d5005e4f6bc0a4a
+oid sha256:4534e232c3f20db1774695ec7a8c5ef0506799b0377996be5dbce2f3044c65ae
 size 656601304
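
The weights keep the same byte size (656,601,304) but get a new content hash, consistent with retrained weights of identical shape. To verify a local download against the LFS pointer, a small sketch:

```python
# Check a downloaded model.safetensors against the sha256 oid recorded in
# the Git LFS pointer above.
import hashlib

EXPECTED = "4534e232c3f20db1774695ec7a8c5ef0506799b0377996be5dbce2f3044c65ae"

def sha256_of(path: str, chunk_size: int = 1 << 20) -> str:
    """Stream the file in 1 MiB chunks so the ~656 MB file never sits in RAM."""
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            digest.update(chunk)
    return digest.hexdigest()

assert sha256_of("model.safetensors") == EXPECTED, "checksum mismatch"
```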