diff --git a/.gitattributes b/.gitattributes
new file mode 100644
index 0000000000000000000000000000000000000000..a6344aac8c09253b3b630fb776ae94478aa0275b
--- /dev/null
+++ b/.gitattributes
@@ -0,0 +1,35 @@
+*.7z filter=lfs diff=lfs merge=lfs -text
+*.arrow filter=lfs diff=lfs merge=lfs -text
+*.bin filter=lfs diff=lfs merge=lfs -text
+*.bz2 filter=lfs diff=lfs merge=lfs -text
+*.ckpt filter=lfs diff=lfs merge=lfs -text
+*.ftz filter=lfs diff=lfs merge=lfs -text
+*.gz filter=lfs diff=lfs merge=lfs -text
+*.h5 filter=lfs diff=lfs merge=lfs -text
+*.joblib filter=lfs diff=lfs merge=lfs -text
+*.lfs.* filter=lfs diff=lfs merge=lfs -text
+*.mlmodel filter=lfs diff=lfs merge=lfs -text
+*.model filter=lfs diff=lfs merge=lfs -text
+*.msgpack filter=lfs diff=lfs merge=lfs -text
+*.npy filter=lfs diff=lfs merge=lfs -text
+*.npz filter=lfs diff=lfs merge=lfs -text
+*.onnx filter=lfs diff=lfs merge=lfs -text
+*.ot filter=lfs diff=lfs merge=lfs -text
+*.parquet filter=lfs diff=lfs merge=lfs -text
+*.pb filter=lfs diff=lfs merge=lfs -text
+*.pickle filter=lfs diff=lfs merge=lfs -text
+*.pkl filter=lfs diff=lfs merge=lfs -text
+*.pt filter=lfs diff=lfs merge=lfs -text
+*.pth filter=lfs diff=lfs merge=lfs -text
+*.rar filter=lfs diff=lfs merge=lfs -text
+*.safetensors filter=lfs diff=lfs merge=lfs -text
+saved_model/**/* filter=lfs diff=lfs merge=lfs -text
+*.tar.* filter=lfs diff=lfs merge=lfs -text
+*.tar filter=lfs diff=lfs merge=lfs -text
+*.tflite filter=lfs diff=lfs merge=lfs -text
+*.tgz filter=lfs diff=lfs merge=lfs -text
+*.wasm filter=lfs diff=lfs merge=lfs -text
+*.xz filter=lfs diff=lfs merge=lfs -text
+*.zip filter=lfs diff=lfs merge=lfs -text
+*.zst filter=lfs diff=lfs merge=lfs -text
+*tfevents* filter=lfs diff=lfs merge=lfs -text
diff --git a/checkpoints/checkpoint-100/config.json b/checkpoints/checkpoint-100/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916
--- /dev/null
+++ b/checkpoints/checkpoint-100/config.json
@@ -0,0 +1,31 @@
+{
+  "_name_or_path": "georgeyw/gpt-2-small-init-seed-5",
+  "architectures": [
+    "GPTNeoXForCausalLM"
+  ],
+  "attention_bias": true,
+  "attention_dropout": 0.0,
+  "bos_token_id": 0,
+  "classifier_dropout": 0.1,
+  "eos_token_id": 2,
+  "hidden_act": "gelu",
+  "hidden_dropout": 0.0,
+  "hidden_size": 768,
+  "initializer_range": 0.02,
+  "intermediate_size": 3072,
+  "layer_norm_eps": 1e-05,
+  "layer_norm_epsilon": 1e-05,
+  "max_position_embeddings": 1024,
+  "model_type": "gpt_neox",
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12,
+  "rope_scaling": null,
+  "rotary_emb_base": 10000,
+  "rotary_pct": 0.25,
+  "tie_word_embeddings": false,
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.38.2",
+  "use_cache": true,
+  "use_parallel_residual": true,
+  "vocab_size": 50304
+}
diff --git a/checkpoints/checkpoint-100/model.safetensors b/checkpoints/checkpoint-100/model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..2f6e63dae004b2d8ec17fa2fa40a884c72fa90c7
--- /dev/null
+++ b/checkpoints/checkpoint-100/model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:091d1474cbb537e6bbe54b7cba183bbd4302acf95c5ca594348b4dd0f29a84bf
+size 324662984
diff --git a/checkpoints/checkpoint-100/training_args.bin b/checkpoints/checkpoint-100/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f
--- /dev/null
+++ b/checkpoints/checkpoint-100/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5
+size 6520
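The .gitattributes rules above route every large artifact type (*.safetensors, *.bin, *.ckpt, and so on) through Git LFS, so each checkpoint payload in this diff is committed as a small LFS pointer file rather than raw bytes. Each config.json is a standard transformers GPT-NeoX configuration (12 layers, 12 heads, hidden size 768, i.e. a GPT-2-small-scale model), so any single checkpoint directory loads as a self-contained model folder. A minimal loading sketch; the repository id below is a placeholder, since the diff does not name the repo hosting these files:

from transformers import GPTNeoXForCausalLM

# Placeholder repo id; substitute the actual repository these checkpoints live in.
REPO_ID = "some-org/gpt-2-small-seed-5-checkpoints"

# Each checkpoint directory is a complete transformers model folder,
# so it can be loaded directly via the subfolder argument.
model = GPTNeoXForCausalLM.from_pretrained(
    REPO_ID, subfolder="checkpoints/checkpoint-100"
)
print(f"{model.num_parameters():,} parameters")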
diff --git a/checkpoints/checkpoint-1000/config.json b/checkpoints/checkpoint-1000/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916
--- /dev/null
+++ b/checkpoints/checkpoint-1000/config.json
@@ -0,0 +1,31 @@
+{
+  "_name_or_path": "georgeyw/gpt-2-small-init-seed-5",
+  "architectures": [
+    "GPTNeoXForCausalLM"
+  ],
+  "attention_bias": true,
+  "attention_dropout": 0.0,
+  "bos_token_id": 0,
+  "classifier_dropout": 0.1,
+  "eos_token_id": 2,
+  "hidden_act": "gelu",
+  "hidden_dropout": 0.0,
+  "hidden_size": 768,
+  "initializer_range": 0.02,
+  "intermediate_size": 3072,
+  "layer_norm_eps": 1e-05,
+  "layer_norm_epsilon": 1e-05,
+  "max_position_embeddings": 1024,
+  "model_type": "gpt_neox",
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12,
+  "rope_scaling": null,
+  "rotary_emb_base": 10000,
+  "rotary_pct": 0.25,
+  "tie_word_embeddings": false,
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.38.2",
+  "use_cache": true,
+  "use_parallel_residual": true,
+  "vocab_size": 50304
+}
diff --git a/checkpoints/checkpoint-1000/model.safetensors b/checkpoints/checkpoint-1000/model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..82a8b83427bac865e4ec23b2a794d029a3f1bc50
--- /dev/null
+++ b/checkpoints/checkpoint-1000/model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3af65001b2df1d7aa05097c62001dbcbfe5b950f41419cd4655d4b801acf6149
+size 324662984
diff --git a/checkpoints/checkpoint-1000/training_args.bin b/checkpoints/checkpoint-1000/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f
--- /dev/null
+++ b/checkpoints/checkpoint-1000/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5
+size 6520
diff --git a/checkpoints/checkpoint-10000/config.json b/checkpoints/checkpoint-10000/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916
--- /dev/null
+++ b/checkpoints/checkpoint-10000/config.json
@@ -0,0 +1,31 @@
+{
+  "_name_or_path": "georgeyw/gpt-2-small-init-seed-5",
+  "architectures": [
+    "GPTNeoXForCausalLM"
+  ],
+  "attention_bias": true,
+  "attention_dropout": 0.0,
+  "bos_token_id": 0,
+  "classifier_dropout": 0.1,
+  "eos_token_id": 2,
+  "hidden_act": "gelu",
+  "hidden_dropout": 0.0,
+  "hidden_size": 768,
+  "initializer_range": 0.02,
+  "intermediate_size": 3072,
+  "layer_norm_eps": 1e-05,
+  "layer_norm_epsilon": 1e-05,
+  "max_position_embeddings": 1024,
+  "model_type": "gpt_neox",
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12,
+  "rope_scaling": null,
+  "rotary_emb_base": 10000,
+  "rotary_pct": 0.25,
+  "tie_word_embeddings": false,
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.38.2",
+  "use_cache": true,
+  "use_parallel_residual": true,
+  "vocab_size": 50304
+}
diff --git a/checkpoints/checkpoint-10000/model.safetensors b/checkpoints/checkpoint-10000/model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..f1ad0772416d8f6d086e5522c82dad5665fb23ee
--- /dev/null
+++ b/checkpoints/checkpoint-10000/model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e4d1f284240ac9ce60b1392b020805cb24e2b923e74f33fa362de8cfc19679b6
+size 324662984
diff --git a/checkpoints/checkpoint-10000/training_args.bin b/checkpoints/checkpoint-10000/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f
--- /dev/null
+++ b/checkpoints/checkpoint-10000/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5
+size 6520
diff --git a/checkpoints/checkpoint-100000/config.json b/checkpoints/checkpoint-100000/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916
--- /dev/null
+++ b/checkpoints/checkpoint-100000/config.json
@@ -0,0 +1,31 @@
+{
+  "_name_or_path": "georgeyw/gpt-2-small-init-seed-5",
+  "architectures": [
+    "GPTNeoXForCausalLM"
+  ],
+  "attention_bias": true,
+  "attention_dropout": 0.0,
+  "bos_token_id": 0,
+  "classifier_dropout": 0.1,
+  "eos_token_id": 2,
+  "hidden_act": "gelu",
+  "hidden_dropout": 0.0,
+  "hidden_size": 768,
+  "initializer_range": 0.02,
+  "intermediate_size": 3072,
+  "layer_norm_eps": 1e-05,
+  "layer_norm_epsilon": 1e-05,
+  "max_position_embeddings": 1024,
+  "model_type": "gpt_neox",
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12,
+  "rope_scaling": null,
+  "rotary_emb_base": 10000,
+  "rotary_pct": 0.25,
+  "tie_word_embeddings": false,
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.38.2",
+  "use_cache": true,
+  "use_parallel_residual": true,
+  "vocab_size": 50304
+}
diff --git a/checkpoints/checkpoint-100000/model.safetensors b/checkpoints/checkpoint-100000/model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..8d2dc5c1a24c4431bd67d9184e8bf5d8f51bff78
--- /dev/null
+++ b/checkpoints/checkpoint-100000/model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:865ea2133301ad30c814dc2cbf48fc21135e4e1fd4008fb1c6b43073d7165845
+size 324662984
diff --git a/checkpoints/checkpoint-100000/training_args.bin b/checkpoints/checkpoint-100000/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f
--- /dev/null
+++ b/checkpoints/checkpoint-100000/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5
+size 6520
diff --git a/checkpoints/checkpoint-10200/config.json b/checkpoints/checkpoint-10200/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916
--- /dev/null
+++ b/checkpoints/checkpoint-10200/config.json
@@ -0,0 +1,31 @@
+{
+  "_name_or_path": "georgeyw/gpt-2-small-init-seed-5",
+  "architectures": [
+    "GPTNeoXForCausalLM"
+  ],
+  "attention_bias": true,
+  "attention_dropout": 0.0,
+  "bos_token_id": 0,
+  "classifier_dropout": 0.1,
+  "eos_token_id": 2,
+  "hidden_act": "gelu",
+  "hidden_dropout": 0.0,
+  "hidden_size": 768,
+  "initializer_range": 0.02,
+  "intermediate_size": 3072,
+  "layer_norm_eps": 1e-05,
+  "layer_norm_epsilon": 1e-05,
+  "max_position_embeddings": 1024,
+  "model_type": "gpt_neox",
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12,
+  "rope_scaling": null,
+  "rotary_emb_base": 10000,
+  "rotary_pct": 0.25,
+  "tie_word_embeddings": false,
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.38.2",
+  "use_cache": true,
+  "use_parallel_residual": true,
+  "vocab_size": 50304
+}
diff --git a/checkpoints/checkpoint-10200/model.safetensors b/checkpoints/checkpoint-10200/model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..fec91bbe789177ff532bfa050d3e2887a45b9954
--- /dev/null
+++ b/checkpoints/checkpoint-10200/model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ea6ff7466539c09e743ab87b298b71ad9e5464a1a3042177a315ff303b29e50e
+size 324662984
diff --git a/checkpoints/checkpoint-10200/training_args.bin b/checkpoints/checkpoint-10200/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f
--- /dev/null
+++ b/checkpoints/checkpoint-10200/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5
+size 6520
diff --git a/checkpoints/checkpoint-102000/config.json b/checkpoints/checkpoint-102000/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916
--- /dev/null
+++ b/checkpoints/checkpoint-102000/config.json
@@ -0,0 +1,31 @@
+{
+  "_name_or_path": "georgeyw/gpt-2-small-init-seed-5",
+  "architectures": [
+    "GPTNeoXForCausalLM"
+  ],
+  "attention_bias": true,
+  "attention_dropout": 0.0,
+  "bos_token_id": 0,
+  "classifier_dropout": 0.1,
+  "eos_token_id": 2,
+  "hidden_act": "gelu",
+  "hidden_dropout": 0.0,
+  "hidden_size": 768,
+  "initializer_range": 0.02,
+  "intermediate_size": 3072,
+  "layer_norm_eps": 1e-05,
+  "layer_norm_epsilon": 1e-05,
+  "max_position_embeddings": 1024,
+  "model_type": "gpt_neox",
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12,
+  "rope_scaling": null,
+  "rotary_emb_base": 10000,
+  "rotary_pct": 0.25,
+  "tie_word_embeddings": false,
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.38.2",
+  "use_cache": true,
+  "use_parallel_residual": true,
+  "vocab_size": 50304
+}
diff --git a/checkpoints/checkpoint-102000/model.safetensors b/checkpoints/checkpoint-102000/model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..d89cbf1baf3b45c16886775b188a17b775ad098f
--- /dev/null
+++ b/checkpoints/checkpoint-102000/model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5a966f69917ede86380febf310ccd5cdba623c5c394b1fd11904e154b7a65c54
+size 324662984
diff --git a/checkpoints/checkpoint-102000/training_args.bin b/checkpoints/checkpoint-102000/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f
--- /dev/null
+++ b/checkpoints/checkpoint-102000/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5
+size 6520
diff --git a/checkpoints/checkpoint-10400/config.json b/checkpoints/checkpoint-10400/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916
--- /dev/null
+++ b/checkpoints/checkpoint-10400/config.json
@@ -0,0 +1,31 @@
+{
+  "_name_or_path": "georgeyw/gpt-2-small-init-seed-5",
+  "architectures": [
+    "GPTNeoXForCausalLM"
+  ],
+  "attention_bias": true,
+  "attention_dropout": 0.0,
+  "bos_token_id": 0,
+  "classifier_dropout": 0.1,
+  "eos_token_id": 2,
+  "hidden_act": "gelu",
+  "hidden_dropout": 0.0,
+  "hidden_size": 768,
+  "initializer_range": 0.02,
+  "intermediate_size": 3072,
+  "layer_norm_eps": 1e-05,
+  "layer_norm_epsilon": 1e-05,
+  "max_position_embeddings": 1024,
+  "model_type": "gpt_neox",
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12,
+  "rope_scaling": null,
+  "rotary_emb_base": 10000,
+  "rotary_pct": 0.25,
+  "tie_word_embeddings": false,
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.38.2",
+  "use_cache": true,
+  "use_parallel_residual": true,
+  "vocab_size": 50304
+}
diff --git a/checkpoints/checkpoint-10400/model.safetensors b/checkpoints/checkpoint-10400/model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..902aae5d7c0bf1f6678ce9a4cda88d8a37d286ca
--- /dev/null
+++ b/checkpoints/checkpoint-10400/model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d2239fdddefa36ab4b4d8493b727787fd44af8b72d5b88b07c7dab4583a68b0f
+size 324662984
diff --git a/checkpoints/checkpoint-10400/training_args.bin b/checkpoints/checkpoint-10400/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f
--- /dev/null
+++ b/checkpoints/checkpoint-10400/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5
+size 6520
diff --git a/checkpoints/checkpoint-104000/config.json b/checkpoints/checkpoint-104000/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916
--- /dev/null
+++ b/checkpoints/checkpoint-104000/config.json
@@ -0,0 +1,31 @@
+{
+  "_name_or_path": "georgeyw/gpt-2-small-init-seed-5",
+  "architectures": [
+    "GPTNeoXForCausalLM"
+  ],
+  "attention_bias": true,
+  "attention_dropout": 0.0,
+  "bos_token_id": 0,
+  "classifier_dropout": 0.1,
+  "eos_token_id": 2,
+  "hidden_act": "gelu",
+  "hidden_dropout": 0.0,
+  "hidden_size": 768,
+  "initializer_range": 0.02,
+  "intermediate_size": 3072,
+  "layer_norm_eps": 1e-05,
+  "layer_norm_epsilon": 1e-05,
+  "max_position_embeddings": 1024,
+  "model_type": "gpt_neox",
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12,
+  "rope_scaling": null,
+  "rotary_emb_base": 10000,
+  "rotary_pct": 0.25,
+  "tie_word_embeddings": false,
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.38.2",
+  "use_cache": true,
+  "use_parallel_residual": true,
+  "vocab_size": 50304
+}
diff --git a/checkpoints/checkpoint-104000/model.safetensors b/checkpoints/checkpoint-104000/model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..67388a476797172a3014663bf30db1f6d8c1e731
--- /dev/null
+++ b/checkpoints/checkpoint-104000/model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:10d50bba2f464733f9f6b4aa3bdf71599bafb4ef4f7cf8e3cc5c206b104f7c46
+size 324662984
diff --git a/checkpoints/checkpoint-104000/training_args.bin b/checkpoints/checkpoint-104000/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f
--- /dev/null
+++ b/checkpoints/checkpoint-104000/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5
+size 6520
diff --git a/checkpoints/checkpoint-10600/config.json b/checkpoints/checkpoint-10600/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916
--- /dev/null
+++ b/checkpoints/checkpoint-10600/config.json
@@ -0,0 +1,31 @@
+{
+  "_name_or_path": "georgeyw/gpt-2-small-init-seed-5",
+  "architectures": [
+    "GPTNeoXForCausalLM"
+  ],
+  "attention_bias": true,
+  "attention_dropout": 0.0,
+  "bos_token_id": 0,
+  "classifier_dropout": 0.1,
+  "eos_token_id": 2,
+  "hidden_act": "gelu",
+  "hidden_dropout": 0.0,
+  "hidden_size": 768,
+  "initializer_range": 0.02,
+  "intermediate_size": 3072,
+  "layer_norm_eps": 1e-05,
+  "layer_norm_epsilon": 1e-05,
+  "max_position_embeddings": 1024,
+  "model_type": "gpt_neox",
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12,
+  "rope_scaling": null,
+  "rotary_emb_base": 10000,
+  "rotary_pct": 0.25,
+  "tie_word_embeddings": false,
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.38.2",
+  "use_cache": true,
+  "use_parallel_residual": true,
+  "vocab_size": 50304
+}
diff --git a/checkpoints/checkpoint-10600/model.safetensors b/checkpoints/checkpoint-10600/model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..05b37eef150db59a09649f6c8a3f787077a34f25
--- /dev/null
+++ b/checkpoints/checkpoint-10600/model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9708a16779c726d8080525b741eeab6ddf951985b8af7152428df1c55934b853
+size 324662984
diff --git a/checkpoints/checkpoint-10600/training_args.bin b/checkpoints/checkpoint-10600/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f
--- /dev/null
+++ b/checkpoints/checkpoint-10600/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5
+size 6520
diff --git a/checkpoints/checkpoint-106000/config.json b/checkpoints/checkpoint-106000/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916
--- /dev/null
+++ b/checkpoints/checkpoint-106000/config.json
@@ -0,0 +1,31 @@
+{
+  "_name_or_path": "georgeyw/gpt-2-small-init-seed-5",
+  "architectures": [
+    "GPTNeoXForCausalLM"
+  ],
+  "attention_bias": true,
+  "attention_dropout": 0.0,
+  "bos_token_id": 0,
+  "classifier_dropout": 0.1,
+  "eos_token_id": 2,
+  "hidden_act": "gelu",
+  "hidden_dropout": 0.0,
+  "hidden_size": 768,
+  "initializer_range": 0.02,
+  "intermediate_size": 3072,
+  "layer_norm_eps": 1e-05,
+  "layer_norm_epsilon": 1e-05,
+  "max_position_embeddings": 1024,
+  "model_type": "gpt_neox",
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12,
+  "rope_scaling": null,
+  "rotary_emb_base": 10000,
+  "rotary_pct": 0.25,
+  "tie_word_embeddings": false,
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.38.2",
+  "use_cache": true,
+  "use_parallel_residual": true,
+  "vocab_size": 50304
+}
diff --git a/checkpoints/checkpoint-106000/model.safetensors b/checkpoints/checkpoint-106000/model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..7076785b991a4facdab5d1db1a67550664108161
--- /dev/null
+++ b/checkpoints/checkpoint-106000/model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3f4c0b84727b91fd39ec157ff163e070f76511f457cc3991681a1f9379551028
+size 324662984
diff --git a/checkpoints/checkpoint-106000/training_args.bin b/checkpoints/checkpoint-106000/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f
--- /dev/null
+++ b/checkpoints/checkpoint-106000/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5
+size 6520
diff --git a/checkpoints/checkpoint-10800/config.json b/checkpoints/checkpoint-10800/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916
--- /dev/null
+++ b/checkpoints/checkpoint-10800/config.json
@@ -0,0 +1,31 @@
+{
+  "_name_or_path": "georgeyw/gpt-2-small-init-seed-5",
+  "architectures": [
+    "GPTNeoXForCausalLM"
+  ],
+  "attention_bias": true,
+  "attention_dropout": 0.0,
+  "bos_token_id": 0,
+  "classifier_dropout": 0.1,
+  "eos_token_id": 2,
+  "hidden_act": "gelu",
+  "hidden_dropout": 0.0,
+  "hidden_size": 768,
+  "initializer_range": 0.02,
+  "intermediate_size": 3072,
+  "layer_norm_eps": 1e-05,
+  "layer_norm_epsilon": 1e-05,
+  "max_position_embeddings": 1024,
+  "model_type": "gpt_neox",
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12,
+  "rope_scaling": null,
+  "rotary_emb_base": 10000,
+  "rotary_pct": 0.25,
+  "tie_word_embeddings": false,
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.38.2",
+  "use_cache": true,
+  "use_parallel_residual": true,
+  "vocab_size": 50304
+}
diff --git a/checkpoints/checkpoint-10800/model.safetensors b/checkpoints/checkpoint-10800/model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..2b2557f891012c4a2a7f810b77a2ea4ece967d80
--- /dev/null
+++ b/checkpoints/checkpoint-10800/model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1ea932ce8c955581505a875892eb511d3bf41f173f90f54f9afe37fcac1200f5
+size 324662984
diff --git a/checkpoints/checkpoint-10800/training_args.bin b/checkpoints/checkpoint-10800/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f
--- /dev/null
+++ b/checkpoints/checkpoint-10800/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5
+size 6520
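Each model.safetensors entry in this diff is a three-line Git LFS pointer: the spec version, the sha256 of the actual payload (the oid), and its size in bytes. The constant size of 324662984 bytes is consistent with roughly 162M bfloat16 parameters for this config. After fetching a payload (for example with git lfs pull), it can be checked against its pointer; a small sketch using only the Python standard library:

import hashlib
from pathlib import Path

def verify_lfs_object(path: str, expected_oid: str, expected_size: int) -> bool:
    """Check a downloaded file against its Git LFS pointer (sha256 oid and size)."""
    p = Path(path)
    if p.stat().st_size != expected_size:
        return False
    digest = hashlib.sha256()
    with p.open("rb") as f:
        # Hash in 1 MiB chunks to keep memory flat for 300+ MB payloads.
        for chunk in iter(lambda: f.read(1 << 20), b""):
            digest.update(chunk)
    return digest.hexdigest() == expected_oid

# Pointer values copied from the checkpoint-10800/model.safetensors entry above.
print(verify_lfs_object(
    "checkpoints/checkpoint-10800/model.safetensors",
    "1ea932ce8c955581505a875892eb511d3bf41f173f90f54f9afe37fcac1200f5",
    324662984,
))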
diff --git a/checkpoints/checkpoint-108000/config.json b/checkpoints/checkpoint-108000/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916
--- /dev/null
+++ b/checkpoints/checkpoint-108000/config.json
@@ -0,0 +1,31 @@
+{
+  "_name_or_path": "georgeyw/gpt-2-small-init-seed-5",
+  "architectures": [
+    "GPTNeoXForCausalLM"
+  ],
+  "attention_bias": true,
+  "attention_dropout": 0.0,
+  "bos_token_id": 0,
+  "classifier_dropout": 0.1,
+  "eos_token_id": 2,
+  "hidden_act": "gelu",
+  "hidden_dropout": 0.0,
+  "hidden_size": 768,
+  "initializer_range": 0.02,
+  "intermediate_size": 3072,
+  "layer_norm_eps": 1e-05,
+  "layer_norm_epsilon": 1e-05,
+  "max_position_embeddings": 1024,
+  "model_type": "gpt_neox",
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12,
+  "rope_scaling": null,
+  "rotary_emb_base": 10000,
+  "rotary_pct": 0.25,
+  "tie_word_embeddings": false,
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.38.2",
+  "use_cache": true,
+  "use_parallel_residual": true,
+  "vocab_size": 50304
+}
diff --git a/checkpoints/checkpoint-108000/model.safetensors b/checkpoints/checkpoint-108000/model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..85fb574445a918f8008e2928ddcf42cdf523daf1
--- /dev/null
+++ b/checkpoints/checkpoint-108000/model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:601e60dbdee92703deae4062838b34cdef03e2cf019f41ac434edde21587a234
+size 324662984
diff --git a/checkpoints/checkpoint-108000/training_args.bin b/checkpoints/checkpoint-108000/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f
--- /dev/null
+++ b/checkpoints/checkpoint-108000/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5
+size 6520
diff --git a/checkpoints/checkpoint-1100/config.json b/checkpoints/checkpoint-1100/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916
--- /dev/null
+++ b/checkpoints/checkpoint-1100/config.json
@@ -0,0 +1,31 @@
+{
+  "_name_or_path": "georgeyw/gpt-2-small-init-seed-5",
+  "architectures": [
+    "GPTNeoXForCausalLM"
+  ],
+  "attention_bias": true,
+  "attention_dropout": 0.0,
+  "bos_token_id": 0,
+  "classifier_dropout": 0.1,
+  "eos_token_id": 2,
+  "hidden_act": "gelu",
+  "hidden_dropout": 0.0,
+  "hidden_size": 768,
+  "initializer_range": 0.02,
+  "intermediate_size": 3072,
+  "layer_norm_eps": 1e-05,
+  "layer_norm_epsilon": 1e-05,
+  "max_position_embeddings": 1024,
+  "model_type": "gpt_neox",
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12,
+  "rope_scaling": null,
+  "rotary_emb_base": 10000,
+  "rotary_pct": 0.25,
+  "tie_word_embeddings": false,
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.38.2",
+  "use_cache": true,
+  "use_parallel_residual": true,
+  "vocab_size": 50304
+}
diff --git a/checkpoints/checkpoint-1100/model.safetensors b/checkpoints/checkpoint-1100/model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..ab943edb2c5fedf375d69a9eb5c2d0cb2475409d
--- /dev/null
+++ b/checkpoints/checkpoint-1100/model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a8c860f0a48492875a266d2126e7f74ba82b96cd84ad6ec9dd66b682cd9ff335
+size 324662984
diff --git a/checkpoints/checkpoint-1100/training_args.bin b/checkpoints/checkpoint-1100/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f
--- /dev/null
+++ b/checkpoints/checkpoint-1100/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5
+size 6520
diff --git a/checkpoints/checkpoint-11000/config.json b/checkpoints/checkpoint-11000/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916
--- /dev/null
+++ b/checkpoints/checkpoint-11000/config.json
@@ -0,0 +1,31 @@
+{
+  "_name_or_path": "georgeyw/gpt-2-small-init-seed-5",
+  "architectures": [
+    "GPTNeoXForCausalLM"
+  ],
+  "attention_bias": true,
+  "attention_dropout": 0.0,
+  "bos_token_id": 0,
+  "classifier_dropout": 0.1,
+  "eos_token_id": 2,
+  "hidden_act": "gelu",
+  "hidden_dropout": 0.0,
+  "hidden_size": 768,
+  "initializer_range": 0.02,
+  "intermediate_size": 3072,
+  "layer_norm_eps": 1e-05,
+  "layer_norm_epsilon": 1e-05,
+  "max_position_embeddings": 1024,
+  "model_type": "gpt_neox",
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12,
+  "rope_scaling": null,
+  "rotary_emb_base": 10000,
+  "rotary_pct": 0.25,
+  "tie_word_embeddings": false,
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.38.2",
+  "use_cache": true,
+  "use_parallel_residual": true,
+  "vocab_size": 50304
+}
diff --git a/checkpoints/checkpoint-11000/model.safetensors b/checkpoints/checkpoint-11000/model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..231160846a83941a39c7ff22bbbb67917c51f85c
--- /dev/null
+++ b/checkpoints/checkpoint-11000/model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4b0ea9baf8d066e6db2730073a7eb9c771776e6c1ee23dafb035b6383dd2555e
+size 324662984
diff --git a/checkpoints/checkpoint-11000/training_args.bin b/checkpoints/checkpoint-11000/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f
--- /dev/null
+++ b/checkpoints/checkpoint-11000/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5
+size 6520
diff --git a/checkpoints/checkpoint-110000/config.json b/checkpoints/checkpoint-110000/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916
--- /dev/null
+++ b/checkpoints/checkpoint-110000/config.json
@@ -0,0 +1,31 @@
+{
+  "_name_or_path": "georgeyw/gpt-2-small-init-seed-5",
+  "architectures": [
+    "GPTNeoXForCausalLM"
+  ],
+  "attention_bias": true,
+  "attention_dropout": 0.0,
+  "bos_token_id": 0,
+  "classifier_dropout": 0.1,
+  "eos_token_id": 2,
+  "hidden_act": "gelu",
+  "hidden_dropout": 0.0,
+  "hidden_size": 768,
+  "initializer_range": 0.02,
+  "intermediate_size": 3072,
+  "layer_norm_eps": 1e-05,
+  "layer_norm_epsilon": 1e-05,
+  "max_position_embeddings": 1024,
+  "model_type": "gpt_neox",
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12,
+  "rope_scaling": null,
+  "rotary_emb_base": 10000,
+  "rotary_pct": 0.25,
+  "tie_word_embeddings": false,
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.38.2",
+  "use_cache": true,
+  "use_parallel_residual": true,
+  "vocab_size": 50304
+}
diff --git a/checkpoints/checkpoint-110000/model.safetensors b/checkpoints/checkpoint-110000/model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..2d6d79baa458233519c612db7570cc060ae27490
--- /dev/null
+++ b/checkpoints/checkpoint-110000/model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6d0842441145b1dbad4d4a60f0d8a462673032c2f645dda532c4be9ffe6a2d8b
+size 324662984
diff --git a/checkpoints/checkpoint-110000/training_args.bin b/checkpoints/checkpoint-110000/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f
--- /dev/null
+++ b/checkpoints/checkpoint-110000/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5
+size 6520
diff --git a/checkpoints/checkpoint-11200/config.json b/checkpoints/checkpoint-11200/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916
--- /dev/null
+++ b/checkpoints/checkpoint-11200/config.json
@@ -0,0 +1,31 @@
+{
+  "_name_or_path": "georgeyw/gpt-2-small-init-seed-5",
+  "architectures": [
+    "GPTNeoXForCausalLM"
+  ],
+  "attention_bias": true,
+  "attention_dropout": 0.0,
+  "bos_token_id": 0,
+  "classifier_dropout": 0.1,
+  "eos_token_id": 2,
+  "hidden_act": "gelu",
+  "hidden_dropout": 0.0,
+  "hidden_size": 768,
+  "initializer_range": 0.02,
+  "intermediate_size": 3072,
+  "layer_norm_eps": 1e-05,
+  "layer_norm_epsilon": 1e-05,
+  "max_position_embeddings": 1024,
+  "model_type": "gpt_neox",
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12,
+  "rope_scaling": null,
+  "rotary_emb_base": 10000,
+  "rotary_pct": 0.25,
+  "tie_word_embeddings": false,
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.38.2",
+  "use_cache": true,
+  "use_parallel_residual": true,
+  "vocab_size": 50304
+}
diff --git a/checkpoints/checkpoint-11200/model.safetensors b/checkpoints/checkpoint-11200/model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..543b296373a074ed322ae9aa598bbe3381e283a3
--- /dev/null
+++ b/checkpoints/checkpoint-11200/model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:01bd627aba063c73e5d1fb4e7c9bd9884f073046822792f93540ca56e36a489a
+size 324662984
diff --git a/checkpoints/checkpoint-11200/training_args.bin b/checkpoints/checkpoint-11200/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f
--- /dev/null
+++ b/checkpoints/checkpoint-11200/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5
+size 6520
diff --git a/checkpoints/checkpoint-112000/config.json b/checkpoints/checkpoint-112000/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916
--- /dev/null
+++ b/checkpoints/checkpoint-112000/config.json
@@ -0,0 +1,31 @@
+{
+  "_name_or_path": "georgeyw/gpt-2-small-init-seed-5",
+  "architectures": [
+    "GPTNeoXForCausalLM"
+  ],
+  "attention_bias": true,
+  "attention_dropout": 0.0,
+  "bos_token_id": 0,
+  "classifier_dropout": 0.1,
+  "eos_token_id": 2,
+  "hidden_act": "gelu",
+  "hidden_dropout": 0.0,
+  "hidden_size": 768,
+  "initializer_range": 0.02,
+  "intermediate_size": 3072,
+  "layer_norm_eps": 1e-05,
+  "layer_norm_epsilon": 1e-05,
+  "max_position_embeddings": 1024,
+  "model_type": "gpt_neox",
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12,
+  "rope_scaling": null,
+  "rotary_emb_base": 10000,
+  "rotary_pct": 0.25,
+  "tie_word_embeddings": false,
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.38.2",
+  "use_cache": true,
+  "use_parallel_residual": true,
+  "vocab_size": 50304
+}
diff --git a/checkpoints/checkpoint-112000/model.safetensors b/checkpoints/checkpoint-112000/model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..0c79ed9592f6731f7c9374b5330ecc045db50593
--- /dev/null
+++ b/checkpoints/checkpoint-112000/model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9f546901a9bcbe87cc1ff9bfbcd2c50952cebc841c4e3deef6b0665498103d7e
+size 324662984
diff --git a/checkpoints/checkpoint-112000/training_args.bin b/checkpoints/checkpoint-112000/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f
--- /dev/null
+++ b/checkpoints/checkpoint-112000/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5
+size 6520
diff --git a/checkpoints/checkpoint-11400/config.json b/checkpoints/checkpoint-11400/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916
--- /dev/null
+++ b/checkpoints/checkpoint-11400/config.json
@@ -0,0 +1,31 @@
+{
+  "_name_or_path": "georgeyw/gpt-2-small-init-seed-5",
+  "architectures": [
+    "GPTNeoXForCausalLM"
+  ],
+  "attention_bias": true,
+  "attention_dropout": 0.0,
+  "bos_token_id": 0,
+  "classifier_dropout": 0.1,
+  "eos_token_id": 2,
+  "hidden_act": "gelu",
+  "hidden_dropout": 0.0,
+  "hidden_size": 768,
+  "initializer_range": 0.02,
+  "intermediate_size": 3072,
+  "layer_norm_eps": 1e-05,
+  "layer_norm_epsilon": 1e-05,
+  "max_position_embeddings": 1024,
+  "model_type": "gpt_neox",
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12,
+  "rope_scaling": null,
+  "rotary_emb_base": 10000,
+  "rotary_pct": 0.25,
+  "tie_word_embeddings": false,
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.38.2",
+  "use_cache": true,
+  "use_parallel_residual": true,
+  "vocab_size": 50304
+}
diff --git a/checkpoints/checkpoint-11400/model.safetensors b/checkpoints/checkpoint-11400/model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..978ab24c64853c91248ed24cdd9994d17ac44b30
--- /dev/null
+++ b/checkpoints/checkpoint-11400/model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7b2878fd764be421c5a3c9534ff7d2e3c7aa428a1e6cf837f5284b1cd362aafc
+size 324662984
diff --git a/checkpoints/checkpoint-11400/training_args.bin b/checkpoints/checkpoint-11400/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f
--- /dev/null
+++ b/checkpoints/checkpoint-11400/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5
+size 6520
diff --git a/checkpoints/checkpoint-114000/config.json b/checkpoints/checkpoint-114000/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916
--- /dev/null
+++ b/checkpoints/checkpoint-114000/config.json
@@ -0,0 +1,31 @@
+{
+  "_name_or_path": "georgeyw/gpt-2-small-init-seed-5",
+  "architectures": [
+    "GPTNeoXForCausalLM"
+  ],
+  "attention_bias": true,
+  "attention_dropout": 0.0,
+  "bos_token_id": 0,
+  "classifier_dropout": 0.1,
+  "eos_token_id": 2,
+  "hidden_act": "gelu",
+  "hidden_dropout": 0.0,
+  "hidden_size": 768,
+  "initializer_range": 0.02,
+  "intermediate_size": 3072,
+  "layer_norm_eps": 1e-05,
+  "layer_norm_epsilon": 1e-05,
+  "max_position_embeddings": 1024,
+  "model_type": "gpt_neox",
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12,
+  "rope_scaling": null,
+  "rotary_emb_base": 10000,
+  "rotary_pct": 0.25,
+  "tie_word_embeddings": false,
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.38.2",
+  "use_cache": true,
+  "use_parallel_residual": true,
+  "vocab_size": 50304
+}
diff --git a/checkpoints/checkpoint-114000/model.safetensors b/checkpoints/checkpoint-114000/model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..7d16fd4ed3234ade51bfbae487517a19e4ce1d60
--- /dev/null
+++ b/checkpoints/checkpoint-114000/model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5b5f4a37c69fb2556b1c1d03b69b7badaab14d265f1f5e4f44bfea3bb99fe6d7
+size 324662984
diff --git a/checkpoints/checkpoint-114000/training_args.bin b/checkpoints/checkpoint-114000/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f
--- /dev/null
+++ b/checkpoints/checkpoint-114000/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5
+size 6520
diff --git a/checkpoints/checkpoint-11600/config.json b/checkpoints/checkpoint-11600/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916
--- /dev/null
+++ b/checkpoints/checkpoint-11600/config.json
@@ -0,0 +1,31 @@
+{
+  "_name_or_path": "georgeyw/gpt-2-small-init-seed-5",
+  "architectures": [
+    "GPTNeoXForCausalLM"
+  ],
+  "attention_bias": true,
+  "attention_dropout": 0.0,
+  "bos_token_id": 0,
+  "classifier_dropout": 0.1,
+  "eos_token_id": 2,
+  "hidden_act": "gelu",
+  "hidden_dropout": 0.0,
+  "hidden_size": 768,
+  "initializer_range": 0.02,
+  "intermediate_size": 3072,
+  "layer_norm_eps": 1e-05,
+  "layer_norm_epsilon": 1e-05,
+  "max_position_embeddings": 1024,
+  "model_type": "gpt_neox",
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12,
+  "rope_scaling": null,
+  "rotary_emb_base": 10000,
+  "rotary_pct": 0.25,
+  "tie_word_embeddings": false,
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.38.2",
+  "use_cache": true,
+  "use_parallel_residual": true,
+  "vocab_size": 50304
+}
diff --git a/checkpoints/checkpoint-11600/model.safetensors b/checkpoints/checkpoint-11600/model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..37ba47b12324e0b2539c9931e321cef558dee7b1
--- /dev/null
+++ b/checkpoints/checkpoint-11600/model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:18ea38ea1e187a73aaf30e7c5ad51dde2a605e3b884adff971b0116597cf96e0
+size 324662984
diff --git a/checkpoints/checkpoint-11600/training_args.bin b/checkpoints/checkpoint-11600/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f
--- /dev/null
+++ b/checkpoints/checkpoint-11600/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5
+size 6520
diff --git a/checkpoints/checkpoint-116000/config.json b/checkpoints/checkpoint-116000/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916
--- /dev/null
+++ b/checkpoints/checkpoint-116000/config.json
@@ -0,0 +1,31 @@
+{
+  "_name_or_path": "georgeyw/gpt-2-small-init-seed-5",
+  "architectures": [
+    "GPTNeoXForCausalLM"
+  ],
+  "attention_bias": true,
+  "attention_dropout": 0.0,
+  "bos_token_id": 0,
+  "classifier_dropout": 0.1,
+  "eos_token_id": 2,
+  "hidden_act": "gelu",
+  "hidden_dropout": 0.0,
+  "hidden_size": 768,
+  "initializer_range": 0.02,
+  "intermediate_size": 3072,
+  "layer_norm_eps": 1e-05,
+  "layer_norm_epsilon": 1e-05,
+  "max_position_embeddings": 1024,
+  "model_type": "gpt_neox",
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12,
+  "rope_scaling": null,
+  "rotary_emb_base": 10000,
+  "rotary_pct": 0.25,
+  "tie_word_embeddings": false,
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.38.2",
+  "use_cache": true,
+  "use_parallel_residual": true,
+  "vocab_size": 50304
+}
diff --git a/checkpoints/checkpoint-116000/model.safetensors b/checkpoints/checkpoint-116000/model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..c72b725b69f17bdec14b510c3d74e47b24740a6f
--- /dev/null
+++ b/checkpoints/checkpoint-116000/model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:df4250248a697a142f3b174ad71260caff34b74dda29bdcebf7e8933431f56a9
+size 324662984
diff --git a/checkpoints/checkpoint-116000/training_args.bin b/checkpoints/checkpoint-116000/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f
--- /dev/null
+++ b/checkpoints/checkpoint-116000/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5
+size 6520
diff --git a/checkpoints/checkpoint-11800/config.json b/checkpoints/checkpoint-11800/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916
--- /dev/null
+++ b/checkpoints/checkpoint-11800/config.json
@@ -0,0 +1,31 @@
+{
+  "_name_or_path": "georgeyw/gpt-2-small-init-seed-5",
+  "architectures": [
+    "GPTNeoXForCausalLM"
+  ],
+  "attention_bias": true,
+  "attention_dropout": 0.0,
+  "bos_token_id": 0,
+  "classifier_dropout": 0.1,
+  "eos_token_id": 2,
+  "hidden_act": "gelu",
+  "hidden_dropout": 0.0,
+  "hidden_size": 768,
+  "initializer_range": 0.02,
+  "intermediate_size": 3072,
+  "layer_norm_eps": 1e-05,
+  "layer_norm_epsilon": 1e-05,
+  "max_position_embeddings": 1024,
+  "model_type": "gpt_neox",
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12,
+  "rope_scaling": null,
+  "rotary_emb_base": 10000,
+  "rotary_pct": 0.25,
+  "tie_word_embeddings": false,
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.38.2",
+  "use_cache": true,
+  "use_parallel_residual": true,
+  "vocab_size": 50304
+}
diff --git a/checkpoints/checkpoint-11800/model.safetensors b/checkpoints/checkpoint-11800/model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..cfadd65f39881163bc066960e4d8a52e6b02fcc4
--- /dev/null
+++ b/checkpoints/checkpoint-11800/model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5b74532a578035526d4e55d473835d563e7a87d48c39927c579faec7da598eb6
+size 324662984
diff --git a/checkpoints/checkpoint-11800/training_args.bin b/checkpoints/checkpoint-11800/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f
--- /dev/null
+++ b/checkpoints/checkpoint-11800/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5
+size 6520
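The checkpoint directories interleave oddly in this diff because git orders paths lexicographically: checkpoint-118000 below sorts before checkpoint-1200 even though it is a much later training step. Tooling that walks a local clone of this layout should therefore sort on the parsed step number; a sketch:

from pathlib import Path

def checkpoints_in_step_order(root: str = "checkpoints") -> list[Path]:
    """List checkpoint-<step> directories sorted numerically by training step."""
    dirs = (d for d in Path(root).iterdir()
            if d.is_dir() and d.name.startswith("checkpoint-"))
    return sorted(dirs, key=lambda d: int(d.name.rsplit("-", 1)[1]))

# For the entries shown in this diff, this yields checkpoint-100,
# checkpoint-1000, checkpoint-1100, checkpoint-1200, checkpoint-10000, ...
# rather than the lexicographic order above (checkpoint-100, checkpoint-1000,
# checkpoint-10000, checkpoint-100000, ...).
for ckpt in checkpoints_in_step_order():
    print(ckpt.name)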
diff --git a/checkpoints/checkpoint-118000/config.json b/checkpoints/checkpoint-118000/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916
--- /dev/null
+++ b/checkpoints/checkpoint-118000/config.json
@@ -0,0 +1,31 @@
+{
+  "_name_or_path": "georgeyw/gpt-2-small-init-seed-5",
+  "architectures": [
+    "GPTNeoXForCausalLM"
+  ],
+  "attention_bias": true,
+  "attention_dropout": 0.0,
+  "bos_token_id": 0,
+  "classifier_dropout": 0.1,
+  "eos_token_id": 2,
+  "hidden_act": "gelu",
+  "hidden_dropout": 0.0,
+  "hidden_size": 768,
+  "initializer_range": 0.02,
+  "intermediate_size": 3072,
+  "layer_norm_eps": 1e-05,
+  "layer_norm_epsilon": 1e-05,
+  "max_position_embeddings": 1024,
+  "model_type": "gpt_neox",
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12,
+  "rope_scaling": null,
+  "rotary_emb_base": 10000,
+  "rotary_pct": 0.25,
+  "tie_word_embeddings": false,
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.38.2",
+  "use_cache": true,
+  "use_parallel_residual": true,
+  "vocab_size": 50304
+}
diff --git a/checkpoints/checkpoint-118000/model.safetensors b/checkpoints/checkpoint-118000/model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..3121b90672a5de90ea79eeb7fb7ab9bf0f107b74
--- /dev/null
+++ b/checkpoints/checkpoint-118000/model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:89735741d78e0575714ff53672976b2ae1795aba5810f1ce69c4980296fe31a8
+size 324662984
diff --git a/checkpoints/checkpoint-118000/training_args.bin b/checkpoints/checkpoint-118000/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f
--- /dev/null
+++ b/checkpoints/checkpoint-118000/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5
+size 6520
diff --git a/checkpoints/checkpoint-1200/config.json b/checkpoints/checkpoint-1200/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916
--- /dev/null
+++ b/checkpoints/checkpoint-1200/config.json
@@ -0,0 +1,31 @@
+{
+  "_name_or_path": "georgeyw/gpt-2-small-init-seed-5",
+  "architectures": [
+    "GPTNeoXForCausalLM"
+  ],
+  "attention_bias": true,
+  "attention_dropout": 0.0,
+  "bos_token_id": 0,
+  "classifier_dropout": 0.1,
+  "eos_token_id": 2,
+  "hidden_act": "gelu",
+  "hidden_dropout": 0.0,
+  "hidden_size": 768,
+  "initializer_range": 0.02,
+  "intermediate_size": 3072,
+  "layer_norm_eps": 1e-05,
+  "layer_norm_epsilon": 1e-05,
+  "max_position_embeddings": 1024,
+  "model_type": "gpt_neox",
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12,
+  "rope_scaling": null,
+  "rotary_emb_base": 10000,
+  "rotary_pct": 0.25,
+  "tie_word_embeddings": false,
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.38.2",
+  "use_cache": true,
+  "use_parallel_residual": true,
+  "vocab_size": 50304
+}
diff --git a/checkpoints/checkpoint-1200/model.safetensors b/checkpoints/checkpoint-1200/model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..588db1b677924d72c3bd78ed3b25ec469a0e21ff
--- /dev/null
+++ b/checkpoints/checkpoint-1200/model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b9911b651ef32dac118998ae42c7c057c4abdafddeddf11d8faa7763f530948e
+size 324662984
diff --git a/checkpoints/checkpoint-1200/training_args.bin b/checkpoints/checkpoint-1200/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f
--- /dev/null
+++ b/checkpoints/checkpoint-1200/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5
+size 6520
diff --git a/checkpoints/checkpoint-12000/config.json b/checkpoints/checkpoint-12000/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916
--- /dev/null
+++ b/checkpoints/checkpoint-12000/config.json
@@ -0,0 +1,31 @@
+{
+  "_name_or_path": "georgeyw/gpt-2-small-init-seed-5",
+  "architectures": [
+    "GPTNeoXForCausalLM"
+  ],
+  "attention_bias": true,
+  "attention_dropout": 0.0,
+  "bos_token_id": 0,
+  "classifier_dropout": 0.1,
+  "eos_token_id": 2,
+  "hidden_act": "gelu",
+  "hidden_dropout": 0.0,
+  "hidden_size": 768,
+  "initializer_range": 0.02,
+  "intermediate_size": 3072,
+  "layer_norm_eps": 1e-05,
+  "layer_norm_epsilon": 1e-05,
+  "max_position_embeddings": 1024,
+  "model_type": "gpt_neox",
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12,
+  "rope_scaling": null,
+  "rotary_emb_base": 10000,
+  "rotary_pct": 0.25,
+  "tie_word_embeddings": false,
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.38.2",
+  "use_cache": true,
+  "use_parallel_residual": true,
+  "vocab_size": 50304
+}
diff --git a/checkpoints/checkpoint-12000/model.safetensors b/checkpoints/checkpoint-12000/model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..68b14e2b1602b459c966bcce31ec6d7f066e2a7a
--- /dev/null
+++ b/checkpoints/checkpoint-12000/model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:66eeb2c8034297f092f8af49d564c51d8ca4b5ba0afef65430d169db8c43e026
+size 324662984
diff --git a/checkpoints/checkpoint-12000/training_args.bin b/checkpoints/checkpoint-12000/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f
--- /dev/null
+++ b/checkpoints/checkpoint-12000/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5
+size 6520
diff --git a/checkpoints/checkpoint-120000/config.json b/checkpoints/checkpoint-120000/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916
--- /dev/null
+++ b/checkpoints/checkpoint-120000/config.json
@@ -0,0 +1,31 @@
+{
+  "_name_or_path": "georgeyw/gpt-2-small-init-seed-5",
+  "architectures": [
+    "GPTNeoXForCausalLM"
+  ],
+  "attention_bias": true,
+  "attention_dropout": 0.0,
+  "bos_token_id": 0,
+  "classifier_dropout": 0.1,
+  "eos_token_id": 2,
+  "hidden_act": "gelu",
+  "hidden_dropout": 0.0,
+  "hidden_size": 768,
+  "initializer_range": 0.02,
+  "intermediate_size": 3072,
+  "layer_norm_eps": 1e-05,
+  "layer_norm_epsilon": 1e-05,
+  "max_position_embeddings": 1024,
+  "model_type": "gpt_neox",
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12,
+  "rope_scaling": null,
+  "rotary_emb_base": 10000,
+  "rotary_pct": 0.25,
+  "tie_word_embeddings": false,
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.38.2",
+  "use_cache": true,
+  "use_parallel_residual": true,
+  "vocab_size": 50304
+}
diff --git a/checkpoints/checkpoint-120000/model.safetensors b/checkpoints/checkpoint-120000/model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..de464a15022cdcba6105449368dc14ef4963d15f
--- /dev/null
+++ b/checkpoints/checkpoint-120000/model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7f308b6185a33abc356a2dbd4f9cd180e99774b03f05dfad23aee61f6d5707cc
+size 324662984
diff --git a/checkpoints/checkpoint-120000/training_args.bin b/checkpoints/checkpoint-120000/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f
--- /dev/null
+++ b/checkpoints/checkpoint-120000/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5
+size 6520
diff --git a/checkpoints/checkpoint-12200/config.json b/checkpoints/checkpoint-12200/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916
--- /dev/null
+++ b/checkpoints/checkpoint-12200/config.json
@@ -0,0 +1,31 @@
+{
+  "_name_or_path": "georgeyw/gpt-2-small-init-seed-5",
+  "architectures": [
+    "GPTNeoXForCausalLM"
+  ],
+  "attention_bias": true,
+  "attention_dropout": 0.0,
+  "bos_token_id": 0,
+  "classifier_dropout": 0.1,
+  "eos_token_id": 2,
+  "hidden_act": "gelu",
+  "hidden_dropout": 0.0,
+  "hidden_size": 768,
+  "initializer_range": 0.02,
+  "intermediate_size": 3072,
+  "layer_norm_eps": 1e-05,
+  "layer_norm_epsilon": 1e-05,
+  "max_position_embeddings": 1024,
+  "model_type": "gpt_neox",
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12,
+  "rope_scaling": null,
+  "rotary_emb_base": 10000,
+  "rotary_pct": 0.25,
+  "tie_word_embeddings": false,
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.38.2",
+  "use_cache": true,
+  "use_parallel_residual": true,
+  "vocab_size": 50304
+}
diff --git a/checkpoints/checkpoint-12200/model.safetensors b/checkpoints/checkpoint-12200/model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..7f9d601ea83428fe7768814eef9644a4912477ef
--- /dev/null
+++ b/checkpoints/checkpoint-12200/model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7cecf18da4031e000fe642433d2b2da93ab16065e99cd0885d865d3faa6f0f14
+size 324662984
diff --git a/checkpoints/checkpoint-12200/training_args.bin b/checkpoints/checkpoint-12200/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f
--- /dev/null
+++ b/checkpoints/checkpoint-12200/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5
+size 6520
diff --git a/checkpoints/checkpoint-122000/config.json b/checkpoints/checkpoint-122000/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916
--- /dev/null
+++ b/checkpoints/checkpoint-122000/config.json
@@ -0,0 +1,31 @@
+{
+  "_name_or_path": "georgeyw/gpt-2-small-init-seed-5",
+  "architectures": [
+    "GPTNeoXForCausalLM"
+  ],
+  "attention_bias": true,
+  "attention_dropout": 0.0,
+  "bos_token_id": 0,
+  "classifier_dropout": 0.1,
+  "eos_token_id": 2,
+  "hidden_act": "gelu",
+  "hidden_dropout": 0.0,
+  "hidden_size": 768,
+  "initializer_range": 0.02,
+  "intermediate_size": 3072,
+  "layer_norm_eps": 1e-05,
+  "layer_norm_epsilon": 1e-05,
+  "max_position_embeddings": 1024,
+  "model_type": "gpt_neox",
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12,
+  "rope_scaling": null,
+  "rotary_emb_base": 10000,
+  "rotary_pct": 0.25,
+  "tie_word_embeddings": false,
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.38.2",
+  "use_cache": true,
+  "use_parallel_residual": true,
+  "vocab_size": 50304
+}
diff --git a/checkpoints/checkpoint-122000/model.safetensors b/checkpoints/checkpoint-122000/model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..8154f7a9f5061d1d0ca723bc5c580bed6e2ae0ea
--- /dev/null
+++ b/checkpoints/checkpoint-122000/model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:bae7601fd8df15576ce48238edd8ca25315dc85de07739634176bceb4979c935
+size 324662984
diff --git a/checkpoints/checkpoint-122000/training_args.bin b/checkpoints/checkpoint-122000/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f
--- /dev/null
+++ b/checkpoints/checkpoint-122000/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5
+size 6520
diff --git a/checkpoints/checkpoint-12400/config.json b/checkpoints/checkpoint-12400/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916
--- /dev/null
+++ b/checkpoints/checkpoint-12400/config.json
@@ -0,0 +1,31 @@
+{
+  "_name_or_path": "georgeyw/gpt-2-small-init-seed-5",
+  "architectures": [
+    "GPTNeoXForCausalLM"
+  ],
+  "attention_bias": true,
+  "attention_dropout": 0.0,
+  "bos_token_id": 0,
+  "classifier_dropout": 0.1,
+  "eos_token_id": 2,
+  "hidden_act": "gelu",
+  "hidden_dropout": 0.0,
+  "hidden_size": 768,
+  "initializer_range": 0.02,
+  "intermediate_size": 3072,
+  "layer_norm_eps": 1e-05,
+  "layer_norm_epsilon": 1e-05,
+  "max_position_embeddings": 1024,
+  "model_type": "gpt_neox",
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12,
+  "rope_scaling": null,
+  "rotary_emb_base": 10000,
+  "rotary_pct": 0.25,
+  "tie_word_embeddings": false,
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.38.2",
+  "use_cache": true,
+  "use_parallel_residual": true,
+  "vocab_size": 50304
+}
diff --git a/checkpoints/checkpoint-12400/model.safetensors b/checkpoints/checkpoint-12400/model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..0c08c67728dd812dbc515b6ce4dfc973bedd8923
--- /dev/null
+++ b/checkpoints/checkpoint-12400/model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1cef5825145421a6e614c885cc1083cbd87eaddc04511c2854dcd186449eb145
+size 324662984
diff --git a/checkpoints/checkpoint-12400/training_args.bin b/checkpoints/checkpoint-12400/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f
--- /dev/null
+++ b/checkpoints/checkpoint-12400/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5
+size 6520
diff --git a/checkpoints/checkpoint-124000/config.json b/checkpoints/checkpoint-124000/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916
--- /dev/null
+++ b/checkpoints/checkpoint-124000/config.json
@@ -0,0 +1,31 @@
+{
+  "_name_or_path": "georgeyw/gpt-2-small-init-seed-5",
+  "architectures": [
+    "GPTNeoXForCausalLM"
+  ],
+  "attention_bias": true,
+  "attention_dropout": 0.0,
+  "bos_token_id": 0,
+  "classifier_dropout": 0.1,
+  "eos_token_id": 2,
+  "hidden_act": "gelu",
+  "hidden_dropout": 0.0,
+  "hidden_size": 768,
+  "initializer_range": 0.02,
+  "intermediate_size": 3072,
+  "layer_norm_eps": 1e-05,
+  "layer_norm_epsilon": 1e-05,
+  "max_position_embeddings": 1024,
+  "model_type": "gpt_neox",
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12,
+  "rope_scaling": null,
+  "rotary_emb_base": 10000,
+  "rotary_pct": 0.25,
+  "tie_word_embeddings": false,
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.38.2",
+  "use_cache": true,
+  "use_parallel_residual": true,
+  "vocab_size": 50304
+}
diff --git a/checkpoints/checkpoint-124000/model.safetensors b/checkpoints/checkpoint-124000/model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..ed54e079e4887f4148bff7244bb39fea3d37272e
--- /dev/null
+++ b/checkpoints/checkpoint-124000/model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f26566c56713416737f61edde7c755157a1c1be7fe7ea6cc2ff77c574a9d4380
+size 324662984
diff --git a/checkpoints/checkpoint-124000/training_args.bin b/checkpoints/checkpoint-124000/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f
--- /dev/null
+++ b/checkpoints/checkpoint-124000/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5
+size 6520
diff --git a/checkpoints/checkpoint-12600/config.json b/checkpoints/checkpoint-12600/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916
--- /dev/null
+++ b/checkpoints/checkpoint-12600/config.json
@@ -0,0 +1,31 @@
+{
+  "_name_or_path": "georgeyw/gpt-2-small-init-seed-5",
+  "architectures": [
+    "GPTNeoXForCausalLM"
+  ],
+  "attention_bias": true,
+  "attention_dropout": 0.0,
+  "bos_token_id": 0,
+  "classifier_dropout": 0.1,
+  "eos_token_id": 2,
+  "hidden_act": "gelu",
+  "hidden_dropout": 0.0,
+  "hidden_size": 768,
+  "initializer_range": 0.02,
+  "intermediate_size": 3072,
+  "layer_norm_eps": 1e-05,
+  "layer_norm_epsilon": 1e-05,
+  "max_position_embeddings": 1024,
+  "model_type": "gpt_neox",
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12,
+  "rope_scaling": null,
+  "rotary_emb_base": 10000,
+  "rotary_pct": 0.25,
+  "tie_word_embeddings": false,
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.38.2",
+  "use_cache": true,
+  "use_parallel_residual": true,
+  "vocab_size": 50304
+}
diff --git a/checkpoints/checkpoint-12600/model.safetensors b/checkpoints/checkpoint-12600/model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..09ce0f9167180d743d0b3a81684def732bcddbc0
--- /dev/null
+++ b/checkpoints/checkpoint-12600/model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:55a40e5a7ff6b3542ee8735a6834437242d8fad539503e3d37f2dd0a44b7c281
+size 324662984
diff --git a/checkpoints/checkpoint-12600/training_args.bin b/checkpoints/checkpoint-12600/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f
--- /dev/null
+++ b/checkpoints/checkpoint-12600/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5
+size 6520
diff --git a/checkpoints/checkpoint-126000/config.json b/checkpoints/checkpoint-126000/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916
--- /dev/null
+++ b/checkpoints/checkpoint-126000/config.json
@@ -0,0 +1,31 @@
+{
+  "_name_or_path": "georgeyw/gpt-2-small-init-seed-5",
+  "architectures": [
+    "GPTNeoXForCausalLM"
+  ],
+  "attention_bias": true,
+  "attention_dropout": 0.0,
+  "bos_token_id": 0,
+  "classifier_dropout": 0.1,
+  "eos_token_id": 2,
+  "hidden_act": "gelu",
+  "hidden_dropout": 0.0,
+  "hidden_size": 768,
+  "initializer_range": 0.02,
+  "intermediate_size": 3072,
+  "layer_norm_eps": 1e-05,
+  "layer_norm_epsilon": 1e-05,
+  "max_position_embeddings": 1024,
+  "model_type": "gpt_neox",
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12,
+  "rope_scaling": null,
+  "rotary_emb_base": 10000,
+  "rotary_pct": 0.25,
+  "tie_word_embeddings": false,
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.38.2",
+  "use_cache": true,
+  "use_parallel_residual": true,
+  "vocab_size": 50304
+}
diff --git a/checkpoints/checkpoint-126000/model.safetensors b/checkpoints/checkpoint-126000/model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..2a7b7f5a53314e777520dc28024a3e3a895e0ca1
--- /dev/null
+++ b/checkpoints/checkpoint-126000/model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a77a13a86dc147d2efdccf02073bbb8b0af56dd08ab91c914504e21d055edcc0
+size 324662984
diff --git a/checkpoints/checkpoint-126000/training_args.bin b/checkpoints/checkpoint-126000/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f
--- /dev/null
+++ b/checkpoints/checkpoint-126000/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5
+size 6520
diff --git a/checkpoints/checkpoint-12800/config.json b/checkpoints/checkpoint-12800/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916
--- /dev/null
+++ b/checkpoints/checkpoint-12800/config.json
@@ -0,0 +1,31 @@
+{
+  "_name_or_path": "georgeyw/gpt-2-small-init-seed-5",
+  "architectures": [
+    "GPTNeoXForCausalLM"
+  ],
+  "attention_bias": true,
+  "attention_dropout": 0.0,
+  "bos_token_id": 0,
+  "classifier_dropout": 0.1,
+  "eos_token_id": 2,
+  "hidden_act": "gelu",
+  "hidden_dropout": 0.0,
+  "hidden_size": 768,
+  "initializer_range": 0.02,
+  "intermediate_size": 3072,
+  "layer_norm_eps": 1e-05,
+  "layer_norm_epsilon": 1e-05,
+  "max_position_embeddings": 1024,
+  "model_type": "gpt_neox",
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12,
+  "rope_scaling": null,
+  "rotary_emb_base": 10000,
+  "rotary_pct": 0.25,
+  "tie_word_embeddings": false,
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.38.2",
+  "use_cache": true,
+  "use_parallel_residual": true,
+  "vocab_size": 50304
+}
diff --git a/checkpoints/checkpoint-12800/model.safetensors b/checkpoints/checkpoint-12800/model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..bcdf2c22179cb26dc9057acdff424cb40dc364c0
--- /dev/null
+++ b/checkpoints/checkpoint-12800/model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:aeda93347c03e6be52108b8a393a8bc9ced33a6a25f38aa37b413bc8b57fe567
+size 324662984
diff --git a/checkpoints/checkpoint-12800/training_args.bin b/checkpoints/checkpoint-12800/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f
--- /dev/null
+++ b/checkpoints/checkpoint-12800/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5
+size 6520
diff --git a/checkpoints/checkpoint-128000/config.json b/checkpoints/checkpoint-128000/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916
--- /dev/null
+++ b/checkpoints/checkpoint-128000/config.json
@@ -0,0 +1,31 @@
+{
+  "_name_or_path": "georgeyw/gpt-2-small-init-seed-5",
+  "architectures": [
+    "GPTNeoXForCausalLM"
+  ],
+  "attention_bias": true,
+  "attention_dropout": 0.0,
+  "bos_token_id": 0,
+  "classifier_dropout": 0.1,
+  "eos_token_id": 2,
+  "hidden_act": "gelu",
+  "hidden_dropout": 0.0,
+  "hidden_size": 768,
+  "initializer_range": 0.02,
+  "intermediate_size": 3072,
+  "layer_norm_eps": 1e-05,
+  "layer_norm_epsilon": 1e-05,
+  "max_position_embeddings": 1024,
+  "model_type": "gpt_neox",
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12,
+  "rope_scaling": null,
+  "rotary_emb_base": 10000,
+  "rotary_pct": 0.25,
+  "tie_word_embeddings": false,
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.38.2",
+  "use_cache": true,
+  "use_parallel_residual": true,
+  "vocab_size": 50304
+}
diff
--git a/checkpoints/checkpoint-128000/model.safetensors b/checkpoints/checkpoint-128000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..a2d11f1f5db8472643bd658e882e208f237e8397 --- /dev/null +++ b/checkpoints/checkpoint-128000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:cea649582663cff338094dbd0784266c332e053a4de256da2db22bc3fdadeb7b +size 324662984 diff --git a/checkpoints/checkpoint-128000/training_args.bin b/checkpoints/checkpoint-128000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-128000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-1300/config.json b/checkpoints/checkpoint-1300/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-1300/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-1300/model.safetensors b/checkpoints/checkpoint-1300/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..374e9c84fb5fde1ac89e4b32c60075b1080e0178 --- /dev/null +++ b/checkpoints/checkpoint-1300/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:80a7de901c6db8aeb3db6c652a3bcdf5ce55700ef6492e30487758559282c3ac +size 324662984 diff --git a/checkpoints/checkpoint-1300/training_args.bin b/checkpoints/checkpoint-1300/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-1300/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-13000/config.json b/checkpoints/checkpoint-13000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-13000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + 
"model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-13000/model.safetensors b/checkpoints/checkpoint-13000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..744855f488f0b398502df1bb4f9a533cad0e9885 --- /dev/null +++ b/checkpoints/checkpoint-13000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0e8c74d1972160267525af505a1b4f81ac734c795ab26bf1f1f13257c5c530c6 +size 324662984 diff --git a/checkpoints/checkpoint-13000/training_args.bin b/checkpoints/checkpoint-13000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-13000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-130000/config.json b/checkpoints/checkpoint-130000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-130000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-130000/model.safetensors b/checkpoints/checkpoint-130000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..2dc4c49bae97cacf17b95aae8d4b4d25e61fdda9 --- /dev/null +++ b/checkpoints/checkpoint-130000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d32936b5d08f1f28523a1b514529ff31d1bdb94c923a3252e4faaf0344b3c105 +size 324662984 diff --git a/checkpoints/checkpoint-130000/training_args.bin b/checkpoints/checkpoint-130000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-130000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-13200/config.json b/checkpoints/checkpoint-13200/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-13200/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + 
"attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-13200/model.safetensors b/checkpoints/checkpoint-13200/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..097cd17adf877b892723d0d72dab94f98a831e66 --- /dev/null +++ b/checkpoints/checkpoint-13200/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ded3ff43da783c1f0bb87f47b5a4bbdaf180b3d1aab672ce26419461289b50e2 +size 324662984 diff --git a/checkpoints/checkpoint-13200/training_args.bin b/checkpoints/checkpoint-13200/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-13200/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-132000/config.json b/checkpoints/checkpoint-132000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-132000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-132000/model.safetensors b/checkpoints/checkpoint-132000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..af05cdddbe34f0b5d59168225472eda75d61fadf --- /dev/null +++ b/checkpoints/checkpoint-132000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:35c0fc9e6856ea7a95b4c39cc4982d91bf670416c220b5bd205c74eb422cafca +size 324662984 diff --git a/checkpoints/checkpoint-132000/training_args.bin b/checkpoints/checkpoint-132000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-132000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git 
a/checkpoints/checkpoint-13400/config.json b/checkpoints/checkpoint-13400/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-13400/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-13400/model.safetensors b/checkpoints/checkpoint-13400/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..c18005a40913322f7fda2c190394e1874fe7e421 --- /dev/null +++ b/checkpoints/checkpoint-13400/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:91a6e80a91604760ca24c914275dcf403e2c12d434e0720e451757a85f015ad6 +size 324662984 diff --git a/checkpoints/checkpoint-13400/training_args.bin b/checkpoints/checkpoint-13400/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-13400/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-134000/config.json b/checkpoints/checkpoint-134000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-134000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-134000/model.safetensors b/checkpoints/checkpoint-134000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..585aedcf1acbb4a68013617616ef82a7c9cc5d09 --- /dev/null +++ b/checkpoints/checkpoint-134000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e94e033eacf2eecb2c533f6b4463fd7bcf00f6069c574c828c7fe041ed8999ab +size 324662984 diff --git a/checkpoints/checkpoint-134000/training_args.bin 
b/checkpoints/checkpoint-134000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-134000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-13600/config.json b/checkpoints/checkpoint-13600/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-13600/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-13600/model.safetensors b/checkpoints/checkpoint-13600/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..3dfd5e06067bbba9c5bfd11dc37aaff3564acb3b --- /dev/null +++ b/checkpoints/checkpoint-13600/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7df5efcb33a9b364a9656c0b3fc643bffe2c15ee450252bf353badfd519abe58 +size 324662984 diff --git a/checkpoints/checkpoint-13600/training_args.bin b/checkpoints/checkpoint-13600/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-13600/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-136000/config.json b/checkpoints/checkpoint-136000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-136000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-136000/model.safetensors b/checkpoints/checkpoint-136000/model.safetensors new file mode 100644 index 
0000000000000000000000000000000000000000..55a80f2f3ea011612e9d353278d52da79f9e87a6 --- /dev/null +++ b/checkpoints/checkpoint-136000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:596c1a1f5c22f2f2900fcd4d95a20836efab4f931e68ab0523148c7783d443f4 +size 324662984 diff --git a/checkpoints/checkpoint-136000/training_args.bin b/checkpoints/checkpoint-136000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-136000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-13800/config.json b/checkpoints/checkpoint-13800/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-13800/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-13800/model.safetensors b/checkpoints/checkpoint-13800/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..d838aeee16bc79c97b6a71a8e2330c0d1b29569c --- /dev/null +++ b/checkpoints/checkpoint-13800/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b437ac2289de8ba921d6a29cc98598e3af4e7ca97d1928013ca9151fdbf2b511 +size 324662984 diff --git a/checkpoints/checkpoint-13800/training_args.bin b/checkpoints/checkpoint-13800/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-13800/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-138000/config.json b/checkpoints/checkpoint-138000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-138000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + 
"rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-138000/model.safetensors b/checkpoints/checkpoint-138000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..64140b3276bb8f840b308de91634a0a7bca2ce94 --- /dev/null +++ b/checkpoints/checkpoint-138000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a28637eb3935e425f8af769802f0e230e88fac2b04e3d8ec2199ec8cdfa8c13c +size 324662984 diff --git a/checkpoints/checkpoint-138000/training_args.bin b/checkpoints/checkpoint-138000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-138000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-1400/config.json b/checkpoints/checkpoint-1400/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-1400/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-1400/model.safetensors b/checkpoints/checkpoint-1400/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..cbd6ae244f60f7d22b69e0aa19ca9af8c96842a3 --- /dev/null +++ b/checkpoints/checkpoint-1400/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d4fcd8123612e6426dc2e1044639bf75001246a7c8661ebe944814815a997da8 +size 324662984 diff --git a/checkpoints/checkpoint-1400/training_args.bin b/checkpoints/checkpoint-1400/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-1400/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-14000/config.json b/checkpoints/checkpoint-14000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-14000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 
2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-14000/model.safetensors b/checkpoints/checkpoint-14000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..46edeba1cd65c81c46100a94cc9e1b3b8f2753ca --- /dev/null +++ b/checkpoints/checkpoint-14000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:03e92ede6bc18f47af80bde0d43106781a49145f04a64cbf9eda3789649d0b88 +size 324662984 diff --git a/checkpoints/checkpoint-14000/training_args.bin b/checkpoints/checkpoint-14000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-14000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-140000/config.json b/checkpoints/checkpoint-140000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-140000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-140000/model.safetensors b/checkpoints/checkpoint-140000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..7511fc0303f7188ba362eb800ad695a6ed1fa8f0 --- /dev/null +++ b/checkpoints/checkpoint-140000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:317124d9c508629a7b6b804097df6733134e2e1d6670dfa02f01109c1994e15a +size 324662984 diff --git a/checkpoints/checkpoint-140000/training_args.bin b/checkpoints/checkpoint-140000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-140000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-14200/config.json b/checkpoints/checkpoint-14200/config.json new file mode 100644 index 
0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-14200/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-14200/model.safetensors b/checkpoints/checkpoint-14200/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..82d6d2a3d7d846b84f9f957196f7666d344abf1b --- /dev/null +++ b/checkpoints/checkpoint-14200/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:21ee8259b22d6adf98ff8c24dd6c5a35810ce58bdbad67b5b82b0c6e8a14f6f8 +size 324662984 diff --git a/checkpoints/checkpoint-14200/training_args.bin b/checkpoints/checkpoint-14200/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-14200/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-142000/config.json b/checkpoints/checkpoint-142000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-142000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-142000/model.safetensors b/checkpoints/checkpoint-142000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..4fd6bd1ec31aea6a3c0ea78c7be17bc539b4153d --- /dev/null +++ b/checkpoints/checkpoint-142000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ecc19dfd20bf6c2423fa2be106fa6a7d9ebc6af019b3c8117faa1bb55c93c3a2 +size 324662984 diff --git a/checkpoints/checkpoint-142000/training_args.bin b/checkpoints/checkpoint-142000/training_args.bin new file mode 100644 index 
0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-142000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-14400/config.json b/checkpoints/checkpoint-14400/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-14400/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-14400/model.safetensors b/checkpoints/checkpoint-14400/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..659c2ef8e75299fac7f486b18601da696ede6228 --- /dev/null +++ b/checkpoints/checkpoint-14400/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1177ff3e8a87253db4e92c44e4a70a2f4eb676c434b1d78386b1fd6588ecfe32 +size 324662984 diff --git a/checkpoints/checkpoint-14400/training_args.bin b/checkpoints/checkpoint-14400/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-14400/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-144000/config.json b/checkpoints/checkpoint-144000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-144000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-144000/model.safetensors b/checkpoints/checkpoint-144000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..6f98a3a7e302d2f09ffecda716d7653b36a3da34 --- 
/dev/null +++ b/checkpoints/checkpoint-144000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:65f2036cc620ce16e280679432e453a8470e76a991de39baece5fdd526d2cc34 +size 324662984 diff --git a/checkpoints/checkpoint-144000/training_args.bin b/checkpoints/checkpoint-144000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-144000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-14600/config.json b/checkpoints/checkpoint-14600/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-14600/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-14600/model.safetensors b/checkpoints/checkpoint-14600/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..0d68cb82193c73863ffd7f5539dd3a3d1517413f --- /dev/null +++ b/checkpoints/checkpoint-14600/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3a00d78d134959ce1c251c7ecf88fcd64f68c75bd2fa0b9d3e614689b004db49 +size 324662984 diff --git a/checkpoints/checkpoint-14600/training_args.bin b/checkpoints/checkpoint-14600/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-14600/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-146000/config.json b/checkpoints/checkpoint-146000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-146000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": 
"bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-146000/model.safetensors b/checkpoints/checkpoint-146000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..b5f630d4fcb758b8c16b6ceaba710ea1505a07c0 --- /dev/null +++ b/checkpoints/checkpoint-146000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c60d09d00ed9b62f8a987fcff4dfa9ff2afdbdd2c955209e03e805062e8bddcf +size 324662984 diff --git a/checkpoints/checkpoint-146000/training_args.bin b/checkpoints/checkpoint-146000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-146000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-14800/config.json b/checkpoints/checkpoint-14800/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-14800/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-14800/model.safetensors b/checkpoints/checkpoint-14800/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..c98b0355da1024ad6f9b73e71e9d828d55b0e990 --- /dev/null +++ b/checkpoints/checkpoint-14800/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c0a20edbcd9ab719ced26010d16d9ea19628d8979e4aea2a67448aa66f13b88c +size 324662984 diff --git a/checkpoints/checkpoint-14800/training_args.bin b/checkpoints/checkpoint-14800/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-14800/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-148000/config.json b/checkpoints/checkpoint-148000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-148000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + 
"initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-148000/model.safetensors b/checkpoints/checkpoint-148000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..0104f048506aa4849751bd612a3ec65e61533cb4 --- /dev/null +++ b/checkpoints/checkpoint-148000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e2e184103bc6b31b63d6e03233fbdcb40fe2d460cda455c64781e81f987528bf +size 324662984 diff --git a/checkpoints/checkpoint-148000/training_args.bin b/checkpoints/checkpoint-148000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-148000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-1500/config.json b/checkpoints/checkpoint-1500/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-1500/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-1500/model.safetensors b/checkpoints/checkpoint-1500/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..dcee919ffc102fd0c520e10013774c31f0a97c5e --- /dev/null +++ b/checkpoints/checkpoint-1500/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:76f680996d9b2931377a0e9ec1d4d590432a5eecbea61e05816966842422ade3 +size 324662984 diff --git a/checkpoints/checkpoint-1500/training_args.bin b/checkpoints/checkpoint-1500/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-1500/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-15000/config.json b/checkpoints/checkpoint-15000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ 
b/checkpoints/checkpoint-15000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-15000/model.safetensors b/checkpoints/checkpoint-15000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..a10d163f909a672546968d80ea4c362e8ebabeae --- /dev/null +++ b/checkpoints/checkpoint-15000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e96feb4954f095d7b25041dd238b72be6b809027e6a31a4522cee4296240b5b4 +size 324662984 diff --git a/checkpoints/checkpoint-15000/training_args.bin b/checkpoints/checkpoint-15000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-15000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-150000/config.json b/checkpoints/checkpoint-150000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-150000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-150000/model.safetensors b/checkpoints/checkpoint-150000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..11999d11670821e014eea3cabbbb8204a5fb56e5 --- /dev/null +++ b/checkpoints/checkpoint-150000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a618078ae8f5d49d0c37cd70e82851027e2d8e78d7133c28bf53ef958fb894f1 +size 324662984 diff --git a/checkpoints/checkpoint-150000/training_args.bin b/checkpoints/checkpoint-150000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-150000/training_args.bin @@ -0,0 +1,3 @@ +version 
https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-15200/config.json b/checkpoints/checkpoint-15200/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-15200/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-15200/model.safetensors b/checkpoints/checkpoint-15200/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..c0fd34f65b3020b813ee018a6389f54684c34293 --- /dev/null +++ b/checkpoints/checkpoint-15200/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:554226a0c17842925688b12405eb9965106e197706b3d7eea66c9fdab90c2476 +size 324662984 diff --git a/checkpoints/checkpoint-15200/training_args.bin b/checkpoints/checkpoint-15200/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-15200/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-152000/config.json b/checkpoints/checkpoint-152000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-152000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-152000/model.safetensors b/checkpoints/checkpoint-152000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..c631e0f4b3ee44dbe627409de1ca4229fc56284e --- /dev/null +++ b/checkpoints/checkpoint-152000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:654b8e70b914d3f58ad9031a15fbc267352a2a80b5122d2ba2f6be0fec8e51b7 +size 324662984 diff --git a/checkpoints/checkpoint-152000/training_args.bin b/checkpoints/checkpoint-152000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-152000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-15400/config.json b/checkpoints/checkpoint-15400/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-15400/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-15400/model.safetensors b/checkpoints/checkpoint-15400/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..d273b22cdf4e27c35ea79bd3ea905d803924660d --- /dev/null +++ b/checkpoints/checkpoint-15400/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0177922d68d8fbef12484470cad85cb39b342b8eca4f7b04fe59d34cd4af84bd +size 324662984 diff --git a/checkpoints/checkpoint-15400/training_args.bin b/checkpoints/checkpoint-15400/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-15400/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-154000/config.json b/checkpoints/checkpoint-154000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-154000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff 
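Every checkpoint in this diff ships the same config.json: a 12-layer, 12-head, 768-hidden GPT-NeoX model with a 50304-token vocabulary, untied embeddings, and bfloat16 weights. Because each checkpoint-* directory holds a complete config.json plus model.safetensors, it loads directly with transformers once the LFS objects have been fetched. A minimal sketch (the local path is illustrative):

```python
import torch
from transformers import AutoModelForCausalLM

# "architectures" in config.json names GPTNeoXForCausalLM, so the Auto class
# resolves to it; this needs the real weights, i.e. `git lfs pull` first.
model = AutoModelForCausalLM.from_pretrained(
    "checkpoints/checkpoint-154000",
    torch_dtype=torch.bfloat16,  # matches "torch_dtype": "bfloat16"
)
print(sum(p.numel() for p in model.parameters()))  # ~162.3M parameters
```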
--git a/checkpoints/checkpoint-154000/model.safetensors b/checkpoints/checkpoint-154000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..87ad761c5b2d33dbfeee88b01b34b0031b8baf5f --- /dev/null +++ b/checkpoints/checkpoint-154000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0514e34e14d373970a4881cceb578e73dd20cd4b98546f6c7f42b0bf1a76e57b +size 324662984 diff --git a/checkpoints/checkpoint-154000/training_args.bin b/checkpoints/checkpoint-154000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-154000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-15600/config.json b/checkpoints/checkpoint-15600/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-15600/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-15600/model.safetensors b/checkpoints/checkpoint-15600/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..2396dafb581c1ed74b95aa586a8d9ea5a45f9e12 --- /dev/null +++ b/checkpoints/checkpoint-15600/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5eaaf36a20d044cc5f7c6d1634a1395080aab26b18e801a154c17e81d76066a4 +size 324662984 diff --git a/checkpoints/checkpoint-15600/training_args.bin b/checkpoints/checkpoint-15600/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-15600/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-156000/config.json b/checkpoints/checkpoint-156000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-156000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + 
"max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-156000/model.safetensors b/checkpoints/checkpoint-156000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..4ce10d92d0c064da0e4ae75acc6bac41bb933c56 --- /dev/null +++ b/checkpoints/checkpoint-156000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:759996cc965016dad4dfc4f738224fe9c51136bf2e52553d580c53f60a907ec4 +size 324662984 diff --git a/checkpoints/checkpoint-156000/training_args.bin b/checkpoints/checkpoint-156000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-156000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-15800/config.json b/checkpoints/checkpoint-15800/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-15800/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-15800/model.safetensors b/checkpoints/checkpoint-15800/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..697781841581e8a91c475e13d2df65ac08755209 --- /dev/null +++ b/checkpoints/checkpoint-15800/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5ec43c171b33de521d25625d40704632cea7f0501e1c58063d276c2321708b4c +size 324662984 diff --git a/checkpoints/checkpoint-15800/training_args.bin b/checkpoints/checkpoint-15800/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-15800/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-158000/config.json b/checkpoints/checkpoint-158000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-158000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + 
"architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-158000/model.safetensors b/checkpoints/checkpoint-158000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..b463f6a2b373bbc3154714d3f2b8957c86fec3ad --- /dev/null +++ b/checkpoints/checkpoint-158000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:93b444257b911bd879e1deeab18ca89377c259394ee86c5550d8d3df4c2204ca +size 324662984 diff --git a/checkpoints/checkpoint-158000/training_args.bin b/checkpoints/checkpoint-158000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-158000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-1600/config.json b/checkpoints/checkpoint-1600/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-1600/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-1600/model.safetensors b/checkpoints/checkpoint-1600/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..076de0232c3cc41170745b51c5d86608ef53ce01 --- /dev/null +++ b/checkpoints/checkpoint-1600/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:33b7bac48f84403a42e5962e86b1c5c11263edb1aa12005a2f24d75835d981a2 +size 324662984 diff --git a/checkpoints/checkpoint-1600/training_args.bin b/checkpoints/checkpoint-1600/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-1600/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git 
a/checkpoints/checkpoint-16000/config.json b/checkpoints/checkpoint-16000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-16000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-16000/model.safetensors b/checkpoints/checkpoint-16000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..0d83c476e012384a404b9a732968118536fcc257 --- /dev/null +++ b/checkpoints/checkpoint-16000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:cd50819629e45f4841b1631334ee619afdc11bbde14a633f5e523650348ce300 +size 324662984 diff --git a/checkpoints/checkpoint-16000/training_args.bin b/checkpoints/checkpoint-16000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-16000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-160000/config.json b/checkpoints/checkpoint-160000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-160000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-160000/model.safetensors b/checkpoints/checkpoint-160000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..c7e0492a54d7ca1ea225f99ab0d28f8bb6ccd070 --- /dev/null +++ b/checkpoints/checkpoint-160000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c536771138a02f63bd16d7c0e771c66ed85b545bde8f89c47a7c55f8f8eb8a02 +size 324662984 diff --git a/checkpoints/checkpoint-160000/training_args.bin 
b/checkpoints/checkpoint-160000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-160000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-16200/config.json b/checkpoints/checkpoint-16200/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-16200/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-16200/model.safetensors b/checkpoints/checkpoint-16200/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..137696c4901109fbbe692125d7053a4fd0570375 --- /dev/null +++ b/checkpoints/checkpoint-16200/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9d5e0cdc1ef1069b054af50682a3521ec9ca40fa60ae411266c24cb74038b10f +size 324662984 diff --git a/checkpoints/checkpoint-16200/training_args.bin b/checkpoints/checkpoint-16200/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-16200/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-162000/config.json b/checkpoints/checkpoint-162000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-162000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-162000/model.safetensors b/checkpoints/checkpoint-162000/model.safetensors new file mode 100644 index 
0000000000000000000000000000000000000000..04c7f9cf21ed060e526ad9bd1ebd58fdf53b1fb8 --- /dev/null +++ b/checkpoints/checkpoint-162000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8d844034cb07bde0333fdddc451b862f2ed485289915f50150f8eb9418a8b5ae +size 324662984 diff --git a/checkpoints/checkpoint-162000/training_args.bin b/checkpoints/checkpoint-162000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-162000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-16400/config.json b/checkpoints/checkpoint-16400/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-16400/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-16400/model.safetensors b/checkpoints/checkpoint-16400/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..6cd064dda593b82d1b1474e3e6e665e8bf4281e5 --- /dev/null +++ b/checkpoints/checkpoint-16400/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:56a9acb8b5db648ac41ad8f6e7b5a04465c25f9032451394229ca252ea12aac4 +size 324662984 diff --git a/checkpoints/checkpoint-16400/training_args.bin b/checkpoints/checkpoint-16400/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-16400/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-164000/config.json b/checkpoints/checkpoint-164000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-164000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + 
"rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-164000/model.safetensors b/checkpoints/checkpoint-164000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..d7b093384815654da40e3b3a4d872a74fdeace8e --- /dev/null +++ b/checkpoints/checkpoint-164000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4be013c5e2210026c4227c8652897b57e22ecdc599b7c6cf8fe4f7b93e73daeb +size 324662984 diff --git a/checkpoints/checkpoint-164000/training_args.bin b/checkpoints/checkpoint-164000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-164000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-16600/config.json b/checkpoints/checkpoint-16600/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-16600/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-16600/model.safetensors b/checkpoints/checkpoint-16600/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..df046f2f742904bcc471fa04eb73db9d1ca79c07 --- /dev/null +++ b/checkpoints/checkpoint-16600/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:abfd45bc5f953d28acb3b46754d535306a7a73605b226d5cd64baf46541b7dd6 +size 324662984 diff --git a/checkpoints/checkpoint-16600/training_args.bin b/checkpoints/checkpoint-16600/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-16600/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-166000/config.json b/checkpoints/checkpoint-166000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-166000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + 
"eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-166000/model.safetensors b/checkpoints/checkpoint-166000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..95a5d325c8c6e42c2cec4b3ae77636677102e3ce --- /dev/null +++ b/checkpoints/checkpoint-166000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:12ffefa7dc435235d5cacc17ba8af4b01c01f3ad851e9d1b6974c42518a977ae +size 324662984 diff --git a/checkpoints/checkpoint-166000/training_args.bin b/checkpoints/checkpoint-166000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-166000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-16800/config.json b/checkpoints/checkpoint-16800/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-16800/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-16800/model.safetensors b/checkpoints/checkpoint-16800/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..05369f2a1e93f896ecd411cd2bd70871332607c4 --- /dev/null +++ b/checkpoints/checkpoint-16800/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3a6f8f1e77be7caa02c43a19cf07d04791423b253d6484d6d6f185258aa0beec +size 324662984 diff --git a/checkpoints/checkpoint-16800/training_args.bin b/checkpoints/checkpoint-16800/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-16800/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-168000/config.json b/checkpoints/checkpoint-168000/config.json new file mode 100644 index 
0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-168000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-168000/model.safetensors b/checkpoints/checkpoint-168000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..c8b256118e9186c52d7bb57342405cd0910bf076 --- /dev/null +++ b/checkpoints/checkpoint-168000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3a817c80004d2bfaa759bb02b025be90c149aed9eec06b3fe2a646a927910999 +size 324662984 diff --git a/checkpoints/checkpoint-168000/training_args.bin b/checkpoints/checkpoint-168000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-168000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-1700/config.json b/checkpoints/checkpoint-1700/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-1700/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-1700/model.safetensors b/checkpoints/checkpoint-1700/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..4360422f9848ef8f1c0106cf093a0a8ad788fcf7 --- /dev/null +++ b/checkpoints/checkpoint-1700/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:28d11ca3481db82a74a0751c0d83a13cf47338cd0328dd2cbdac2b0e715052c6 +size 324662984 diff --git a/checkpoints/checkpoint-1700/training_args.bin b/checkpoints/checkpoint-1700/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f 
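All of the training_args.bin hunks in this diff point at the same 6520-byte object (oid sha256:b07671a2…), so the Trainer arguments did not change over the run. The file the Hugging Face Trainer writes under this name is a torch.save()'d TrainingArguments object; it can be inspected directly, though as a pickle it needs weights_only=False on recent PyTorch (a sketch, assuming the LFS object has been fetched):

```python
import torch

# Pickled TrainingArguments, not a tensor file: allow full unpickling.
args = torch.load(
    "checkpoints/checkpoint-1700/training_args.bin", weights_only=False
)
print(args.learning_rate, args.per_device_train_batch_size, args.save_steps)
```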
--- /dev/null +++ b/checkpoints/checkpoint-1700/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-17000/config.json b/checkpoints/checkpoint-17000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-17000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-17000/model.safetensors b/checkpoints/checkpoint-17000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..eeac46fb0b963ba88258cb79e7aaad1550c6f482 --- /dev/null +++ b/checkpoints/checkpoint-17000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:30ef68be6e3760d4bbdae04bdcf7da446c225bb43ad16054a6c32c7d268cfe38 +size 324662984 diff --git a/checkpoints/checkpoint-17000/training_args.bin b/checkpoints/checkpoint-17000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-17000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-170000/config.json b/checkpoints/checkpoint-170000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-170000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-170000/model.safetensors b/checkpoints/checkpoint-170000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..fc665b51aa6a0a422004403f119a0454bb374d3e --- /dev/null +++ b/checkpoints/checkpoint-170000/model.safetensors @@ -0,0 +1,3 @@ +version 
https://git-lfs.github.com/spec/v1 +oid sha256:4950a08fc1b47e5ea76c76c373cc00509c42f009a456b0661ec84f20f624a984 +size 324662984 diff --git a/checkpoints/checkpoint-170000/training_args.bin b/checkpoints/checkpoint-170000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-170000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-17200/config.json b/checkpoints/checkpoint-17200/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-17200/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-17200/model.safetensors b/checkpoints/checkpoint-17200/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..a68cb0f23495c04188b6ccb4ecf20955ea023a21 --- /dev/null +++ b/checkpoints/checkpoint-17200/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b2cb0e19ba3d274e73b4181e24ca35e713526843e38e4bb7b161e2cb1b8b9730 +size 324662984 diff --git a/checkpoints/checkpoint-17200/training_args.bin b/checkpoints/checkpoint-17200/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-17200/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-172000/config.json b/checkpoints/checkpoint-172000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-172000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + 
"use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-172000/model.safetensors b/checkpoints/checkpoint-172000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..ad0a904e7605d33d22d094d7451ab5962e5c8054 --- /dev/null +++ b/checkpoints/checkpoint-172000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:164f1196a3635f4fd4abdaf3ad0417991b273cc99f3d4c3c49c2b3ccd9e67bae +size 324662984 diff --git a/checkpoints/checkpoint-172000/training_args.bin b/checkpoints/checkpoint-172000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-172000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-17400/config.json b/checkpoints/checkpoint-17400/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-17400/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-17400/model.safetensors b/checkpoints/checkpoint-17400/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..00714ddcb02cf17fdafbe4f3127b944df85a6934 --- /dev/null +++ b/checkpoints/checkpoint-17400/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:dcdd1a625cd8e0ecf6ed62a5ccbfe8f788dc8a3ab4f8c231fb655df393e27e7e +size 324662984 diff --git a/checkpoints/checkpoint-17400/training_args.bin b/checkpoints/checkpoint-17400/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-17400/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-174000/config.json b/checkpoints/checkpoint-174000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-174000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 
1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-174000/model.safetensors b/checkpoints/checkpoint-174000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..e53ea37da64204bc06471d89e50475f171575f47 --- /dev/null +++ b/checkpoints/checkpoint-174000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9b78125b52eef84fb56be5fe50d9e106cf359a5a7f4fef05d7037de9801f6cce +size 324662984 diff --git a/checkpoints/checkpoint-174000/training_args.bin b/checkpoints/checkpoint-174000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-174000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-17600/config.json b/checkpoints/checkpoint-17600/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-17600/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-17600/model.safetensors b/checkpoints/checkpoint-17600/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..6304337eca47d7f4f77d6bfb13d19abda799f2b7 --- /dev/null +++ b/checkpoints/checkpoint-17600/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6c1d9744026c42b68a69ef37c8cb7e3d243d0d2b63575292f79f2ef0d23221c5 +size 324662984 diff --git a/checkpoints/checkpoint-17600/training_args.bin b/checkpoints/checkpoint-17600/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-17600/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-176000/config.json b/checkpoints/checkpoint-176000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-176000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": 
"georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-176000/model.safetensors b/checkpoints/checkpoint-176000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..58c708afb34adb97b823be56a9aa6b9b998b5d43 --- /dev/null +++ b/checkpoints/checkpoint-176000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5b96917fbcb8e170e2516da16508731e6d45aca50b2309911482456e47cf4aa8 +size 324662984 diff --git a/checkpoints/checkpoint-176000/training_args.bin b/checkpoints/checkpoint-176000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-176000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-17800/config.json b/checkpoints/checkpoint-17800/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-17800/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-17800/model.safetensors b/checkpoints/checkpoint-17800/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..fb0a2ce9ded7f5301183d55d5647bb3567e5a2f6 --- /dev/null +++ b/checkpoints/checkpoint-17800/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b42b2461212b3ef0ccc2201c2aed631d50f3d79d42b006bbc5876641609da322 +size 324662984 diff --git a/checkpoints/checkpoint-17800/training_args.bin b/checkpoints/checkpoint-17800/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-17800/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-178000/config.json b/checkpoints/checkpoint-178000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-178000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-178000/model.safetensors b/checkpoints/checkpoint-178000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..2e33f266c17334067b6155c71aefb0880e7a0cbb --- /dev/null +++ b/checkpoints/checkpoint-178000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a7cd0aea49972f216c537841c0f17860a8ae34c7a7cc073b6fa71f74b36c9bdb +size 324662984 diff --git a/checkpoints/checkpoint-178000/training_args.bin b/checkpoints/checkpoint-178000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-178000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-1800/config.json b/checkpoints/checkpoint-1800/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-1800/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-1800/model.safetensors b/checkpoints/checkpoint-1800/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..c245d61245246b43c2166a893f9fed9423d45adb --- /dev/null +++ b/checkpoints/checkpoint-1800/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e7d76356f39f3ea1bc844089d869a36ab580b9335cdc4fe34d90ccf9f22dbbc6 +size 324662984 diff --git 
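The constant 324662984-byte safetensors payload squares with the config: counting parameters for the GPT-NeoX layout above (untied embeddings, fused QKV, biased linears) gives roughly 162.3M parameters, i.e. about 324.6 MB at 2 bytes each in bfloat16, with the ~17 KB remainder being the safetensors header. A back-of-the-envelope check (a sketch; the per-layer breakdown assumes the standard GPTNeoX module shapes):

```python
hidden, layers, inter, vocab = 768, 12, 3072, 50304   # from config.json

embed_in  = vocab * hidden                 # input embedding matrix
embed_out = hidden * vocab                 # lm head ("tie_word_embeddings": false)
per_layer = (
    hidden * 3 * hidden + 3 * hidden       # fused QKV projection + bias
    + hidden * hidden + hidden             # attention output projection + bias
    + hidden * inter + inter               # MLP up-projection + bias
    + inter * hidden + hidden              # MLP down-projection + bias
    + 4 * hidden                           # two LayerNorms, weight + bias each
)
final_ln = 2 * hidden
total = embed_in + embed_out + layers * per_layer + final_ln
print(total, 2 * total)                    # 162322944 params, 324645888 bytes
```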
a/checkpoints/checkpoint-1800/training_args.bin b/checkpoints/checkpoint-1800/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-1800/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-18000/config.json b/checkpoints/checkpoint-18000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-18000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-18000/model.safetensors b/checkpoints/checkpoint-18000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..6a1c6b560829af1cca71f0718b754009653df8b3 --- /dev/null +++ b/checkpoints/checkpoint-18000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:2905dd6be2a10fa7319671e5cf5262a76afa0822ed81b5eecf03f0ed83bfc40a +size 324662984 diff --git a/checkpoints/checkpoint-18000/training_args.bin b/checkpoints/checkpoint-18000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-18000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-180000/config.json b/checkpoints/checkpoint-180000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-180000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-180000/model.safetensors b/checkpoints/checkpoint-180000/model.safetensors 
new file mode 100644 index 0000000000000000000000000000000000000000..edf4b46d00e66cb1d3912d2e19f80196b365aa50 --- /dev/null +++ b/checkpoints/checkpoint-180000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f86fb132815418237191c06fdbf2aac1659861f5c956a41848b18c38ac9e27a2 +size 324662984 diff --git a/checkpoints/checkpoint-180000/training_args.bin b/checkpoints/checkpoint-180000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-180000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-18200/config.json b/checkpoints/checkpoint-18200/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-18200/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-18200/model.safetensors b/checkpoints/checkpoint-18200/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..4bd5ce9f410ee9dcca8c201610595afadf1ae6a4 --- /dev/null +++ b/checkpoints/checkpoint-18200/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4e3b3dcfbd89ba719950768013119804da72410640d1842a711051f7d0748b35 +size 324662984 diff --git a/checkpoints/checkpoint-18200/training_args.bin b/checkpoints/checkpoint-18200/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-18200/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-182000/config.json b/checkpoints/checkpoint-182000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-182000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + 
"rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-182000/model.safetensors b/checkpoints/checkpoint-182000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..dba32d9b3532af52c3cc7ce12b85cc319471f552 --- /dev/null +++ b/checkpoints/checkpoint-182000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c59821cc6e86543554c6755ec40fc44d1ddd252aeddb39ce8527a67e8ded608b +size 324662984 diff --git a/checkpoints/checkpoint-182000/training_args.bin b/checkpoints/checkpoint-182000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-182000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-18400/config.json b/checkpoints/checkpoint-18400/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-18400/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-18400/model.safetensors b/checkpoints/checkpoint-18400/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..0556c32bf926d4f99991381189d0d11166698cf6 --- /dev/null +++ b/checkpoints/checkpoint-18400/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:93f682f10245085f58f646c413022a8292ec7400ea29225984377218455f023b +size 324662984 diff --git a/checkpoints/checkpoint-18400/training_args.bin b/checkpoints/checkpoint-18400/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-18400/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-184000/config.json b/checkpoints/checkpoint-184000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-184000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + 
"classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-184000/model.safetensors b/checkpoints/checkpoint-184000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..fdac59f9eee6615e385e780dd75b931cf7cf7393 --- /dev/null +++ b/checkpoints/checkpoint-184000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e10dcf5ecb4956d553b0f652678be369341987cffc9e0a09bff09b440283cc08 +size 324662984 diff --git a/checkpoints/checkpoint-184000/training_args.bin b/checkpoints/checkpoint-184000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-184000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-18600/config.json b/checkpoints/checkpoint-18600/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-18600/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-18600/model.safetensors b/checkpoints/checkpoint-18600/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..f3321369f92772020b35738ae01413c69decec3e --- /dev/null +++ b/checkpoints/checkpoint-18600/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b7bd31eb64c49b01db97776d72c2365eb5144f0a0b4e031f7225c218e69f73f3 +size 324662984 diff --git a/checkpoints/checkpoint-18600/training_args.bin b/checkpoints/checkpoint-18600/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-18600/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-186000/config.json b/checkpoints/checkpoint-186000/config.json new file mode 100644 index 
0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-186000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-186000/model.safetensors b/checkpoints/checkpoint-186000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..f51061bc36c59b709c6ec4bd9429cc45fcf412bf --- /dev/null +++ b/checkpoints/checkpoint-186000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:37c286c5821f47286edbb626941c8dc978614b911d07607cfb7bc2a5022d4145 +size 324662984 diff --git a/checkpoints/checkpoint-186000/training_args.bin b/checkpoints/checkpoint-186000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-186000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-18800/config.json b/checkpoints/checkpoint-18800/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-18800/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-18800/model.safetensors b/checkpoints/checkpoint-18800/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..337ef0ca673a65ec336a34994d8795ce58a30853 --- /dev/null +++ b/checkpoints/checkpoint-18800/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a627886542b350e13055eb47f1a5de6a9df988b042a16458784918bd61923a47 +size 324662984 diff --git a/checkpoints/checkpoint-18800/training_args.bin b/checkpoints/checkpoint-18800/training_args.bin new file mode 100644 index 
0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-18800/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-188000/config.json b/checkpoints/checkpoint-188000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-188000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-188000/model.safetensors b/checkpoints/checkpoint-188000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..d88488e83c808e3a681e846b40df2f7b455f7369 --- /dev/null +++ b/checkpoints/checkpoint-188000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0bc3798623db85318fb2236ce03c2cb911a768657e55ee4cb1e610d359078a47 +size 324662984 diff --git a/checkpoints/checkpoint-188000/training_args.bin b/checkpoints/checkpoint-188000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-188000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-1900/config.json b/checkpoints/checkpoint-1900/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-1900/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-1900/model.safetensors b/checkpoints/checkpoint-1900/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..047b327aa86c0588cd2f4f5bf65fabce760cbb2c --- /dev/null 
+++ b/checkpoints/checkpoint-1900/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:69e6ca58564e9e39dc2ce42afefe708eadf85677b3b771c23fc257e173c92eac +size 324662984 diff --git a/checkpoints/checkpoint-1900/training_args.bin b/checkpoints/checkpoint-1900/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-1900/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-19000/config.json b/checkpoints/checkpoint-19000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-19000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-19000/model.safetensors b/checkpoints/checkpoint-19000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..681fd2413bca533112d77fc75407d9abe53bba14 --- /dev/null +++ b/checkpoints/checkpoint-19000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:68f98ef6abb3db5100d81d34d3616472f7d96b20ec47141b30db22850fd4eab2 +size 324662984 diff --git a/checkpoints/checkpoint-19000/training_args.bin b/checkpoints/checkpoint-19000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-19000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-190000/config.json b/checkpoints/checkpoint-190000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-190000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + 
"transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-190000/model.safetensors b/checkpoints/checkpoint-190000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..7bf657683a2bf613210498eff098947e8d895e8e --- /dev/null +++ b/checkpoints/checkpoint-190000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:daa2f7087de83ec291ea8e01c4dbe7f46a647723c9eae9ab22fe283b2689d0dc +size 324662984 diff --git a/checkpoints/checkpoint-190000/training_args.bin b/checkpoints/checkpoint-190000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-190000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-19200/config.json b/checkpoints/checkpoint-19200/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-19200/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-19200/model.safetensors b/checkpoints/checkpoint-19200/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..cafd73bb0cadad3d1591e2122259074b43bbb62d --- /dev/null +++ b/checkpoints/checkpoint-19200/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5847d217de1b4f2bed52e036daa6eb0857ec40da2768afc9f11508c4ce932d51 +size 324662984 diff --git a/checkpoints/checkpoint-19200/training_args.bin b/checkpoints/checkpoint-19200/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-19200/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-192000/config.json b/checkpoints/checkpoint-192000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-192000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 
0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-192000/model.safetensors b/checkpoints/checkpoint-192000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..752f66c99933ed56188598a5d0cfa7a7255a68b0 --- /dev/null +++ b/checkpoints/checkpoint-192000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:25e41822614ced3cf021b807233e5246ba2d0d1a1be1a063b94ecd6811a11fc1 +size 324662984 diff --git a/checkpoints/checkpoint-192000/training_args.bin b/checkpoints/checkpoint-192000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-192000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-19400/config.json b/checkpoints/checkpoint-19400/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-19400/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-19400/model.safetensors b/checkpoints/checkpoint-19400/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..b0e3aec72efd5e00ace35b56ef4ab77ecf233ed6 --- /dev/null +++ b/checkpoints/checkpoint-19400/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:69e9ee93fb90205234ddafd0903a7e25aa9bf7afa6c01b6bf76aa763eadfd046 +size 324662984 diff --git a/checkpoints/checkpoint-19400/training_args.bin b/checkpoints/checkpoint-19400/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-19400/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-194000/config.json b/checkpoints/checkpoint-194000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ 
b/checkpoints/checkpoint-194000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-194000/model.safetensors b/checkpoints/checkpoint-194000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..39fc70bd3319a18517165b990f714e10d6df2ec6 --- /dev/null +++ b/checkpoints/checkpoint-194000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:cc8b45ee7af7c6363d4a281b1871ca26865f03837c2c49b567e6a93eacb25033 +size 324662984 diff --git a/checkpoints/checkpoint-194000/training_args.bin b/checkpoints/checkpoint-194000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-194000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-19600/config.json b/checkpoints/checkpoint-19600/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-19600/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-19600/model.safetensors b/checkpoints/checkpoint-19600/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..88d717ec025be1541f6d38edae5ff6ab5b449461 --- /dev/null +++ b/checkpoints/checkpoint-19600/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:dc2f897305f4679583c1daed7b95a158b7ca57af6b80fe4b1e964c01e5684aa5 +size 324662984 diff --git a/checkpoints/checkpoint-19600/training_args.bin b/checkpoints/checkpoint-19600/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-19600/training_args.bin @@ -0,0 +1,3 @@ +version 
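The model.safetensors and training_args.bin entries are git-lfs pointer files rather than the binaries themselves: three text lines carrying the spec version, the sha256 of the stored object, and its size in bytes. A minimal sketch of reading one in Python, assuming the repo was checked out without `git lfs` so the pointer text itself sits on disk (the path is just an example):

```python
from pathlib import Path

def parse_lfs_pointer(text: str) -> dict:
    """Split a git-lfs pointer ('version', 'oid sha256:<hex>', 'size <n>') into fields."""
    fields = dict(line.split(" ", 1) for line in text.strip().splitlines())
    return {
        "version": fields["version"],
        "sha256": fields["oid"].removeprefix("sha256:"),
        "size_bytes": int(fields["size"]),
    }

pointer = parse_lfs_pointer(
    Path("checkpoints/checkpoint-178000/model.safetensors").read_text()
)
print(pointer["sha256"])      # a7cd0aea4997...
print(pointer["size_bytes"])  # 324662984
```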
The remaining hunks each add three new files under checkpoints/<dir>/: the shared config.json (blob b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916, contents as shown above), the shared training_args.bin pointer (blob b13eeefe91ec34fffd57226cb30f75034f303d5f, oid and size as above), and a per-checkpoint model.safetensors LFS pointer, indexed here (every pointer records size 324662984):

checkpoint-1800    blob c245d61245246b43c2166a893f9fed9423d45adb  oid sha256:e7d76356f39f3ea1bc844089d869a36ab580b9335cdc4fe34d90ccf9f22dbbc6
checkpoint-18000   blob 6a1c6b560829af1cca71f0718b754009653df8b3  oid sha256:2905dd6be2a10fa7319671e5cf5262a76afa0822ed81b5eecf03f0ed83bfc40a
checkpoint-180000  blob edf4b46d00e66cb1d3912d2e19f80196b365aa50  oid sha256:f86fb132815418237191c06fdbf2aac1659861f5c956a41848b18c38ac9e27a2
checkpoint-18200   blob 4bd5ce9f410ee9dcca8c201610595afadf1ae6a4  oid sha256:4e3b3dcfbd89ba719950768013119804da72410640d1842a711051f7d0748b35
checkpoint-182000  blob dba32d9b3532af52c3cc7ce12b85cc319471f552  oid sha256:c59821cc6e86543554c6755ec40fc44d1ddd252aeddb39ce8527a67e8ded608b
checkpoint-18400   blob 0556c32bf926d4f99991381189d0d11166698cf6  oid sha256:93f682f10245085f58f646c413022a8292ec7400ea29225984377218455f023b
checkpoint-184000  blob fdac59f9eee6615e385e780dd75b931cf7cf7393  oid sha256:e10dcf5ecb4956d553b0f652678be369341987cffc9e0a09bff09b440283cc08
checkpoint-18600   blob f3321369f92772020b35738ae01413c69decec3e  oid sha256:b7bd31eb64c49b01db97776d72c2365eb5144f0a0b4e031f7225c218e69f73f3
checkpoint-186000  blob f51061bc36c59b709c6ec4bd9429cc45fcf412bf  oid sha256:37c286c5821f47286edbb626941c8dc978614b911d07607cfb7bc2a5022d4145
checkpoint-18800   blob 337ef0ca673a65ec336a34994d8795ce58a30853  oid sha256:a627886542b350e13055eb47f1a5de6a9df988b042a16458784918bd61923a47
checkpoint-188000  blob d88488e83c808e3a681e846b40df2f7b455f7369  oid sha256:0bc3798623db85318fb2236ce03c2cb911a768657e55ee4cb1e610d359078a47
checkpoint-1900    blob 047b327aa86c0588cd2f4f5bf65fabce760cbb2c  oid sha256:69e6ca58564e9e39dc2ce42afefe708eadf85677b3b771c23fc257e173c92eac
checkpoint-19000   blob 681fd2413bca533112d77fc75407d9abe53bba14  oid sha256:68f98ef6abb3db5100d81d34d3616472f7d96b20ec47141b30db22850fd4eab2
checkpoint-190000  blob 7bf657683a2bf613210498eff098947e8d895e8e  oid sha256:daa2f7087de83ec291ea8e01c4dbe7f46a647723c9eae9ab22fe283b2689d0dc
checkpoint-19200   blob cafd73bb0cadad3d1591e2122259074b43bbb62d  oid sha256:5847d217de1b4f2bed52e036daa6eb0857ec40da2768afc9f11508c4ce932d51
checkpoint-192000  blob 752f66c99933ed56188598a5d0cfa7a7255a68b0  oid sha256:25e41822614ced3cf021b807233e5246ba2d0d1a1be1a063b94ecd6811a11fc1
checkpoint-19400   blob b0e3aec72efd5e00ace35b56ef4ab77ecf233ed6  oid sha256:69e9ee93fb90205234ddafd0903a7e25aa9bf7afa6c01b6bf76aa763eadfd046
checkpoint-194000  blob 39fc70bd3319a18517165b990f714e10d6df2ec6  oid sha256:cc8b45ee7af7c6363d4a281b1871ca26865f03837c2c49b567e6a93eacb25033
checkpoint-19600   blob 88d717ec025be1541f6d38edae5ff6ab5b449461  oid sha256:dc2f897305f4679583c1daed7b95a158b7ca57af6b80fe4b1e964c01e5684aa5
checkpoint-196000  blob 0b0c07e355faac29df90fdda8ce825f4f22c9d72  oid sha256:4b831d497d8feb3d4e31d5b5bd0b57c98ce848363b67d58e7a546e7cbcba6613
checkpoint-19800   blob 84797ea8ab4140fc3d7d3f42752e084ea95638e2  oid sha256:e6757066f7b4b3b9150d0f98c007349fb5a8d3e81bc5fd2279ce3070d10ad663
checkpoint-198000  blob 667b12a2842a35d25aea137f04bb5766b5f6a578  oid sha256:033f9b757d08ba74f613675c24a13d2d04bd79dd25a451f9707c036909ab5bb1
checkpoint-200     blob cd64efe8ddc12056d0acca1a4c3ed8d39e1a535b  oid sha256:edf55969730613dbd51ae4a74899221cbb44618deb698504294254053ec6e007
checkpoint-2000    blob fcffa66ab5c28850c2db82a763fdfae98a6badb2  oid sha256:4103b28eb5a0f43f2d1e4fadd58ada503f87f5f62a8b6a38c265d3749005004a
checkpoint-20000   blob f986b0402d8600249acf08c40d422acfc8a3119a  oid sha256:c0597b7ab9002749f449d8ec171d46a602b8c5258abf98fa32d2b3347fe498f2
checkpoint-200000  blob a24dd852ace11646068aef60c5ac32ece92ff0fe  oid sha256:89e36274e8efdbc10053f7767e20304a1cf935b26941c8c7e957ae75ee14b1a5
checkpoint-20500   blob 288803c7d0731dfc9a2b85f1add24312fb546899  oid sha256:fdab98e92cf2b4d53b160ad59a2974787a730d62fb70e453101b787181d418eb
checkpoint-205000  blob 72040dca0a6eef3e4589fd481e16cb1f48aeb385  oid sha256:78a7a0337d5b0862a20b8380bdcbddbed6c0c0bbed67427214f70eb671f4aa36
checkpoint-2100    blob 6651d8be31804cb31f33a4a034c7be7a831691cf  oid sha256:9e6496d0b5bdb73bc585c2b366aff2a2e6630101830be727e59e410f33d6207f
checkpoint-21000   blob fb5ecabfbf9d003c73897dd1a96935fa09f6a54d  oid sha256:f41db922adb2940b019597496f6768174fe2fa2b1282aeabe4fe2f4980c2a4e5
checkpoint-210000  blob d010e04e49a3b402fed5f00a9c7f1f085c763660  oid sha256:319125552fe2194674fa326ab88da8e0eb78e77f7d8935c38b27da68c1748ae6
checkpoint-21500   blob c20e61f30e9774ab611a97605e6061906ee66df0  oid sha256:76d490d75d9769b71cb59a88905b5d208a2e39e4b6a0ae987584ee082217d572
checkpoint-215000  blob 26251cdb86e89f733d687ccee4adc8afcae2fb68  oid sha256:fff3c8cc9a8c17e481918db762f8928c13df11cc99a70defe470744d3d894107
checkpoint-2200    blob 9b2ab60cd7e3a1f4de67aae39f108ccc2ed73828  oid sha256:cc49ec52c25e66a271d532a4363d8caa0c835dd015c46bda0f9c179cd486041e
checkpoint-22000   blob 2cc899e66a3d320d9f687dfdc44d7241daf1ce64  oid sha256:12fb7b9f3409e0504a4ae8e3b2a172f198463113e2e2d4f4598831617c7830c8
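Because each directory carries a full GPT-NeoX config.json alongside the weights, any checkpoint in the index above can in principle be loaded directly with transformers. A minimal sketch, assuming this patch backs a Hugging Face Hub repository; the repo id below is a hypothetical placeholder for wherever these files actually live:

```python
import torch
from transformers import GPTNeoXForCausalLM

# "georgeyw/gpt-2-small-seed-5" is a hypothetical repo id; substitute the
# actual Hub repo this patch belongs to.
model = GPTNeoXForCausalLM.from_pretrained(
    "georgeyw/gpt-2-small-seed-5",
    subfolder="checkpoints/checkpoint-20000",  # any directory from the index above
    torch_dtype=torch.bfloat16,                # matches "torch_dtype" in config.json
)
```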
"bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-2200/model.safetensors b/checkpoints/checkpoint-2200/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..9b2ab60cd7e3a1f4de67aae39f108ccc2ed73828 --- /dev/null +++ b/checkpoints/checkpoint-2200/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:cc49ec52c25e66a271d532a4363d8caa0c835dd015c46bda0f9c179cd486041e +size 324662984 diff --git a/checkpoints/checkpoint-2200/training_args.bin b/checkpoints/checkpoint-2200/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-2200/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-22000/config.json b/checkpoints/checkpoint-22000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-22000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-22000/model.safetensors b/checkpoints/checkpoint-22000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..2cc899e66a3d320d9f687dfdc44d7241daf1ce64 --- /dev/null +++ b/checkpoints/checkpoint-22000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:12fb7b9f3409e0504a4ae8e3b2a172f198463113e2e2d4f4598831617c7830c8 +size 324662984 diff --git a/checkpoints/checkpoint-22000/training_args.bin b/checkpoints/checkpoint-22000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-22000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-220000/config.json b/checkpoints/checkpoint-220000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-220000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 
0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-220000/model.safetensors b/checkpoints/checkpoint-220000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..5b7817553c911a94ddffe1365a1f981eb9187e5f --- /dev/null +++ b/checkpoints/checkpoint-220000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1cdcb88c6c27e941c6b5bfa9e2563b66c2ed1e824ca7613dd09c20567d735771 +size 324662984 diff --git a/checkpoints/checkpoint-220000/training_args.bin b/checkpoints/checkpoint-220000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-220000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-22500/config.json b/checkpoints/checkpoint-22500/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-22500/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-22500/model.safetensors b/checkpoints/checkpoint-22500/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..a3d5f5352030b908ce606759743f26154cc3abcc --- /dev/null +++ b/checkpoints/checkpoint-22500/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b4f133a2136d0e5488c3c713fbfd84a023c32dfdcfb3e391e58b944e45cd7950 +size 324662984 diff --git a/checkpoints/checkpoint-22500/training_args.bin b/checkpoints/checkpoint-22500/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-22500/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-225000/config.json b/checkpoints/checkpoint-225000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ 
b/checkpoints/checkpoint-225000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-225000/model.safetensors b/checkpoints/checkpoint-225000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..9b99b05e41135a9c80b531e7a44408e0bab7f67f --- /dev/null +++ b/checkpoints/checkpoint-225000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d767c4c173dc6e66ae34b858c4ef278de910ac92351801562674d9a545f46325 +size 324662984 diff --git a/checkpoints/checkpoint-225000/training_args.bin b/checkpoints/checkpoint-225000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-225000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-2300/config.json b/checkpoints/checkpoint-2300/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-2300/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-2300/model.safetensors b/checkpoints/checkpoint-2300/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..f26536568dd50a1c88ac08e515eee4e0248078ad --- /dev/null +++ b/checkpoints/checkpoint-2300/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:434bd4f3bb49c9c746ec6b2f79e302377e9744b2d033a9ce8b764c3956a3b797 +size 324662984 diff --git a/checkpoints/checkpoint-2300/training_args.bin b/checkpoints/checkpoint-2300/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-2300/training_args.bin @@ -0,0 +1,3 @@ +version 
https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-23000/config.json b/checkpoints/checkpoint-23000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-23000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-23000/model.safetensors b/checkpoints/checkpoint-23000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..b097dd20ebfe8c704233d531cb61277210d0caa9 --- /dev/null +++ b/checkpoints/checkpoint-23000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:202bb8c80a685c43ac94f044da765afd45453c340388c1b7f81d9161ba550434 +size 324662984 diff --git a/checkpoints/checkpoint-23000/training_args.bin b/checkpoints/checkpoint-23000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-23000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-230000/config.json b/checkpoints/checkpoint-230000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-230000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-230000/model.safetensors b/checkpoints/checkpoint-230000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..a2c39cd8e8c0fb396f02bf693974e314e7900502 --- /dev/null +++ b/checkpoints/checkpoint-230000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:047f58c42d742ef1237f89f53e4093dfe9fa9117dd57b8e8a0fb21b9b20b9282 +size 324662984 diff --git a/checkpoints/checkpoint-230000/training_args.bin b/checkpoints/checkpoint-230000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-230000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-23500/config.json b/checkpoints/checkpoint-23500/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-23500/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-23500/model.safetensors b/checkpoints/checkpoint-23500/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..7539e4245444aceed0d52eb96314551751fd7b0c --- /dev/null +++ b/checkpoints/checkpoint-23500/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:be8adcff91b67849706077b789fbe6b5e270704ae70448f5e089bfab88ad5ea7 +size 324662984 diff --git a/checkpoints/checkpoint-23500/training_args.bin b/checkpoints/checkpoint-23500/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-23500/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-235000/config.json b/checkpoints/checkpoint-235000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-235000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff 
--git a/checkpoints/checkpoint-235000/model.safetensors b/checkpoints/checkpoint-235000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..1063d138bac61991fc098aa09b271d9a802461dd --- /dev/null +++ b/checkpoints/checkpoint-235000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:deec80fccc935680fc5035c02ffc22164ec8a7a0f55538555f8266a83f7636f1 +size 324662984 diff --git a/checkpoints/checkpoint-235000/training_args.bin b/checkpoints/checkpoint-235000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-235000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-2400/config.json b/checkpoints/checkpoint-2400/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-2400/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-2400/model.safetensors b/checkpoints/checkpoint-2400/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..274c0631b56a0f1dcaee1a83b4ffc5fa2fb31b07 --- /dev/null +++ b/checkpoints/checkpoint-2400/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:afa2f37adb1768416305105ff9fd5051d63734fa753af00244402d2f0e96d8ce +size 324662984 diff --git a/checkpoints/checkpoint-2400/training_args.bin b/checkpoints/checkpoint-2400/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-2400/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-24000/config.json b/checkpoints/checkpoint-24000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-24000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + 
"model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-24000/model.safetensors b/checkpoints/checkpoint-24000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..b892135c387a77e66e3d2a8d18eaa12057d68e5a --- /dev/null +++ b/checkpoints/checkpoint-24000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:322c73cd28e2b26b3db5eac3b9b058b4b4ea218221bbb6b4d2b2626b441b9970 +size 324662984 diff --git a/checkpoints/checkpoint-24000/training_args.bin b/checkpoints/checkpoint-24000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-24000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-240000/config.json b/checkpoints/checkpoint-240000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-240000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-240000/model.safetensors b/checkpoints/checkpoint-240000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..1586cca4a44bf0a9f1c2d56a6994520d6ee2dbd0 --- /dev/null +++ b/checkpoints/checkpoint-240000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4a1a3d17091a2e7fe47877732e4fca380e70d54fd5ea28908d9ea22a1f6f1824 +size 324662984 diff --git a/checkpoints/checkpoint-240000/training_args.bin b/checkpoints/checkpoint-240000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-240000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-24500/config.json b/checkpoints/checkpoint-24500/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-24500/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + 
"attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-24500/model.safetensors b/checkpoints/checkpoint-24500/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..5fd9a1d4fa9d111990ba19c76ebd467dbc26a909 --- /dev/null +++ b/checkpoints/checkpoint-24500/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8944f9991680d818128624ca6aab5211518af647534e21cdf46667666c665f81 +size 324662984 diff --git a/checkpoints/checkpoint-24500/training_args.bin b/checkpoints/checkpoint-24500/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-24500/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-245000/config.json b/checkpoints/checkpoint-245000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-245000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-245000/model.safetensors b/checkpoints/checkpoint-245000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..e09b89fc0c7eeda038af0a49da83d974f5a7e77a --- /dev/null +++ b/checkpoints/checkpoint-245000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:357b1020f0aca4a2834cf3fcc3a14cdcaf3c429c00a1b2feca11c6564a66d7b1 +size 324662984 diff --git a/checkpoints/checkpoint-245000/training_args.bin b/checkpoints/checkpoint-245000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-245000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git 
a/checkpoints/checkpoint-2500/config.json b/checkpoints/checkpoint-2500/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-2500/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-2500/model.safetensors b/checkpoints/checkpoint-2500/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..da0167bb29568941c7d0360bac05c612c561b184 --- /dev/null +++ b/checkpoints/checkpoint-2500/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0bbeaa9ee9aef7f080111531b377910f4876f06f434d1307f80b68faf7f2fff5 +size 324662984 diff --git a/checkpoints/checkpoint-2500/training_args.bin b/checkpoints/checkpoint-2500/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-2500/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-25000/config.json b/checkpoints/checkpoint-25000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-25000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-25000/model.safetensors b/checkpoints/checkpoint-25000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..d871d62004bd89b532a3c0153eaa7166bd82c41d --- /dev/null +++ b/checkpoints/checkpoint-25000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:070a377c5f446cad418905dcc1c1a37d2b1c3a0373481a2bf515559fd47794f2 +size 324662984 diff --git a/checkpoints/checkpoint-25000/training_args.bin b/checkpoints/checkpoint-25000/training_args.bin new 
file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-25000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-250000/config.json b/checkpoints/checkpoint-250000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-250000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-250000/model.safetensors b/checkpoints/checkpoint-250000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..977f82943e6f021c7186a8e72e7ef7daed1f2145 --- /dev/null +++ b/checkpoints/checkpoint-250000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a5b55023f6e2d7bc2b83e4282979ffa9806be323b3df9a16a80af064aedbe4de +size 324662984 diff --git a/checkpoints/checkpoint-250000/training_args.bin b/checkpoints/checkpoint-250000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-250000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-25500/config.json b/checkpoints/checkpoint-25500/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-25500/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-25500/model.safetensors b/checkpoints/checkpoint-25500/model.safetensors new file mode 100644 index 
0000000000000000000000000000000000000000..5ce39300f7457d8362124df28c42b95cf7c6f655 --- /dev/null +++ b/checkpoints/checkpoint-25500/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d13a745460c6c7cd9a9ad52b453c861d267913fe6eed6d660e8d9d2f6e8facb7 +size 324662984 diff --git a/checkpoints/checkpoint-25500/training_args.bin b/checkpoints/checkpoint-25500/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-25500/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-255000/config.json b/checkpoints/checkpoint-255000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-255000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-255000/model.safetensors b/checkpoints/checkpoint-255000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..f964bff1f97acfcbe96cb44ff92570ff3bfe8d80 --- /dev/null +++ b/checkpoints/checkpoint-255000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3129cd9202c5644a96ea023c2b8c74dcc5d33a6e149b9932d76a2f1bdb148ecd +size 324662984 diff --git a/checkpoints/checkpoint-255000/training_args.bin b/checkpoints/checkpoint-255000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-255000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-2600/config.json b/checkpoints/checkpoint-2600/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-2600/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + 
"rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-2600/model.safetensors b/checkpoints/checkpoint-2600/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..3173ca506581990562d92c8d2df7080c13467243 --- /dev/null +++ b/checkpoints/checkpoint-2600/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:cc9c294e9ffa039ad91164a2e789694624ed4a3ff1e521cdd5ecf8b0e25747f8 +size 324662984 diff --git a/checkpoints/checkpoint-2600/training_args.bin b/checkpoints/checkpoint-2600/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-2600/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-26000/config.json b/checkpoints/checkpoint-26000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-26000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-26000/model.safetensors b/checkpoints/checkpoint-26000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..3f8303dc29534c80b7761dfddb8ee8f9a269c63d --- /dev/null +++ b/checkpoints/checkpoint-26000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a579da7bacf8124fe98b71b868495e1d11a6cd888425d607450fb74dde369f0c +size 324662984 diff --git a/checkpoints/checkpoint-26000/training_args.bin b/checkpoints/checkpoint-26000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-26000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-260000/config.json b/checkpoints/checkpoint-260000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-260000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 
2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-260000/model.safetensors b/checkpoints/checkpoint-260000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..38aa6e812389bcdd00d64f9b657ea7d414fdfa6c --- /dev/null +++ b/checkpoints/checkpoint-260000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:27534420c16fe1796b5a9d764656b7a7b8e30871824017028c5a02750ff9eeab +size 324662984 diff --git a/checkpoints/checkpoint-260000/training_args.bin b/checkpoints/checkpoint-260000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-260000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-26500/config.json b/checkpoints/checkpoint-26500/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-26500/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-26500/model.safetensors b/checkpoints/checkpoint-26500/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..a025ad5fbd6711e70501866d1886f11d33aeffc2 --- /dev/null +++ b/checkpoints/checkpoint-26500/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6ff0e4361b5ec4afebe3a7b83cc1819dc1e7fd1db3bda8c78edf8e130078e12b +size 324662984 diff --git a/checkpoints/checkpoint-26500/training_args.bin b/checkpoints/checkpoint-26500/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-26500/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-265000/config.json b/checkpoints/checkpoint-265000/config.json new file mode 100644 index 
0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-265000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-265000/model.safetensors b/checkpoints/checkpoint-265000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..80f3d871b3fa03f0c1b522fd18d6e9a768f0036e --- /dev/null +++ b/checkpoints/checkpoint-265000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:92a7e60bfbbac0a39b8a6f3c2b2c41c8ed768b208dfa931cc486e0d256840d3a +size 324662984 diff --git a/checkpoints/checkpoint-265000/training_args.bin b/checkpoints/checkpoint-265000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-265000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-2700/config.json b/checkpoints/checkpoint-2700/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-2700/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-2700/model.safetensors b/checkpoints/checkpoint-2700/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..0d254e1f7eee58d6e1afe4754ab9d0de57c5debd --- /dev/null +++ b/checkpoints/checkpoint-2700/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1734badc56ae93c9ddd47fc3b13fb3eb3165472340cc6603e44fccf86e1f82a3 +size 324662984 diff --git a/checkpoints/checkpoint-2700/training_args.bin b/checkpoints/checkpoint-2700/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f 
--- /dev/null +++ b/checkpoints/checkpoint-2700/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-27000/config.json b/checkpoints/checkpoint-27000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-27000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-27000/model.safetensors b/checkpoints/checkpoint-27000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..7b89103bbb108ab42fd91a8c87567645991a21f0 --- /dev/null +++ b/checkpoints/checkpoint-27000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d5b59683ec9b54be6cc9a539a640ee2dcb3d88621277d2e8da8019e5ba8e6a6b +size 324662984 diff --git a/checkpoints/checkpoint-27000/training_args.bin b/checkpoints/checkpoint-27000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-27000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-270000/config.json b/checkpoints/checkpoint-270000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-270000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-270000/model.safetensors b/checkpoints/checkpoint-270000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..7533ba70859db0a24443aa22dc4e2e50a39ed0ec --- /dev/null +++ b/checkpoints/checkpoint-270000/model.safetensors @@ -0,0 +1,3 @@ +version 
https://git-lfs.github.com/spec/v1
+oid sha256:732ab9d0777cdfc2e725a5b48b73b00e95cb6f048dbf95072d105e1711b3c5a4
+size 324662984
diff --git a/checkpoints/checkpoint-270000/training_args.bin b/checkpoints/checkpoint-270000/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f
--- /dev/null
+++ b/checkpoints/checkpoint-270000/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5
+size 6520
diff --git a/checkpoints/checkpoint-27500/config.json b/checkpoints/checkpoint-27500/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916
--- /dev/null
+++ b/checkpoints/checkpoint-27500/config.json
@@ -0,0 +1,31 @@
+{
+ "_name_or_path": "georgeyw/gpt-2-small-init-seed-5",
+ "architectures": [
+ "GPTNeoXForCausalLM"
+ ],
+ "attention_bias": true,
+ "attention_dropout": 0.0,
+ "bos_token_id": 0,
+ "classifier_dropout": 0.1,
+ "eos_token_id": 2,
+ "hidden_act": "gelu",
+ "hidden_dropout": 0.0,
+ "hidden_size": 768,
+ "initializer_range": 0.02,
+ "intermediate_size": 3072,
+ "layer_norm_eps": 1e-05,
+ "layer_norm_epsilon": 1e-05,
+ "max_position_embeddings": 1024,
+ "model_type": "gpt_neox",
+ "num_attention_heads": 12,
+ "num_hidden_layers": 12,
+ "rope_scaling": null,
+ "rotary_emb_base": 10000,
+ "rotary_pct": 0.25,
+ "tie_word_embeddings": false,
+ "torch_dtype": "bfloat16",
+ "transformers_version": "4.38.2",
+ "use_cache": true,
+ "use_parallel_residual": true,
+ "vocab_size": 50304
+}
diff --git a/checkpoints/checkpoint-27500/model.safetensors b/checkpoints/checkpoint-27500/model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..10a1426a0dda26a5038af2234a45f28b9da2a736
--- /dev/null
+++ b/checkpoints/checkpoint-27500/model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f7c9199cfbfb53c338a33a57478ec96f2c06601e21ba67fe5c37f6d9b2f8111e
+size 324662984
diff --git a/checkpoints/checkpoint-27500/training_args.bin b/checkpoints/checkpoint-27500/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f
--- /dev/null
+++ b/checkpoints/checkpoint-27500/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5
+size 6520
diff --git a/checkpoints/checkpoint-275000/config.json b/checkpoints/checkpoint-275000/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916
--- /dev/null
+++ b/checkpoints/checkpoint-275000/config.json
@@ -0,0 +1,31 @@
+{
+ "_name_or_path": "georgeyw/gpt-2-small-init-seed-5",
+ "architectures": [
+ "GPTNeoXForCausalLM"
+ ],
+ "attention_bias": true,
+ "attention_dropout": 0.0,
+ "bos_token_id": 0,
+ "classifier_dropout": 0.1,
+ "eos_token_id": 2,
+ "hidden_act": "gelu",
+ "hidden_dropout": 0.0,
+ "hidden_size": 768,
+ "initializer_range": 0.02,
+ "intermediate_size": 3072,
+ "layer_norm_eps": 1e-05,
+ "layer_norm_epsilon": 1e-05,
+ "max_position_embeddings": 1024,
+ "model_type": "gpt_neox",
+ "num_attention_heads": 12,
+ "num_hidden_layers": 12,
+ "rope_scaling": null,
+ "rotary_emb_base": 10000,
+ "rotary_pct": 0.25,
+ "tie_word_embeddings": false,
+ "torch_dtype": "bfloat16",
+ "transformers_version": "4.38.2",
+ "use_cache": true,
+ "use_parallel_residual": true,
+ "vocab_size": 50304
+}
diff --git a/checkpoints/checkpoint-275000/model.safetensors b/checkpoints/checkpoint-275000/model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..e819b6588b1705760b48bd0329cf040b14a39ebb
--- /dev/null
+++ b/checkpoints/checkpoint-275000/model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:cac1fbd2e3bf0f1909c89581e88dbc03ad485dfc08fa013374ec6a304b8c3330
+size 324662984
diff --git a/checkpoints/checkpoint-275000/training_args.bin b/checkpoints/checkpoint-275000/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f
--- /dev/null
+++ b/checkpoints/checkpoint-275000/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5
+size 6520
diff --git a/checkpoints/checkpoint-2800/config.json b/checkpoints/checkpoint-2800/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916
--- /dev/null
+++ b/checkpoints/checkpoint-2800/config.json
@@ -0,0 +1,31 @@
+{
+ "_name_or_path": "georgeyw/gpt-2-small-init-seed-5",
+ "architectures": [
+ "GPTNeoXForCausalLM"
+ ],
+ "attention_bias": true,
+ "attention_dropout": 0.0,
+ "bos_token_id": 0,
+ "classifier_dropout": 0.1,
+ "eos_token_id": 2,
+ "hidden_act": "gelu",
+ "hidden_dropout": 0.0,
+ "hidden_size": 768,
+ "initializer_range": 0.02,
+ "intermediate_size": 3072,
+ "layer_norm_eps": 1e-05,
+ "layer_norm_epsilon": 1e-05,
+ "max_position_embeddings": 1024,
+ "model_type": "gpt_neox",
+ "num_attention_heads": 12,
+ "num_hidden_layers": 12,
+ "rope_scaling": null,
+ "rotary_emb_base": 10000,
+ "rotary_pct": 0.25,
+ "tie_word_embeddings": false,
+ "torch_dtype": "bfloat16",
+ "transformers_version": "4.38.2",
+ "use_cache": true,
+ "use_parallel_residual": true,
+ "vocab_size": 50304
+}
diff --git a/checkpoints/checkpoint-2800/model.safetensors b/checkpoints/checkpoint-2800/model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..6424dcd394ad002b8b41510d3e80587011cee8c4
--- /dev/null
+++ b/checkpoints/checkpoint-2800/model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2c104e55d000f479f2778235a86fdbcdbbf6ded5d792fdaa6e827fb0300b8bc4
+size 324662984
diff --git a/checkpoints/checkpoint-2800/training_args.bin b/checkpoints/checkpoint-2800/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f
--- /dev/null
+++ b/checkpoints/checkpoint-2800/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5
+size 6520
diff --git a/checkpoints/checkpoint-28000/config.json b/checkpoints/checkpoint-28000/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916
--- /dev/null
+++ b/checkpoints/checkpoint-28000/config.json
@@ -0,0 +1,31 @@
+{
+ "_name_or_path": "georgeyw/gpt-2-small-init-seed-5",
+ "architectures": [
+ "GPTNeoXForCausalLM"
+ ],
+ "attention_bias": true,
+ "attention_dropout": 0.0,
+ "bos_token_id": 0,
+ "classifier_dropout": 0.1,
+ "eos_token_id": 2,
+ "hidden_act": "gelu",
+ "hidden_dropout": 0.0,
+ "hidden_size": 768,
+ "initializer_range": 0.02,
+ "intermediate_size": 3072,
+ "layer_norm_eps": 1e-05,
+ "layer_norm_epsilon": 1e-05,
+ "max_position_embeddings": 1024,
+ "model_type": "gpt_neox",
+ "num_attention_heads": 12,
+ "num_hidden_layers": 12,
+ "rope_scaling": null,
+ "rotary_emb_base": 10000,
+ "rotary_pct": 0.25,
+ "tie_word_embeddings": false,
+ "torch_dtype": "bfloat16",
+ "transformers_version": "4.38.2",
+ "use_cache": true,
+ "use_parallel_residual": true,
+ "vocab_size": 50304
+}
diff --git a/checkpoints/checkpoint-28000/model.safetensors b/checkpoints/checkpoint-28000/model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..dc2eb7ea4c2081abf3ba47154bf980ecc5166dc9
--- /dev/null
+++ b/checkpoints/checkpoint-28000/model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:755e4f0c7e5ff5d9ace67f858aed61988b3a3675809f6600eacc110392a73787
+size 324662984
diff --git a/checkpoints/checkpoint-28000/training_args.bin b/checkpoints/checkpoint-28000/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f
--- /dev/null
+++ b/checkpoints/checkpoint-28000/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5
+size 6520
diff --git a/checkpoints/checkpoint-280000/config.json b/checkpoints/checkpoint-280000/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916
--- /dev/null
+++ b/checkpoints/checkpoint-280000/config.json
@@ -0,0 +1,31 @@
+{
+ "_name_or_path": "georgeyw/gpt-2-small-init-seed-5",
+ "architectures": [
+ "GPTNeoXForCausalLM"
+ ],
+ "attention_bias": true,
+ "attention_dropout": 0.0,
+ "bos_token_id": 0,
+ "classifier_dropout": 0.1,
+ "eos_token_id": 2,
+ "hidden_act": "gelu",
+ "hidden_dropout": 0.0,
+ "hidden_size": 768,
+ "initializer_range": 0.02,
+ "intermediate_size": 3072,
+ "layer_norm_eps": 1e-05,
+ "layer_norm_epsilon": 1e-05,
+ "max_position_embeddings": 1024,
+ "model_type": "gpt_neox",
+ "num_attention_heads": 12,
+ "num_hidden_layers": 12,
+ "rope_scaling": null,
+ "rotary_emb_base": 10000,
+ "rotary_pct": 0.25,
+ "tie_word_embeddings": false,
+ "torch_dtype": "bfloat16",
+ "transformers_version": "4.38.2",
+ "use_cache": true,
+ "use_parallel_residual": true,
+ "vocab_size": 50304
+}
diff --git a/checkpoints/checkpoint-280000/model.safetensors b/checkpoints/checkpoint-280000/model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..256b08eb48268a8925a6c4e2f6a212ee893efc93
--- /dev/null
+++ b/checkpoints/checkpoint-280000/model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8ba18b869ec9be77c3f868c6f4532f653c38e360c9a63f3a4c5c6a537206f73e
+size 324662984
diff --git a/checkpoints/checkpoint-280000/training_args.bin b/checkpoints/checkpoint-280000/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f
--- /dev/null
+++ b/checkpoints/checkpoint-280000/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5
+size 6520
diff --git a/checkpoints/checkpoint-28500/config.json b/checkpoints/checkpoint-28500/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916
--- /dev/null
+++ b/checkpoints/checkpoint-28500/config.json
@@ -0,0 +1,31 @@
+{
+ "_name_or_path": "georgeyw/gpt-2-small-init-seed-5",
+ "architectures": [
+ "GPTNeoXForCausalLM"
+ ],
+ "attention_bias": true,
+ "attention_dropout": 0.0,
+ "bos_token_id": 0,
+ "classifier_dropout": 0.1,
+ "eos_token_id": 2,
+ "hidden_act": "gelu",
+ "hidden_dropout": 0.0,
+ "hidden_size": 768,
+ "initializer_range": 0.02,
+ "intermediate_size": 3072,
+ "layer_norm_eps": 1e-05,
+ "layer_norm_epsilon": 1e-05,
+ "max_position_embeddings": 1024,
+ "model_type": "gpt_neox",
+ "num_attention_heads": 12,
+ "num_hidden_layers": 12,
+ "rope_scaling": null,
+ "rotary_emb_base": 10000,
+ "rotary_pct": 0.25,
+ "tie_word_embeddings": false,
+ "torch_dtype": "bfloat16",
+ "transformers_version": "4.38.2",
+ "use_cache": true,
+ "use_parallel_residual": true,
+ "vocab_size": 50304
+}
diff --git a/checkpoints/checkpoint-28500/model.safetensors b/checkpoints/checkpoint-28500/model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..b690cf9bccb8f378b618d9330e1482bf75fdc4e1
--- /dev/null
+++ b/checkpoints/checkpoint-28500/model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:de0f0e680a4a398e60ba295a66410c69fd4ccb161e76ce18a53745456b8fffe6
+size 324662984
diff --git a/checkpoints/checkpoint-28500/training_args.bin b/checkpoints/checkpoint-28500/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f
--- /dev/null
+++ b/checkpoints/checkpoint-28500/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5
+size 6520
diff --git a/checkpoints/checkpoint-285000/config.json b/checkpoints/checkpoint-285000/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916
--- /dev/null
+++ b/checkpoints/checkpoint-285000/config.json
@@ -0,0 +1,31 @@
+{
+ "_name_or_path": "georgeyw/gpt-2-small-init-seed-5",
+ "architectures": [
+ "GPTNeoXForCausalLM"
+ ],
+ "attention_bias": true,
+ "attention_dropout": 0.0,
+ "bos_token_id": 0,
+ "classifier_dropout": 0.1,
+ "eos_token_id": 2,
+ "hidden_act": "gelu",
+ "hidden_dropout": 0.0,
+ "hidden_size": 768,
+ "initializer_range": 0.02,
+ "intermediate_size": 3072,
+ "layer_norm_eps": 1e-05,
+ "layer_norm_epsilon": 1e-05,
+ "max_position_embeddings": 1024,
+ "model_type": "gpt_neox",
+ "num_attention_heads": 12,
+ "num_hidden_layers": 12,
+ "rope_scaling": null,
+ "rotary_emb_base": 10000,
+ "rotary_pct": 0.25,
+ "tie_word_embeddings": false,
+ "torch_dtype": "bfloat16",
+ "transformers_version": "4.38.2",
+ "use_cache": true,
+ "use_parallel_residual": true,
+ "vocab_size": 50304
+}
diff --git a/checkpoints/checkpoint-285000/model.safetensors b/checkpoints/checkpoint-285000/model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..82120f24716c0eceae917f8d9bdcce8600f503e2
--- /dev/null
+++ b/checkpoints/checkpoint-285000/model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9f28237fdbd7c9e295ae09a046a7c5692f7dd97413b1777c61973642c2ed589d
+size 324662984
diff --git a/checkpoints/checkpoint-285000/training_args.bin b/checkpoints/checkpoint-285000/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f
--- /dev/null
+++ b/checkpoints/checkpoint-285000/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5
+size 6520
diff --git a/checkpoints/checkpoint-2900/config.json b/checkpoints/checkpoint-2900/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916
--- /dev/null
+++ b/checkpoints/checkpoint-2900/config.json
@@ -0,0 +1,31 @@
+{
+ "_name_or_path": "georgeyw/gpt-2-small-init-seed-5",
+ "architectures": [
+ "GPTNeoXForCausalLM"
+ ],
+ "attention_bias": true,
+ "attention_dropout": 0.0,
+ "bos_token_id": 0,
+ "classifier_dropout": 0.1,
+ "eos_token_id": 2,
+ "hidden_act": "gelu",
+ "hidden_dropout": 0.0,
+ "hidden_size": 768,
+ "initializer_range": 0.02,
+ "intermediate_size": 3072,
+ "layer_norm_eps": 1e-05,
+ "layer_norm_epsilon": 1e-05,
+ "max_position_embeddings": 1024,
+ "model_type": "gpt_neox",
+ "num_attention_heads": 12,
+ "num_hidden_layers": 12,
+ "rope_scaling": null,
+ "rotary_emb_base": 10000,
+ "rotary_pct": 0.25,
+ "tie_word_embeddings": false,
+ "torch_dtype": "bfloat16",
+ "transformers_version": "4.38.2",
+ "use_cache": true,
+ "use_parallel_residual": true,
+ "vocab_size": 50304
+}
diff --git a/checkpoints/checkpoint-2900/model.safetensors b/checkpoints/checkpoint-2900/model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..421c9dabce199994b3181b8963e6d7f01c6dfc59
--- /dev/null
+++ b/checkpoints/checkpoint-2900/model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2b4f824eae05db91df9267a73a881b26f5fd4feef351090a3de5d9d25d520074
+size 324662984
diff --git a/checkpoints/checkpoint-2900/training_args.bin b/checkpoints/checkpoint-2900/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f
--- /dev/null
+++ b/checkpoints/checkpoint-2900/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5
+size 6520
diff --git a/checkpoints/checkpoint-29000/config.json b/checkpoints/checkpoint-29000/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916
--- /dev/null
+++ b/checkpoints/checkpoint-29000/config.json
@@ -0,0 +1,31 @@
+{
+ "_name_or_path": "georgeyw/gpt-2-small-init-seed-5",
+ "architectures": [
+ "GPTNeoXForCausalLM"
+ ],
+ "attention_bias": true,
+ "attention_dropout": 0.0,
+ "bos_token_id": 0,
+ "classifier_dropout": 0.1,
+ "eos_token_id": 2,
+ "hidden_act": "gelu",
+ "hidden_dropout": 0.0,
+ "hidden_size": 768,
+ "initializer_range": 0.02,
+ "intermediate_size": 3072,
+ "layer_norm_eps": 1e-05,
+ "layer_norm_epsilon": 1e-05,
+ "max_position_embeddings": 1024,
+ "model_type": "gpt_neox",
+ "num_attention_heads": 12,
+ "num_hidden_layers": 12,
+ "rope_scaling": null,
+ "rotary_emb_base": 10000,
+ "rotary_pct": 0.25,
+ "tie_word_embeddings": false,
+ "torch_dtype": "bfloat16",
+ "transformers_version": "4.38.2",
+ "use_cache": true,
+ "use_parallel_residual": true,
+ "vocab_size": 50304
+}
diff --git a/checkpoints/checkpoint-29000/model.safetensors b/checkpoints/checkpoint-29000/model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..d120b2482a3abb266d34b4334cf7efb6a8d7031a
--- /dev/null
+++ b/checkpoints/checkpoint-29000/model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:06dc407e40f813a6ea132d9d3470d6b81bb440c5628f72010b78cdd8c787e619
+size 324662984
diff --git a/checkpoints/checkpoint-29000/training_args.bin b/checkpoints/checkpoint-29000/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f
--- /dev/null
+++ b/checkpoints/checkpoint-29000/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5
+size 6520
diff --git a/checkpoints/checkpoint-290000/config.json b/checkpoints/checkpoint-290000/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916
--- /dev/null
+++ b/checkpoints/checkpoint-290000/config.json
@@ -0,0 +1,31 @@
+{
+ "_name_or_path": "georgeyw/gpt-2-small-init-seed-5",
+ "architectures": [
+ "GPTNeoXForCausalLM"
+ ],
+ "attention_bias": true,
+ "attention_dropout": 0.0,
+ "bos_token_id": 0,
+ "classifier_dropout": 0.1,
+ "eos_token_id": 2,
+ "hidden_act": "gelu",
+ "hidden_dropout": 0.0,
+ "hidden_size": 768,
+ "initializer_range": 0.02,
+ "intermediate_size": 3072,
+ "layer_norm_eps": 1e-05,
+ "layer_norm_epsilon": 1e-05,
+ "max_position_embeddings": 1024,
+ "model_type": "gpt_neox",
+ "num_attention_heads": 12,
+ "num_hidden_layers": 12,
+ "rope_scaling": null,
+ "rotary_emb_base": 10000,
+ "rotary_pct": 0.25,
+ "tie_word_embeddings": false,
+ "torch_dtype": "bfloat16",
+ "transformers_version": "4.38.2",
+ "use_cache": true,
+ "use_parallel_residual": true,
+ "vocab_size": 50304
+}
diff --git a/checkpoints/checkpoint-290000/model.safetensors b/checkpoints/checkpoint-290000/model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..f56bd9672e699a3751829b95079438bbcf8ded88
--- /dev/null
+++ b/checkpoints/checkpoint-290000/model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3a2be98c01ff2cc24948021a9037baba6d7f815aaaa4796809c99c5509e9771d
+size 324662984
diff --git a/checkpoints/checkpoint-290000/training_args.bin b/checkpoints/checkpoint-290000/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f
--- /dev/null
+++ b/checkpoints/checkpoint-290000/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5
+size 6520
diff --git a/checkpoints/checkpoint-29500/config.json b/checkpoints/checkpoint-29500/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916
--- /dev/null
+++ b/checkpoints/checkpoint-29500/config.json
@@ -0,0 +1,31 @@
+{
+ "_name_or_path": "georgeyw/gpt-2-small-init-seed-5",
+ "architectures": [
+ "GPTNeoXForCausalLM"
+ ],
+ "attention_bias": true,
+ "attention_dropout": 0.0,
+ "bos_token_id": 0,
+ "classifier_dropout": 0.1,
+ "eos_token_id": 2,
+ "hidden_act": "gelu",
+ "hidden_dropout": 0.0,
+ "hidden_size": 768,
+ "initializer_range": 0.02,
+ "intermediate_size": 3072,
+ "layer_norm_eps": 1e-05,
+ "layer_norm_epsilon": 1e-05,
+ "max_position_embeddings": 1024,
+ "model_type": "gpt_neox",
+ "num_attention_heads": 12,
+ "num_hidden_layers": 12,
+ "rope_scaling": null,
+ "rotary_emb_base": 10000,
+ "rotary_pct": 0.25,
+ "tie_word_embeddings": false,
+ "torch_dtype": "bfloat16",
+ "transformers_version": "4.38.2",
+ "use_cache": true,
+ "use_parallel_residual": true,
+ "vocab_size": 50304
+}
diff --git a/checkpoints/checkpoint-29500/model.safetensors b/checkpoints/checkpoint-29500/model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..f16e6520a451e61b2efcdac6da69fc82a9acd0be
--- /dev/null
+++ b/checkpoints/checkpoint-29500/model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:356ab414521eac245826554563947bb80c92ad45d3811ae4513127b7c444ce4f
+size 324662984
diff --git a/checkpoints/checkpoint-29500/training_args.bin b/checkpoints/checkpoint-29500/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f
--- /dev/null
+++ b/checkpoints/checkpoint-29500/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5
+size 6520
diff --git a/checkpoints/checkpoint-295000/config.json b/checkpoints/checkpoint-295000/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916
--- /dev/null
+++ b/checkpoints/checkpoint-295000/config.json
@@ -0,0 +1,31 @@
+{
+ "_name_or_path": "georgeyw/gpt-2-small-init-seed-5",
+ "architectures": [
+ "GPTNeoXForCausalLM"
+ ],
+ "attention_bias": true,
+ "attention_dropout": 0.0,
+ "bos_token_id": 0,
+ "classifier_dropout": 0.1,
+ "eos_token_id": 2,
+ "hidden_act": "gelu",
+ "hidden_dropout": 0.0,
+ "hidden_size": 768,
+ "initializer_range": 0.02,
+ "intermediate_size": 3072,
+ "layer_norm_eps": 1e-05,
+ "layer_norm_epsilon": 1e-05,
+ "max_position_embeddings": 1024,
+ "model_type": "gpt_neox",
+ "num_attention_heads": 12,
+ "num_hidden_layers": 12,
+ "rope_scaling": null,
+ "rotary_emb_base": 10000,
+ "rotary_pct": 0.25,
+ "tie_word_embeddings": false,
+ "torch_dtype": "bfloat16",
+ "transformers_version": "4.38.2",
+ "use_cache": true,
+ "use_parallel_residual": true,
+ "vocab_size": 50304
+}
diff --git a/checkpoints/checkpoint-295000/model.safetensors b/checkpoints/checkpoint-295000/model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..b0cb265d30aaa7562954d62d0acf421ef0f627e9
--- /dev/null
+++ b/checkpoints/checkpoint-295000/model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3a764fc6eb765f98da21bfdecce1bed8e728f49d45b1c2756619912fc761e7b0
+size 324662984
diff --git a/checkpoints/checkpoint-295000/training_args.bin b/checkpoints/checkpoint-295000/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f
--- /dev/null
+++ b/checkpoints/checkpoint-295000/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5
+size 6520
diff --git a/checkpoints/checkpoint-300/config.json b/checkpoints/checkpoint-300/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916
--- /dev/null
+++ b/checkpoints/checkpoint-300/config.json
@@ -0,0 +1,31 @@
+{
+ "_name_or_path": "georgeyw/gpt-2-small-init-seed-5",
+ "architectures": [
+ "GPTNeoXForCausalLM"
+ ],
+ "attention_bias": true,
+ "attention_dropout": 0.0,
+ "bos_token_id": 0,
+ "classifier_dropout": 0.1,
+ "eos_token_id": 2,
+ "hidden_act": "gelu",
+ "hidden_dropout": 0.0,
+ "hidden_size": 768,
+ "initializer_range": 0.02,
+ "intermediate_size": 3072,
+ "layer_norm_eps": 1e-05,
+ "layer_norm_epsilon": 1e-05,
+ "max_position_embeddings": 1024,
+ "model_type": "gpt_neox",
+ "num_attention_heads": 12,
+ "num_hidden_layers": 12,
+ "rope_scaling": null,
+ "rotary_emb_base": 10000,
+ "rotary_pct": 0.25,
+ "tie_word_embeddings": false,
+ "torch_dtype": "bfloat16",
+ "transformers_version": "4.38.2",
+ "use_cache": true,
+ "use_parallel_residual": true,
+ "vocab_size": 50304
+}
diff --git a/checkpoints/checkpoint-300/model.safetensors b/checkpoints/checkpoint-300/model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..9946ff2d92f24b60ffa88f3bcc1908c459d3408d
--- /dev/null
+++ b/checkpoints/checkpoint-300/model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a5d3bae7754979ae245969ec3a87d9a3e5acfc75ab7b5cb6af0967fa41f7c6a7
+size 324662984
diff --git a/checkpoints/checkpoint-300/training_args.bin b/checkpoints/checkpoint-300/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f
--- /dev/null
+++ b/checkpoints/checkpoint-300/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5
+size 6520
diff --git a/checkpoints/checkpoint-3000/config.json b/checkpoints/checkpoint-3000/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916
--- /dev/null
+++ b/checkpoints/checkpoint-3000/config.json
@@ -0,0 +1,31 @@
+{
+ "_name_or_path": "georgeyw/gpt-2-small-init-seed-5",
+ "architectures": [
+ "GPTNeoXForCausalLM"
+ ],
+ "attention_bias": true,
+ "attention_dropout": 0.0,
+ "bos_token_id": 0,
+ "classifier_dropout": 0.1,
+ "eos_token_id": 2,
+ "hidden_act": "gelu",
+ "hidden_dropout": 0.0,
+ "hidden_size": 768,
+ "initializer_range": 0.02,
+ "intermediate_size": 3072,
+ "layer_norm_eps": 1e-05,
+ "layer_norm_epsilon": 1e-05,
+ "max_position_embeddings": 1024,
+ "model_type": "gpt_neox",
+ "num_attention_heads": 12,
+ "num_hidden_layers": 12,
+ "rope_scaling": null,
+ "rotary_emb_base": 10000,
+ "rotary_pct": 0.25,
+ "tie_word_embeddings": false,
+ "torch_dtype": "bfloat16",
+ "transformers_version": "4.38.2",
+ "use_cache": true,
+ "use_parallel_residual": true,
+ "vocab_size": 50304
+}
diff --git a/checkpoints/checkpoint-3000/model.safetensors b/checkpoints/checkpoint-3000/model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..4383ebea2f3b3d10129efc0a30cf1db09ea3f66d
--- /dev/null
+++ b/checkpoints/checkpoint-3000/model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0ccca912952667629d01a16ff87839fc84d8bbeace2e895187b04f1e19c064e3
+size 324662984
diff --git a/checkpoints/checkpoint-3000/training_args.bin b/checkpoints/checkpoint-3000/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f
--- /dev/null
+++ b/checkpoints/checkpoint-3000/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5
+size 6520
diff --git a/checkpoints/checkpoint-30000/config.json b/checkpoints/checkpoint-30000/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916
--- /dev/null
+++ b/checkpoints/checkpoint-30000/config.json
@@ -0,0 +1,31 @@
+{
+ "_name_or_path": "georgeyw/gpt-2-small-init-seed-5",
+ "architectures": [
+ "GPTNeoXForCausalLM"
+ ],
+ "attention_bias": true,
+ "attention_dropout": 0.0,
+ "bos_token_id": 0,
+ "classifier_dropout": 0.1,
+ "eos_token_id": 2,
+ "hidden_act": "gelu",
+ "hidden_dropout": 0.0,
+ "hidden_size": 768,
+ "initializer_range": 0.02,
+ "intermediate_size": 3072,
+ "layer_norm_eps": 1e-05,
+ "layer_norm_epsilon": 1e-05,
+ "max_position_embeddings": 1024,
+ "model_type": "gpt_neox",
+ "num_attention_heads": 12,
+ "num_hidden_layers": 12,
+ "rope_scaling": null,
+ "rotary_emb_base": 10000,
+ "rotary_pct": 0.25,
+ "tie_word_embeddings": false,
+ "torch_dtype": "bfloat16",
+ "transformers_version": "4.38.2",
+ "use_cache": true,
+ "use_parallel_residual": true,
+ "vocab_size": 50304
+}
diff --git a/checkpoints/checkpoint-30000/model.safetensors b/checkpoints/checkpoint-30000/model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..ad22b27ae4e96703270b5c1ab37297e6d035f9c6
--- /dev/null
+++ b/checkpoints/checkpoint-30000/model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6ccf4cb3c6dc7a4be7eac274dc130eb8a143cea960b936b2370f778cda1972a5
+size 324662984
diff --git a/checkpoints/checkpoint-30000/training_args.bin b/checkpoints/checkpoint-30000/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f
--- /dev/null
+++ b/checkpoints/checkpoint-30000/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5
+size 6520
diff --git a/checkpoints/checkpoint-300000/config.json b/checkpoints/checkpoint-300000/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916
--- /dev/null
+++ b/checkpoints/checkpoint-300000/config.json
@@ -0,0 +1,31 @@
+{
+ "_name_or_path": "georgeyw/gpt-2-small-init-seed-5",
+ "architectures": [
+ "GPTNeoXForCausalLM"
+ ],
+ "attention_bias": true,
+ "attention_dropout": 0.0,
+ "bos_token_id": 0,
+ "classifier_dropout": 0.1,
+ "eos_token_id": 2,
+ "hidden_act": "gelu",
+ "hidden_dropout": 0.0,
+ "hidden_size": 768,
+ "initializer_range": 0.02,
+ "intermediate_size": 3072,
+ "layer_norm_eps": 1e-05,
+ "layer_norm_epsilon": 1e-05,
+ "max_position_embeddings": 1024,
+ "model_type": "gpt_neox",
+ "num_attention_heads": 12,
+ "num_hidden_layers": 12,
+ "rope_scaling": null,
+ "rotary_emb_base": 10000,
+ "rotary_pct": 0.25,
+ "tie_word_embeddings": false,
+ "torch_dtype": "bfloat16",
+ "transformers_version": "4.38.2",
+ "use_cache": true,
+ "use_parallel_residual": true,
+ "vocab_size": 50304
+}
diff --git a/checkpoints/checkpoint-300000/model.safetensors b/checkpoints/checkpoint-300000/model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..7ac567d7da7eeb00d02ac6c35dd01e4832d570b5
--- /dev/null
+++ b/checkpoints/checkpoint-300000/model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9cf6b1f006f01ddf4ffb61834142f1f4ecfd70960313b94a030e0522cc27190c
+size 324662984
diff --git a/checkpoints/checkpoint-300000/training_args.bin b/checkpoints/checkpoint-300000/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f
--- /dev/null
+++ b/checkpoints/checkpoint-300000/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5
+size 6520
diff --git a/checkpoints/checkpoint-30500/config.json b/checkpoints/checkpoint-30500/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916
--- /dev/null
+++ b/checkpoints/checkpoint-30500/config.json
@@ -0,0 +1,31 @@
+{
+ "_name_or_path": "georgeyw/gpt-2-small-init-seed-5",
+ "architectures": [
+ "GPTNeoXForCausalLM"
+ ],
+ "attention_bias": true,
+ "attention_dropout": 0.0,
+ "bos_token_id": 0,
+ "classifier_dropout": 0.1,
+ "eos_token_id": 2,
+ "hidden_act": "gelu",
+ "hidden_dropout": 0.0,
+ "hidden_size": 768,
+ "initializer_range": 0.02,
+ "intermediate_size": 3072,
+ "layer_norm_eps": 1e-05,
+ "layer_norm_epsilon": 1e-05,
+ "max_position_embeddings": 1024,
+ "model_type": "gpt_neox",
+ "num_attention_heads": 12,
+ "num_hidden_layers": 12,
+ "rope_scaling": null,
+ "rotary_emb_base": 10000,
+ "rotary_pct": 0.25,
+ "tie_word_embeddings": false,
+ "torch_dtype": "bfloat16",
+ "transformers_version": "4.38.2",
+ "use_cache": true,
+ "use_parallel_residual": true,
+ "vocab_size": 50304
+}
diff --git a/checkpoints/checkpoint-30500/model.safetensors b/checkpoints/checkpoint-30500/model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..9e20d60908cec5a9f954622e42bc28c19b6bd5d8
--- /dev/null
+++ b/checkpoints/checkpoint-30500/model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f048ad8606e5837af0709a3c44800964e88efe8b6509b049bcd172013e3458a6
+size 324662984
diff --git a/checkpoints/checkpoint-30500/training_args.bin b/checkpoints/checkpoint-30500/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f
--- /dev/null
+++ b/checkpoints/checkpoint-30500/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5
+size 6520
diff --git a/checkpoints/checkpoint-305000/config.json b/checkpoints/checkpoint-305000/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916
--- /dev/null
+++ b/checkpoints/checkpoint-305000/config.json
@@ -0,0 +1,31 @@
+{
+ "_name_or_path": "georgeyw/gpt-2-small-init-seed-5",
+ "architectures": [
+ "GPTNeoXForCausalLM"
+ ],
+ "attention_bias": true,
+ "attention_dropout": 0.0,
+ "bos_token_id": 0,
+ "classifier_dropout": 0.1,
+ "eos_token_id": 2,
+ "hidden_act": "gelu",
+ "hidden_dropout": 0.0,
+ "hidden_size": 768,
+ "initializer_range": 0.02,
+ "intermediate_size": 3072,
+ "layer_norm_eps": 1e-05,
+ "layer_norm_epsilon": 1e-05,
+ "max_position_embeddings": 1024,
+ "model_type": "gpt_neox",
+ "num_attention_heads": 12,
+ "num_hidden_layers": 12,
+ "rope_scaling": null,
+ "rotary_emb_base": 10000,
+ "rotary_pct": 0.25,
+ "tie_word_embeddings": false,
+ "torch_dtype": "bfloat16",
+ "transformers_version": "4.38.2",
+ "use_cache": true,
+ "use_parallel_residual": true,
+ "vocab_size": 50304
+}
diff --git a/checkpoints/checkpoint-305000/model.safetensors b/checkpoints/checkpoint-305000/model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..57812493747df25f57f153b564d7b3f872dd1702
--- /dev/null
+++ b/checkpoints/checkpoint-305000/model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:59425064a52658abac8fc6e5ae2fbc97e2a1cbb4773b602997a350a5145077a6
+size 324662984
diff --git a/checkpoints/checkpoint-305000/training_args.bin b/checkpoints/checkpoint-305000/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f
--- /dev/null
+++ b/checkpoints/checkpoint-305000/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5
+size 6520
diff --git a/checkpoints/checkpoint-3100/config.json b/checkpoints/checkpoint-3100/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916
--- /dev/null
+++ b/checkpoints/checkpoint-3100/config.json
@@ -0,0 +1,31 @@
+{
+ "_name_or_path": "georgeyw/gpt-2-small-init-seed-5",
+ "architectures": [
+ "GPTNeoXForCausalLM"
+ ],
+ "attention_bias": true,
+ "attention_dropout": 0.0,
+ "bos_token_id": 0,
+ "classifier_dropout": 0.1,
+ "eos_token_id": 2,
+ "hidden_act": "gelu",
+ "hidden_dropout": 0.0,
+ "hidden_size": 768,
+ "initializer_range": 0.02,
+ "intermediate_size": 3072,
+ "layer_norm_eps": 1e-05,
+ "layer_norm_epsilon": 1e-05,
+ "max_position_embeddings": 1024,
+ "model_type": "gpt_neox",
+ "num_attention_heads": 12,
+ "num_hidden_layers": 12,
+ "rope_scaling": null,
+ "rotary_emb_base": 10000,
+ "rotary_pct": 0.25,
+ "tie_word_embeddings": false,
+ "torch_dtype": "bfloat16",
+ "transformers_version": "4.38.2",
+ "use_cache": true,
+ "use_parallel_residual": true,
+ "vocab_size": 50304
+}
diff --git a/checkpoints/checkpoint-3100/model.safetensors b/checkpoints/checkpoint-3100/model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..a298ab9fcb8614775fac1990533f0bb9da0b8484
--- /dev/null
+++ b/checkpoints/checkpoint-3100/model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2a760e8f05eca364326deff965afafc2ba82b7e4b1d8230a91e3881cd1bbe893
+size 324662984
diff --git a/checkpoints/checkpoint-3100/training_args.bin b/checkpoints/checkpoint-3100/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f
--- /dev/null
+++ b/checkpoints/checkpoint-3100/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5
+size 6520
diff --git a/checkpoints/checkpoint-31000/config.json b/checkpoints/checkpoint-31000/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916
--- /dev/null
+++ b/checkpoints/checkpoint-31000/config.json
@@ -0,0 +1,31 @@
+{
+ "_name_or_path": "georgeyw/gpt-2-small-init-seed-5",
+ "architectures": [
+ "GPTNeoXForCausalLM"
+ ],
+ "attention_bias": true,
+ "attention_dropout": 0.0,
+ "bos_token_id": 0,
+ "classifier_dropout": 0.1,
+ "eos_token_id": 2,
+ "hidden_act": "gelu",
+ "hidden_dropout": 0.0,
+ "hidden_size": 768,
+ "initializer_range": 0.02,
+ "intermediate_size": 3072,
+ "layer_norm_eps": 1e-05,
+ "layer_norm_epsilon": 1e-05,
+ "max_position_embeddings": 1024,
+ "model_type": "gpt_neox",
+ "num_attention_heads": 12,
+ "num_hidden_layers": 12,
+ "rope_scaling": null,
+ "rotary_emb_base": 10000,
+ "rotary_pct": 0.25,
+ "tie_word_embeddings": false,
+ "torch_dtype": "bfloat16",
+ "transformers_version": "4.38.2",
+ "use_cache": true,
+ "use_parallel_residual": true,
+ "vocab_size": 50304
+}
diff --git a/checkpoints/checkpoint-31000/model.safetensors b/checkpoints/checkpoint-31000/model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..7da9b32f19056641e6b43d7349244e34a310e3ec
--- /dev/null
+++ b/checkpoints/checkpoint-31000/model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2657dacdde770f009d602d2ccea7e8427f7e4b1264135d10914c1761932ec332
+size 324662984
diff --git a/checkpoints/checkpoint-31000/training_args.bin b/checkpoints/checkpoint-31000/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f
--- /dev/null
+++ b/checkpoints/checkpoint-31000/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5
+size 6520
diff --git a/checkpoints/checkpoint-310000/config.json b/checkpoints/checkpoint-310000/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916
--- /dev/null
+++ b/checkpoints/checkpoint-310000/config.json
@@ -0,0 +1,31 @@
+{
+ "_name_or_path": "georgeyw/gpt-2-small-init-seed-5",
+ "architectures": [
+ "GPTNeoXForCausalLM"
+ ],
+ "attention_bias": true,
+ "attention_dropout": 0.0,
+ "bos_token_id": 0,
+ "classifier_dropout": 0.1,
+ "eos_token_id": 2,
+ "hidden_act": "gelu",
+ "hidden_dropout": 0.0,
+ "hidden_size": 768,
+ "initializer_range": 0.02,
+ "intermediate_size": 3072,
+ "layer_norm_eps": 1e-05,
+ "layer_norm_epsilon": 1e-05,
+ "max_position_embeddings": 1024,
+ "model_type": "gpt_neox",
+ "num_attention_heads": 12,
+ "num_hidden_layers": 12,
+ "rope_scaling": null,
+ "rotary_emb_base": 10000,
+ "rotary_pct": 0.25,
+ "tie_word_embeddings": false,
+ "torch_dtype": "bfloat16",
+ "transformers_version": "4.38.2",
+ "use_cache": true,
+ "use_parallel_residual": true,
+ "vocab_size": 50304
+}
diff --git a/checkpoints/checkpoint-310000/model.safetensors b/checkpoints/checkpoint-310000/model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..04d539eacaf581f3a47c3b89b8905bc2a9ae0eaf
--- /dev/null
+++ b/checkpoints/checkpoint-310000/model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:591c21f00e9808db878b7e970767e1e0554596969eb5ec8365aaa339a0b4d48a
+size 324662984
diff --git a/checkpoints/checkpoint-310000/training_args.bin b/checkpoints/checkpoint-310000/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f
--- /dev/null
+++ b/checkpoints/checkpoint-310000/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5
+size 6520
diff --git a/checkpoints/checkpoint-31500/config.json b/checkpoints/checkpoint-31500/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916
--- /dev/null
+++ b/checkpoints/checkpoint-31500/config.json
@@ -0,0 +1,31 @@
+{
+ "_name_or_path": "georgeyw/gpt-2-small-init-seed-5",
+ "architectures": [
+ "GPTNeoXForCausalLM"
+ ],
+ "attention_bias": true,
+ "attention_dropout": 0.0,
+ "bos_token_id": 0,
+ "classifier_dropout": 0.1,
+ "eos_token_id": 2,
+ "hidden_act": "gelu",
+ "hidden_dropout": 0.0,
+ "hidden_size": 768,
+ "initializer_range": 0.02,
+ "intermediate_size": 3072,
+ "layer_norm_eps": 1e-05,
+ "layer_norm_epsilon": 1e-05,
+ "max_position_embeddings": 1024,
+ "model_type": "gpt_neox",
+ "num_attention_heads": 12,
+ "num_hidden_layers": 12,
+ "rope_scaling": null,
+ "rotary_emb_base": 10000,
+ "rotary_pct": 0.25,
+ "tie_word_embeddings": false,
+ "torch_dtype": "bfloat16",
+ "transformers_version": "4.38.2",
+ "use_cache": true,
+ "use_parallel_residual": true,
+ "vocab_size": 50304
+}
diff --git a/checkpoints/checkpoint-31500/model.safetensors b/checkpoints/checkpoint-31500/model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..4ce208b7b54ff31e5ac987cf64d6b2ffa8346a32
--- /dev/null
+++ b/checkpoints/checkpoint-31500/model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:32a726c01ce3eef8ca455b139738a59c74e83f8a173443449996c5c7728fe4d4
+size 324662984
diff --git a/checkpoints/checkpoint-31500/training_args.bin b/checkpoints/checkpoint-31500/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f
--- /dev/null
+++ b/checkpoints/checkpoint-31500/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5
+size 6520
diff --git a/checkpoints/checkpoint-315000/config.json b/checkpoints/checkpoint-315000/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916
--- /dev/null
+++ b/checkpoints/checkpoint-315000/config.json
@@ -0,0 +1,31 @@
+{
+ "_name_or_path": "georgeyw/gpt-2-small-init-seed-5",
+ "architectures": [
+ "GPTNeoXForCausalLM"
+ ],
+ "attention_bias": true,
+ "attention_dropout": 0.0,
+ "bos_token_id": 0,
+ "classifier_dropout": 0.1,
+ "eos_token_id": 2,
+ "hidden_act": "gelu",
+ "hidden_dropout": 0.0,
+ "hidden_size": 768,
+ "initializer_range": 0.02,
+ "intermediate_size": 3072,
+ "layer_norm_eps": 1e-05,
+ "layer_norm_epsilon": 1e-05,
+ "max_position_embeddings": 1024,
+ "model_type": "gpt_neox",
+ "num_attention_heads": 12,
+ "num_hidden_layers": 12,
+ "rope_scaling": null,
+ "rotary_emb_base": 10000,
+ "rotary_pct": 0.25,
+ "tie_word_embeddings": false,
+ "torch_dtype": "bfloat16",
+ "transformers_version": "4.38.2",
+ "use_cache": true,
+ "use_parallel_residual": true,
+ "vocab_size": 50304
+}
diff --git a/checkpoints/checkpoint-315000/model.safetensors b/checkpoints/checkpoint-315000/model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..3880223e88a0f0586d5571bc01fe9253d37b327c
--- /dev/null
+++ b/checkpoints/checkpoint-315000/model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:de591e302fd1bdfd00a4a857adacef9b43844eae4dd3c77474ef7aebfffbb1fb
+size 324662984
diff --git a/checkpoints/checkpoint-315000/training_args.bin b/checkpoints/checkpoint-315000/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f
--- /dev/null
+++ b/checkpoints/checkpoint-315000/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5
+size 6520
diff --git a/checkpoints/checkpoint-3200/config.json b/checkpoints/checkpoint-3200/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916
--- /dev/null
+++ b/checkpoints/checkpoint-3200/config.json
@@ -0,0 +1,31 @@
+{
+ "_name_or_path": "georgeyw/gpt-2-small-init-seed-5",
+ "architectures": [
+ "GPTNeoXForCausalLM"
+ ],
+ "attention_bias": true,
+ "attention_dropout": 0.0,
+ "bos_token_id": 0,
+ "classifier_dropout": 0.1,
+ "eos_token_id": 2,
+ "hidden_act": "gelu",
+ "hidden_dropout": 0.0,
+ "hidden_size": 768,
+ "initializer_range": 0.02,
+ "intermediate_size": 3072,
+ "layer_norm_eps": 1e-05,
+ "layer_norm_epsilon": 1e-05,
+ "max_position_embeddings": 1024,
+ "model_type": "gpt_neox",
+ "num_attention_heads": 12,
+ "num_hidden_layers": 12,
+ "rope_scaling": null,
+ "rotary_emb_base": 10000,
+ "rotary_pct": 0.25,
+ "tie_word_embeddings": false,
+ "torch_dtype": "bfloat16",
+ "transformers_version": "4.38.2",
+ "use_cache": true,
+ "use_parallel_residual": true,
+ "vocab_size": 50304
+}
diff --git a/checkpoints/checkpoint-3200/model.safetensors b/checkpoints/checkpoint-3200/model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..63c157e45c8c7465449d703854b3c178f6ba4cdd
--- /dev/null
+++ b/checkpoints/checkpoint-3200/model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4241838d9599e00e9da2d1152910aa8343da248740d36c6affc7e1a0f2963d6a
+size 324662984
diff --git a/checkpoints/checkpoint-3200/training_args.bin b/checkpoints/checkpoint-3200/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f
--- /dev/null
+++ b/checkpoints/checkpoint-3200/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5
+size 6520
diff --git a/checkpoints/checkpoint-32000/config.json b/checkpoints/checkpoint-32000/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916
--- /dev/null
+++ b/checkpoints/checkpoint-32000/config.json
@@ -0,0 +1,31 @@
+{
+ "_name_or_path": "georgeyw/gpt-2-small-init-seed-5",
+ "architectures": [
+ "GPTNeoXForCausalLM"
+ ],
+ "attention_bias": true,
+ "attention_dropout": 0.0,
+ "bos_token_id": 0,
+ "classifier_dropout": 0.1,
+ "eos_token_id": 2,
+ "hidden_act": "gelu",
+ "hidden_dropout": 0.0,
+ "hidden_size": 768,
+ "initializer_range": 0.02,
+ "intermediate_size": 3072,
+ "layer_norm_eps": 1e-05,
+ "layer_norm_epsilon": 1e-05,
+ "max_position_embeddings": 1024,
+ "model_type": "gpt_neox",
+ "num_attention_heads": 12,
+ "num_hidden_layers": 12,
+ "rope_scaling": null,
+ "rotary_emb_base": 10000,
+ "rotary_pct": 0.25,
+ "tie_word_embeddings": false,
+ "torch_dtype": "bfloat16",
+ "transformers_version": "4.38.2",
+ "use_cache": true,
+ "use_parallel_residual": true,
+ "vocab_size": 50304
+}
diff --git a/checkpoints/checkpoint-32000/model.safetensors b/checkpoints/checkpoint-32000/model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..be938fa628b5409a829684bf31f2147ce3d470fc
--- /dev/null
+++ b/checkpoints/checkpoint-32000/model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:353531f2d218cc5db0062bf0f318c03a0379f2dbd063fe5c654e823cfbfe2775
+size 324662984
diff --git a/checkpoints/checkpoint-32000/training_args.bin b/checkpoints/checkpoint-32000/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f
--- /dev/null
+++ b/checkpoints/checkpoint-32000/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5
+size 6520
diff --git a/checkpoints/checkpoint-320000/config.json b/checkpoints/checkpoint-320000/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916
--- /dev/null
+++ b/checkpoints/checkpoint-320000/config.json
@@ -0,0 +1,31 @@
+{
+ "_name_or_path": "georgeyw/gpt-2-small-init-seed-5",
+ "architectures": [
+ "GPTNeoXForCausalLM"
+ ],
+ "attention_bias": true,
+ "attention_dropout": 0.0,
+ "bos_token_id": 0,
+ "classifier_dropout": 0.1,
+ "eos_token_id": 2,
+ "hidden_act": "gelu",
+ "hidden_dropout": 0.0,
+ "hidden_size": 768,
+ "initializer_range": 0.02,
+ "intermediate_size": 3072,
+ "layer_norm_eps": 1e-05,
+ "layer_norm_epsilon": 1e-05,
+ "max_position_embeddings": 1024,
+ "model_type": "gpt_neox",
+ "num_attention_heads": 12,
+ "num_hidden_layers": 12,
+ "rope_scaling": null,
+ "rotary_emb_base": 10000,
+ "rotary_pct": 0.25,
+ "tie_word_embeddings": false,
+ "torch_dtype": "bfloat16",
+ "transformers_version": "4.38.2",
+ "use_cache": true,
+ "use_parallel_residual": true,
+ "vocab_size": 50304
+}
diff --git a/checkpoints/checkpoint-320000/model.safetensors b/checkpoints/checkpoint-320000/model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..52ca3c2653d03b47efd2d65c9a31a32389558e03
--- /dev/null
+++ b/checkpoints/checkpoint-320000/model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:99d79c65f9777e5cdb1ea9c108d9baf0b0ef4cf03cf1c809c73ea87c9a15b5cc
+size 324662984
diff --git a/checkpoints/checkpoint-320000/training_args.bin b/checkpoints/checkpoint-320000/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f
--- /dev/null
+++ b/checkpoints/checkpoint-320000/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5
+size 6520
diff --git a/checkpoints/checkpoint-32500/config.json b/checkpoints/checkpoint-32500/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916
--- /dev/null
+++ b/checkpoints/checkpoint-32500/config.json
@@ -0,0 +1,31 @@
+{
+ "_name_or_path": "georgeyw/gpt-2-small-init-seed-5",
+ "architectures": [
+ "GPTNeoXForCausalLM"
+ ],
+ "attention_bias": true,
+ "attention_dropout": 0.0,
+ "bos_token_id": 0,
+ "classifier_dropout": 0.1,
+ "eos_token_id": 2,
+ "hidden_act": "gelu",
+ "hidden_dropout": 0.0,
+ "hidden_size": 768,
+ "initializer_range": 0.02,
+ "intermediate_size": 3072,
+ "layer_norm_eps": 1e-05,
+ "layer_norm_epsilon": 1e-05,
+ "max_position_embeddings": 1024,
+ "model_type": "gpt_neox",
+ "num_attention_heads": 12,
+ "num_hidden_layers": 12,
+ "rope_scaling": null,
+ "rotary_emb_base": 10000,
+ "rotary_pct": 0.25,
+ "tie_word_embeddings": false,
+ "torch_dtype": "bfloat16",
+ "transformers_version": "4.38.2",
+ "use_cache": true,
+ "use_parallel_residual": true,
+ "vocab_size": 50304
+}
diff --git a/checkpoints/checkpoint-32500/model.safetensors b/checkpoints/checkpoint-32500/model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..2b3cf75f8e8f3d0f7e5fe543ffc2d029c300112a
--- /dev/null
+++ b/checkpoints/checkpoint-32500/model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:cbe9a91aa0e79882bc0ed06ce238f3e3afc9d0cc8d7bcef2567d4e514eb4d1e2
+size 324662984
diff --git a/checkpoints/checkpoint-32500/training_args.bin b/checkpoints/checkpoint-32500/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f
--- /dev/null
+++ b/checkpoints/checkpoint-32500/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5
+size 6520
diff --git a/checkpoints/checkpoint-325000/config.json b/checkpoints/checkpoint-325000/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916
--- /dev/null
+++ b/checkpoints/checkpoint-325000/config.json
@@ -0,0 +1,31 @@
+{
+ "_name_or_path": "georgeyw/gpt-2-small-init-seed-5",
+ "architectures": [
+ "GPTNeoXForCausalLM"
+ ],
+ "attention_bias": true,
+ "attention_dropout": 0.0,
+ "bos_token_id": 0,
+ "classifier_dropout": 0.1,
+ "eos_token_id": 2,
+ "hidden_act": "gelu",
+ "hidden_dropout": 0.0,
+ "hidden_size": 768,
+ "initializer_range": 0.02,
+ "intermediate_size": 3072,
+ "layer_norm_eps": 1e-05,
+ "layer_norm_epsilon": 1e-05,
+ "max_position_embeddings": 1024,
+ "model_type": "gpt_neox",
+ "num_attention_heads": 12,
+ "num_hidden_layers": 12,
+ "rope_scaling": null,
+ "rotary_emb_base": 10000,
+ "rotary_pct": 0.25,
+ "tie_word_embeddings": false,
+ "torch_dtype": "bfloat16",
+ "transformers_version": "4.38.2",
+ "use_cache": true,
+ "use_parallel_residual": true,
+ "vocab_size": 50304
+}
diff --git a/checkpoints/checkpoint-325000/model.safetensors b/checkpoints/checkpoint-325000/model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..371be64c8fa20fc235b8a3f5be23f1e8714a5cd1
--- /dev/null
+++ b/checkpoints/checkpoint-325000/model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b2423b5feed006f6aef43786f88e2a9c60a116ba415f10db7b439fb1f8dbbaa7
+size 324662984
diff --git a/checkpoints/checkpoint-325000/training_args.bin b/checkpoints/checkpoint-325000/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f
--- /dev/null
+++ b/checkpoints/checkpoint-325000/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5
+size 6520
diff --git a/checkpoints/checkpoint-3300/config.json b/checkpoints/checkpoint-3300/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916
--- /dev/null
+++ b/checkpoints/checkpoint-3300/config.json
@@ -0,0 +1,31 @@
+{
+ "_name_or_path": "georgeyw/gpt-2-small-init-seed-5",
+ "architectures": [
+ "GPTNeoXForCausalLM"
+ ],
+ "attention_bias": true,
+ "attention_dropout": 0.0,
+ "bos_token_id": 0,
+ "classifier_dropout": 0.1,
+ "eos_token_id": 2,
+ "hidden_act": "gelu",
+ "hidden_dropout": 0.0,
+ "hidden_size": 768,
+ "initializer_range": 0.02,
+ "intermediate_size": 3072,
+ "layer_norm_eps": 1e-05,
+ "layer_norm_epsilon": 1e-05,
+ "max_position_embeddings": 1024,
+ "model_type": "gpt_neox",
+ "num_attention_heads": 12,
+ "num_hidden_layers": 12,
+ "rope_scaling": null,
+ "rotary_emb_base": 10000,
+ "rotary_pct": 0.25,
+ "tie_word_embeddings": false,
+ "torch_dtype": "bfloat16",
+ "transformers_version": "4.38.2",
+ "use_cache": true,
+ "use_parallel_residual": true,
+ "vocab_size": 50304
+}
diff --git a/checkpoints/checkpoint-3300/model.safetensors b/checkpoints/checkpoint-3300/model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..2774d8e5276b53965427d3c13d94aeaba6eb08be
--- /dev/null
+++ b/checkpoints/checkpoint-3300/model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a341de7df9f36fc12f788059d9a091edda7f70412f1df83f27a2362a960e4021
+size 324662984
diff --git a/checkpoints/checkpoint-3300/training_args.bin b/checkpoints/checkpoint-3300/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f
--- /dev/null
+++ b/checkpoints/checkpoint-3300/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5
+size 6520
diff --git a/checkpoints/checkpoint-33000/config.json b/checkpoints/checkpoint-33000/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916
--- /dev/null
+++ b/checkpoints/checkpoint-33000/config.json
@@ -0,0 +1,31 @@
+{
+ "_name_or_path": "georgeyw/gpt-2-small-init-seed-5",
+ "architectures": [
+ "GPTNeoXForCausalLM"
+ ],
+ "attention_bias": true,
+ "attention_dropout": 0.0,
+ "bos_token_id": 0,
+ "classifier_dropout": 0.1,
+ "eos_token_id": 2,
+ "hidden_act": "gelu",
+ "hidden_dropout": 0.0,
+ "hidden_size": 768,
+ "initializer_range": 0.02,
+ "intermediate_size": 3072,
+ "layer_norm_eps": 1e-05,
+ "layer_norm_epsilon": 1e-05,
+ "max_position_embeddings": 1024,
+ "model_type": "gpt_neox",
+ "num_attention_heads": 12,
+ "num_hidden_layers": 12,
+ "rope_scaling": null,
+ "rotary_emb_base": 10000,
+ "rotary_pct": 0.25,
+ "tie_word_embeddings": false,
+ "torch_dtype": "bfloat16",
+ "transformers_version": "4.38.2",
+ "use_cache": true,
+ "use_parallel_residual": true,
+ "vocab_size": 50304
+}
diff --git a/checkpoints/checkpoint-33000/model.safetensors b/checkpoints/checkpoint-33000/model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..ba47f1bfa82d689b1b9fc060c21c0f93c9bfd5a3
--- /dev/null
+++ b/checkpoints/checkpoint-33000/model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4cc67efd0bd4a945b2ffe11b212ab28b70914634fa55cfd8cb33f9b65192f522
+size 324662984
diff --git a/checkpoints/checkpoint-33000/training_args.bin b/checkpoints/checkpoint-33000/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f
--- /dev/null
+++ b/checkpoints/checkpoint-33000/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5
+size 6520
diff --git a/checkpoints/checkpoint-330000/config.json b/checkpoints/checkpoint-330000/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916
--- /dev/null
+++ b/checkpoints/checkpoint-330000/config.json
@@ -0,0 +1,31 @@
+{
+ "_name_or_path": "georgeyw/gpt-2-small-init-seed-5",
+ "architectures": [
+ "GPTNeoXForCausalLM"
+ ],
+ "attention_bias": true,
+ "attention_dropout": 0.0,
+ "bos_token_id": 0,
+ "classifier_dropout": 0.1,
+ "eos_token_id": 2,
+ "hidden_act": "gelu",
+ "hidden_dropout": 0.0,
+ "hidden_size": 768,
+ "initializer_range": 0.02,
+ "intermediate_size": 3072,
+ "layer_norm_eps": 1e-05,
+ "layer_norm_epsilon": 1e-05,
+ "max_position_embeddings": 1024,
+ "model_type": "gpt_neox",
+ "num_attention_heads": 12,
+ "num_hidden_layers": 12,
+ "rope_scaling": null,
+ "rotary_emb_base": 10000,
+ "rotary_pct": 0.25,
+ "tie_word_embeddings": false,
+ "torch_dtype": "bfloat16",
+ "transformers_version": "4.38.2",
+ "use_cache": true,
+ "use_parallel_residual": true,
+ "vocab_size": 50304
+}
diff --git a/checkpoints/checkpoint-330000/model.safetensors b/checkpoints/checkpoint-330000/model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..f672605dead0f324a6e83ea0d3a228b2b067e6df
--- /dev/null
+++ b/checkpoints/checkpoint-330000/model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b257c4a3d1de4e558586710a30ea7b75d16355f7bca3fb5d363d0fffb9ef008e
+size 324662984
diff --git a/checkpoints/checkpoint-330000/training_args.bin b/checkpoints/checkpoint-330000/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f
--- /dev/null
+++ b/checkpoints/checkpoint-330000/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5
+size 6520
diff --git a/checkpoints/checkpoint-33500/config.json b/checkpoints/checkpoint-33500/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916
--- /dev/null
+++ b/checkpoints/checkpoint-33500/config.json
@@ -0,0 +1,31 @@
+{
+ "_name_or_path": "georgeyw/gpt-2-small-init-seed-5",
+ "architectures": [
+ "GPTNeoXForCausalLM"
+ ],
+ "attention_bias": true,
+ "attention_dropout": 0.0,
+ "bos_token_id": 0,
+ "classifier_dropout": 0.1,
+ "eos_token_id": 2,
+ "hidden_act": "gelu",
+ "hidden_dropout": 0.0,
+ "hidden_size": 768,
+ "initializer_range": 0.02,
+ "intermediate_size": 3072,
+ "layer_norm_eps": 1e-05,
+ "layer_norm_epsilon": 1e-05,
+ "max_position_embeddings": 1024,
+ "model_type": "gpt_neox",
+ "num_attention_heads": 12,
+ "num_hidden_layers": 12,
+ "rope_scaling": null,
+ "rotary_emb_base": 10000,
+ "rotary_pct": 0.25,
+ "tie_word_embeddings": false,
+ "torch_dtype": "bfloat16",
+ "transformers_version": "4.38.2",
+ "use_cache": true,
+ "use_parallel_residual": true,
+ "vocab_size": 50304
+}
diff --git a/checkpoints/checkpoint-33500/model.safetensors b/checkpoints/checkpoint-33500/model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..35f00b22a0cd500de1e277fd435fc7f12b210c47
--- /dev/null
+++ b/checkpoints/checkpoint-33500/model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8094aa3b7751fb96257ee60a2a99b367de8223cc77e9ac2bdae65af53fe8fb25
+size 324662984
diff --git a/checkpoints/checkpoint-33500/training_args.bin b/checkpoints/checkpoint-33500/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f
--- /dev/null
+++ b/checkpoints/checkpoint-33500/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5
+size 6520
diff --git a/checkpoints/checkpoint-335000/config.json b/checkpoints/checkpoint-335000/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916
--- /dev/null
+++ b/checkpoints/checkpoint-335000/config.json
@@ -0,0 +1,31 @@
+{
+ "_name_or_path": "georgeyw/gpt-2-small-init-seed-5",
+ "architectures": [
+ "GPTNeoXForCausalLM"
+ ],
+ "attention_bias": true,
+ "attention_dropout": 0.0,
+ "bos_token_id": 0,
+ "classifier_dropout": 0.1,
+ "eos_token_id": 2,
+ "hidden_act": "gelu",
+ "hidden_dropout": 0.0,
+ "hidden_size": 768,
+ "initializer_range": 0.02,
+ "intermediate_size": 3072,
+ "layer_norm_eps": 1e-05,
+ "layer_norm_epsilon": 1e-05,
+ "max_position_embeddings": 1024,
+ "model_type": "gpt_neox",
+ "num_attention_heads": 12,
+ "num_hidden_layers": 12,
+ "rope_scaling": null,
+ "rotary_emb_base": 10000,
+ "rotary_pct": 0.25,
+ "tie_word_embeddings": false,
+ "torch_dtype": "bfloat16",
+ "transformers_version": "4.38.2",
+ "use_cache": true,
+ "use_parallel_residual": true,
+ "vocab_size": 50304
+}
diff --git a/checkpoints/checkpoint-335000/model.safetensors b/checkpoints/checkpoint-335000/model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..8973de867b70388a6c25d457c22f85853dbbd188
--- /dev/null
+++ b/checkpoints/checkpoint-335000/model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f6cf6cb43213efcf90a6428ceb6576c64e2aa53acddbb20fa22864747edebdfe
+size 324662984
diff --git a/checkpoints/checkpoint-335000/training_args.bin b/checkpoints/checkpoint-335000/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f
--- /dev/null
+++ b/checkpoints/checkpoint-335000/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5
+size 6520
diff --git a/checkpoints/checkpoint-3400/config.json b/checkpoints/checkpoint-3400/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916
--- /dev/null
+++ b/checkpoints/checkpoint-3400/config.json
@@ -0,0 +1,31 @@
+{
+ "_name_or_path": "georgeyw/gpt-2-small-init-seed-5",
+ "architectures": [
+ "GPTNeoXForCausalLM"
+ ],
+ "attention_bias": true,
+ "attention_dropout": 0.0,
+ "bos_token_id": 0,
+ "classifier_dropout": 0.1,
+ "eos_token_id": 2,
+ "hidden_act": "gelu",
+ "hidden_dropout": 0.0,
+ "hidden_size": 768,
+ "initializer_range": 0.02,
+ "intermediate_size": 3072,
+ "layer_norm_eps": 1e-05,
+ "layer_norm_epsilon": 1e-05,
+ "max_position_embeddings": 1024,
+ "model_type": "gpt_neox",
+ "num_attention_heads": 12,
+ "num_hidden_layers": 12,
+ "rope_scaling": null,
+ "rotary_emb_base": 10000,
+ "rotary_pct": 0.25,
+ "tie_word_embeddings": false,
+ "torch_dtype": "bfloat16",
+ "transformers_version": "4.38.2",
+ "use_cache": true,
+ "use_parallel_residual": true,
+ "vocab_size": 50304
+}
diff --git a/checkpoints/checkpoint-3400/model.safetensors b/checkpoints/checkpoint-3400/model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..504250d9b458109a0a867e93ff962326815b59bd
--- /dev/null
+++ b/checkpoints/checkpoint-3400/model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:dcee043734e00eac772dba64f70784940d87453c13f247ad97d881fb494214ee
+size 324662984
diff --git a/checkpoints/checkpoint-3400/training_args.bin b/checkpoints/checkpoint-3400/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f
--- /dev/null
+++ b/checkpoints/checkpoint-3400/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5
+size 6520
diff --git a/checkpoints/checkpoint-34000/config.json b/checkpoints/checkpoint-34000/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916
--- /dev/null
+++ b/checkpoints/checkpoint-34000/config.json
@@ -0,0 +1,31 @@
+{
+ "_name_or_path": "georgeyw/gpt-2-small-init-seed-5",
+ "architectures": [
+ "GPTNeoXForCausalLM"
+ ],
+ "attention_bias": true,
+ "attention_dropout": 0.0,
+ "bos_token_id": 0,
+ "classifier_dropout": 0.1,
+ "eos_token_id": 2,
+ "hidden_act": "gelu",
+ "hidden_dropout": 0.0,
+ "hidden_size": 768,
+ "initializer_range": 0.02,
+ "intermediate_size": 3072,
+ "layer_norm_eps": 1e-05,
+ "layer_norm_epsilon": 1e-05,
+ "max_position_embeddings": 1024,
+ "model_type": "gpt_neox",
+ "num_attention_heads": 12,
+ "num_hidden_layers": 12,
+ "rope_scaling": null,
+ "rotary_emb_base": 10000,
+ "rotary_pct": 0.25,
+ "tie_word_embeddings": false,
+ "torch_dtype": "bfloat16",
+ "transformers_version": "4.38.2",
+ "use_cache": true,
+ "use_parallel_residual": true,
+ "vocab_size": 50304
+}
diff --git a/checkpoints/checkpoint-34000/model.safetensors b/checkpoints/checkpoint-34000/model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..66d19ca0236c53ef01ca0d54cc5bf01be58ee683
--- /dev/null
+++ b/checkpoints/checkpoint-34000/model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a80000e2e9a425b26e252039ba3ed9c0a7dc36b6a5354f4a98ac3f89b382e1b9
+size 324662984
diff --git a/checkpoints/checkpoint-34000/training_args.bin b/checkpoints/checkpoint-34000/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f
--- /dev/null
+++ b/checkpoints/checkpoint-34000/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5
+size 6520
diff --git a/checkpoints/checkpoint-340000/config.json b/checkpoints/checkpoint-340000/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916
--- /dev/null
+++ b/checkpoints/checkpoint-340000/config.json
@@ -0,0 +1,31 @@
+{
+ "_name_or_path": "georgeyw/gpt-2-small-init-seed-5",
+ "architectures": [
+ "GPTNeoXForCausalLM"
+ ],
+ "attention_bias": true,
+ "attention_dropout": 0.0,
+ "bos_token_id": 0,
+ "classifier_dropout": 0.1,
+ "eos_token_id": 2,
+ "hidden_act": "gelu",
+ "hidden_dropout": 0.0,
+ "hidden_size": 768,
+ "initializer_range": 0.02,
+ "intermediate_size": 3072,
+ "layer_norm_eps": 1e-05,
+ "layer_norm_epsilon": 1e-05,
+ "max_position_embeddings": 1024,
+ "model_type": "gpt_neox",
+ "num_attention_heads": 12,
+ "num_hidden_layers": 12,
+ "rope_scaling": null,
+ "rotary_emb_base": 10000,
+ "rotary_pct": 0.25,
+ "tie_word_embeddings": false,
+ "torch_dtype": "bfloat16",
+ "transformers_version": "4.38.2",
+ "use_cache": true,
+ "use_parallel_residual": true,
+ "vocab_size": 50304
+}
diff --git a/checkpoints/checkpoint-340000/model.safetensors b/checkpoints/checkpoint-340000/model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..e46f71e68477c0059e8c0dbbd3f2715d7a14fd64
--- /dev/null
+++ b/checkpoints/checkpoint-340000/model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:77c61ea6c4b8aa0f3538979825889bc93609e1eb469e87df31a866d1d959af1c
+size 324662984
diff --git a/checkpoints/checkpoint-340000/training_args.bin b/checkpoints/checkpoint-340000/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f
--- /dev/null
+++ b/checkpoints/checkpoint-340000/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5
+size 6520
diff --git a/checkpoints/checkpoint-34500/config.json b/checkpoints/checkpoint-34500/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916
--- /dev/null
+++ b/checkpoints/checkpoint-34500/config.json
@@ -0,0 +1,31 @@
+{
+ "_name_or_path": "georgeyw/gpt-2-small-init-seed-5",
+ "architectures": [
+ "GPTNeoXForCausalLM"
+ ],
+ "attention_bias": true,
+ "attention_dropout": 0.0,
+ "bos_token_id": 0,
+ "classifier_dropout": 0.1,
+ "eos_token_id": 2,
+ "hidden_act": "gelu",
+ "hidden_dropout": 0.0,
+ "hidden_size": 768,
+ "initializer_range": 0.02,
+ "intermediate_size": 3072,
+ "layer_norm_eps": 1e-05,
+ "layer_norm_epsilon": 1e-05,
+ "max_position_embeddings": 1024,
+ "model_type": "gpt_neox",
+ "num_attention_heads": 12,
+ "num_hidden_layers": 12,
+ "rope_scaling": null,
+ "rotary_emb_base": 10000,
+ "rotary_pct": 0.25,
+ "tie_word_embeddings": false,
+ "torch_dtype": "bfloat16",
+ "transformers_version": "4.38.2",
+ "use_cache": true,
+ "use_parallel_residual": true,
+ "vocab_size": 50304
+}
diff --git a/checkpoints/checkpoint-34500/model.safetensors b/checkpoints/checkpoint-34500/model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..1b558ada2af092d9e1bbb354ca36e5f092a734bc
--- /dev/null
+++ b/checkpoints/checkpoint-34500/model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3ba899037fd1e515c10a6c2508aca89cc251910009736841be2928b7eb3ef02f
+size 324662984
diff --git a/checkpoints/checkpoint-34500/training_args.bin b/checkpoints/checkpoint-34500/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f
--- /dev/null
+++ b/checkpoints/checkpoint-34500/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5
+size 6520
diff --git a/checkpoints/checkpoint-345000/config.json b/checkpoints/checkpoint-345000/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916
--- /dev/null
+++ b/checkpoints/checkpoint-345000/config.json
@@ -0,0 +1,31 @@
+{
+ "_name_or_path": "georgeyw/gpt-2-small-init-seed-5",
+ "architectures": [
+ "GPTNeoXForCausalLM"
+ ],
+ "attention_bias": true,
+ "attention_dropout": 0.0,
+ "bos_token_id": 0,
+ "classifier_dropout": 0.1,
+ "eos_token_id": 2,
+ "hidden_act": "gelu",
+ "hidden_dropout": 0.0,
+ "hidden_size": 768,
+ "initializer_range": 0.02,
+ "intermediate_size": 3072,
+ "layer_norm_eps": 1e-05,
+ "layer_norm_epsilon": 1e-05,
+ "max_position_embeddings": 1024,
+ "model_type": "gpt_neox",
+ "num_attention_heads": 12,
+ "num_hidden_layers": 12,
+ "rope_scaling": null,
+ "rotary_emb_base": 10000,
+ "rotary_pct": 0.25,
+ "tie_word_embeddings": false,
+ "torch_dtype": "bfloat16",
+ "transformers_version": "4.38.2",
+ "use_cache": true,
+ "use_parallel_residual": true,
+ "vocab_size": 50304
+}
diff --git a/checkpoints/checkpoint-345000/model.safetensors b/checkpoints/checkpoint-345000/model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..4aef2dc69a1cc263c5a5b3e471580ca844047775
--- /dev/null
+++ b/checkpoints/checkpoint-345000/model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:85d588bde1a06095cd2ccc9d8b7cb7c282136d1b379ebc6083dd79fefa5ed840
+size 324662984
diff --git a/checkpoints/checkpoint-345000/training_args.bin b/checkpoints/checkpoint-345000/training_args.bin
new file mode 100644
index
0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-345000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-3500/config.json b/checkpoints/checkpoint-3500/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-3500/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-3500/model.safetensors b/checkpoints/checkpoint-3500/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..2d6fc5da421a7deeffc8bc7b5318a1f3eaee9d6f --- /dev/null +++ b/checkpoints/checkpoint-3500/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e83515a82065584ae07ad1a4b3d9ca6982d8f5389f2f7b14d30972ca4c6249bf +size 324662984 diff --git a/checkpoints/checkpoint-3500/training_args.bin b/checkpoints/checkpoint-3500/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-3500/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-35000/config.json b/checkpoints/checkpoint-35000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-35000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-35000/model.safetensors b/checkpoints/checkpoint-35000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..61a7bbd909d5860f48b6dfaa2a9b0c6299be43fe --- /dev/null +++ 
b/checkpoints/checkpoint-35000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b8a30547a82f2d9017513aaaf0eea3d1c4af0d75b1a6d797cc906c4b1ae0cf5f +size 324662984 diff --git a/checkpoints/checkpoint-35000/training_args.bin b/checkpoints/checkpoint-35000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-35000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-350000/config.json b/checkpoints/checkpoint-350000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-350000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-350000/model.safetensors b/checkpoints/checkpoint-350000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..7a3d0406539317e06fa60c8ed6e4b7e440bab8c5 --- /dev/null +++ b/checkpoints/checkpoint-350000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:26914562ebe9f315cd0d142377a549dc00fc50ff9a88e9425d6929209ab4cf33 +size 324662984 diff --git a/checkpoints/checkpoint-350000/training_args.bin b/checkpoints/checkpoint-350000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-350000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-35500/config.json b/checkpoints/checkpoint-35500/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-35500/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + 
"transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-35500/model.safetensors b/checkpoints/checkpoint-35500/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..9838acee66f4e9ff992255a15a5b4735f0649dc0 --- /dev/null +++ b/checkpoints/checkpoint-35500/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:045d5e3ccc4541896e4a7ed5d470823676047f3b42ce9662a9b2b829c7fc1596 +size 324662984 diff --git a/checkpoints/checkpoint-35500/training_args.bin b/checkpoints/checkpoint-35500/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-35500/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-355000/config.json b/checkpoints/checkpoint-355000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-355000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-355000/model.safetensors b/checkpoints/checkpoint-355000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..e40d3f14e7889076e8760f381c8cfdf4e541878b --- /dev/null +++ b/checkpoints/checkpoint-355000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4d52060c979b5af3c539c2b5449842732c58806bc5c1dc139682f30531a211e7 +size 324662984 diff --git a/checkpoints/checkpoint-355000/training_args.bin b/checkpoints/checkpoint-355000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-355000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-3600/config.json b/checkpoints/checkpoint-3600/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-3600/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, 
+ "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-3600/model.safetensors b/checkpoints/checkpoint-3600/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..e6d49ad6291ada8146f2e4d5568708bcf836da86 --- /dev/null +++ b/checkpoints/checkpoint-3600/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:40dd60b191a2b148e01507986da3b63fe544423946a114d9357afbbe782f8853 +size 324662984 diff --git a/checkpoints/checkpoint-3600/training_args.bin b/checkpoints/checkpoint-3600/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-3600/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-36000/config.json b/checkpoints/checkpoint-36000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-36000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-36000/model.safetensors b/checkpoints/checkpoint-36000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..b32e2df1bc83900d735138372262dabd2928a702 --- /dev/null +++ b/checkpoints/checkpoint-36000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:996882c95e310e753f32180a979370b5393e1e54209658e74f680d96c477da93 +size 324662984 diff --git a/checkpoints/checkpoint-36000/training_args.bin b/checkpoints/checkpoint-36000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-36000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-360000/config.json b/checkpoints/checkpoint-360000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-360000/config.json @@ -0,0 +1,31 
@@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-360000/model.safetensors b/checkpoints/checkpoint-360000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..82cf0e3387ef79769bd53fa219a87ea6d22b24dc --- /dev/null +++ b/checkpoints/checkpoint-360000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:257d45084c16f6f6f8821b2851898d97b4b4e158df2408387902b33b52e7119c +size 324662984 diff --git a/checkpoints/checkpoint-360000/training_args.bin b/checkpoints/checkpoint-360000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-360000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-36500/config.json b/checkpoints/checkpoint-36500/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-36500/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-36500/model.safetensors b/checkpoints/checkpoint-36500/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..df6fdcab062d7c09ccd1c8779aaf497affaf37d6 --- /dev/null +++ b/checkpoints/checkpoint-36500/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:725171ea139f17b78fcae229a941f5da6b7e61c5a324d713f1a63083ee66e74c +size 324662984 diff --git a/checkpoints/checkpoint-36500/training_args.bin b/checkpoints/checkpoint-36500/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-36500/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-365000/config.json b/checkpoints/checkpoint-365000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-365000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-365000/model.safetensors b/checkpoints/checkpoint-365000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..be11d87d7dbe51b1f5706c2056954df2822c5a4b --- /dev/null +++ b/checkpoints/checkpoint-365000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c4792863f51d92d80a1e76a354a46fedf72572e34d1a4c84739a404159b6ede7 +size 324662984 diff --git a/checkpoints/checkpoint-365000/training_args.bin b/checkpoints/checkpoint-365000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-365000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-3700/config.json b/checkpoints/checkpoint-3700/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-3700/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-3700/model.safetensors b/checkpoints/checkpoint-3700/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..54d185bc566817125177cf5a3fcd0e8c51f2c4c4 --- /dev/null +++ b/checkpoints/checkpoint-3700/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:824416d9fd4cb63299baa31e5574403b67a62cfd1c3ae1a02df1eecbe6d1de02 +size 324662984 diff --git 
a/checkpoints/checkpoint-3700/training_args.bin b/checkpoints/checkpoint-3700/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-3700/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-37000/config.json b/checkpoints/checkpoint-37000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-37000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-37000/model.safetensors b/checkpoints/checkpoint-37000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..3d3bb601391fe234552e57a3208fc80e67291845 --- /dev/null +++ b/checkpoints/checkpoint-37000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0f67cfa5ad5bb8951c5f56f05a8cf05ac66b69718f6a13ff791368995730f02b +size 324662984 diff --git a/checkpoints/checkpoint-37000/training_args.bin b/checkpoints/checkpoint-37000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-37000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-370000/config.json b/checkpoints/checkpoint-370000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-370000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-370000/model.safetensors b/checkpoints/checkpoint-370000/model.safetensors 
new file mode 100644 index 0000000000000000000000000000000000000000..86c647b6875d267f953e44a9089cfde613670655 --- /dev/null +++ b/checkpoints/checkpoint-370000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:33e246a019c147b9abcbb13b6b4e630789e55684dde236ef869780d96551d3cc +size 324662984 diff --git a/checkpoints/checkpoint-370000/training_args.bin b/checkpoints/checkpoint-370000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-370000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-37500/config.json b/checkpoints/checkpoint-37500/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-37500/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-37500/model.safetensors b/checkpoints/checkpoint-37500/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..bbf3a365f7eaa541775998d98f38fb51d2323cba --- /dev/null +++ b/checkpoints/checkpoint-37500/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4f41cb5e2e7c84c9c0fec53244331467281fba77e5cfcccb16b8093fbabc71d0 +size 324662984 diff --git a/checkpoints/checkpoint-37500/training_args.bin b/checkpoints/checkpoint-37500/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-37500/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-375000/config.json b/checkpoints/checkpoint-375000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-375000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + 
"rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-375000/model.safetensors b/checkpoints/checkpoint-375000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..2697e7ddf4b7569b4b9cdf05e61f55c35fdaa905 --- /dev/null +++ b/checkpoints/checkpoint-375000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f3e7880dc38c2da3ee8b92dee005a1044af0a9efda8ed1a50cb7e8b33abea299 +size 324662984 diff --git a/checkpoints/checkpoint-375000/training_args.bin b/checkpoints/checkpoint-375000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-375000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-3800/config.json b/checkpoints/checkpoint-3800/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-3800/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-3800/model.safetensors b/checkpoints/checkpoint-3800/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..0c6fbbab108dc6bbc041c49042f96e9973e98c32 --- /dev/null +++ b/checkpoints/checkpoint-3800/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8ff5bedc7786be436283df3d167ea5d398468fe8720cb3de51e662970c3d5c72 +size 324662984 diff --git a/checkpoints/checkpoint-3800/training_args.bin b/checkpoints/checkpoint-3800/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-3800/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-38000/config.json b/checkpoints/checkpoint-38000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-38000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 
0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-38000/model.safetensors b/checkpoints/checkpoint-38000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..f2394f6adc46b0eefef0a9d0a9fe5c599e73d170 --- /dev/null +++ b/checkpoints/checkpoint-38000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9fd084894c8d864c5856477eed6fadd9d5682d01606bd31a68839cd155b90708 +size 324662984 diff --git a/checkpoints/checkpoint-38000/training_args.bin b/checkpoints/checkpoint-38000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-38000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-380000/config.json b/checkpoints/checkpoint-380000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-380000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-380000/model.safetensors b/checkpoints/checkpoint-380000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..08d24c082a64ebd621e4b9ecc15a16a529b8be3d --- /dev/null +++ b/checkpoints/checkpoint-380000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:945f370c97f835faf11e875c34aaf7cea9fe64a2e38ae24dfa5e8e7aed2ed5a9 +size 324662984 diff --git a/checkpoints/checkpoint-380000/training_args.bin b/checkpoints/checkpoint-380000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-380000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-38500/config.json b/checkpoints/checkpoint-38500/config.json new file mode 100644 index 
0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-38500/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-38500/model.safetensors b/checkpoints/checkpoint-38500/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..3fcaa1f6b51e762ec988ad9762eac2937c2a8c1d --- /dev/null +++ b/checkpoints/checkpoint-38500/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d6b2b990155b4709bf25e7773eb3c8508065c9d21a62bd88a7128b083f15b690 +size 324662984 diff --git a/checkpoints/checkpoint-38500/training_args.bin b/checkpoints/checkpoint-38500/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-38500/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-385000/config.json b/checkpoints/checkpoint-385000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-385000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-385000/model.safetensors b/checkpoints/checkpoint-385000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..fd5c6468d8160fe50fe1cffb232a0f762060e5ec --- /dev/null +++ b/checkpoints/checkpoint-385000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:cce3215bcf95c6ebbb0e39b0e8ca6e4bfb9f7d49b7c7a4579a746fd77787c8a3 +size 324662984 diff --git a/checkpoints/checkpoint-385000/training_args.bin b/checkpoints/checkpoint-385000/training_args.bin new file mode 100644 index 
0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-385000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-3900/config.json b/checkpoints/checkpoint-3900/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-3900/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-3900/model.safetensors b/checkpoints/checkpoint-3900/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..b65caa308a94f9b165db194d7434fff480cd507f --- /dev/null +++ b/checkpoints/checkpoint-3900/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:47b16de8298f582733585396568ea74e7bb152e034cd3a5dbaa335a6a332e6a6 +size 324662984 diff --git a/checkpoints/checkpoint-3900/training_args.bin b/checkpoints/checkpoint-3900/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-3900/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-39000/config.json b/checkpoints/checkpoint-39000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-39000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-39000/model.safetensors b/checkpoints/checkpoint-39000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..2aaaa894263b7c1e9cc7e8a619ed652d391a25e3 --- /dev/null +++ 
b/checkpoints/checkpoint-39000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:04c8d8582be1b94179ac2d862a61d67681f5faf218d9274f3d81282e265f58bb +size 324662984 diff --git a/checkpoints/checkpoint-39000/training_args.bin b/checkpoints/checkpoint-39000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-39000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-390000/config.json b/checkpoints/checkpoint-390000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-390000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-390000/model.safetensors b/checkpoints/checkpoint-390000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..5e75ef51e5a0d64bd91bf206244d9ed514ce447c --- /dev/null +++ b/checkpoints/checkpoint-390000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:faaf9766d616b9ccad7da54d73d45bbd358db003cc0b90ccfa9dde4b56b67b77 +size 324662984 diff --git a/checkpoints/checkpoint-390000/training_args.bin b/checkpoints/checkpoint-390000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-390000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-39500/config.json b/checkpoints/checkpoint-39500/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-39500/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + 
"transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-39500/model.safetensors b/checkpoints/checkpoint-39500/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..094731c41a7c866bd90e4e1d8f0cf9aad96fe8ed --- /dev/null +++ b/checkpoints/checkpoint-39500/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3f8d86cbd01bd9611c4346224a430a47bafceb666508840e69bb2a8f0d3bb652 +size 324662984 diff --git a/checkpoints/checkpoint-39500/training_args.bin b/checkpoints/checkpoint-39500/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-39500/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-395000/config.json b/checkpoints/checkpoint-395000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-395000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-395000/model.safetensors b/checkpoints/checkpoint-395000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..585f81f9842f4813305b19ece71a5bdc243ee2ed --- /dev/null +++ b/checkpoints/checkpoint-395000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5c2c3d543ccb06973fdc88907b2f5432ff6ecae6f48a196924fbabb6d2d2b9a4 +size 324662984 diff --git a/checkpoints/checkpoint-395000/training_args.bin b/checkpoints/checkpoint-395000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-395000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-400/config.json b/checkpoints/checkpoint-400/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-400/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + 
"intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-400/model.safetensors b/checkpoints/checkpoint-400/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..300b1759c5688fe8e97b91f024564a4c34e12431 --- /dev/null +++ b/checkpoints/checkpoint-400/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5a95c1f1795b16870b338f833b651ae10e56240724458b0a39834bc31011f037 +size 324662984 diff --git a/checkpoints/checkpoint-400/training_args.bin b/checkpoints/checkpoint-400/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-400/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-4000/config.json b/checkpoints/checkpoint-4000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-4000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-4000/model.safetensors b/checkpoints/checkpoint-4000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..075995c63be63dc811db1310de623b7515279904 --- /dev/null +++ b/checkpoints/checkpoint-4000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ff18df378f7dc643a73e91dfb9cc5296b54bb347eb89beb6a47334825a3f72b7 +size 324662984 diff --git a/checkpoints/checkpoint-4000/training_args.bin b/checkpoints/checkpoint-4000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-4000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-40000/config.json b/checkpoints/checkpoint-40000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-40000/config.json @@ -0,0 +1,31 @@ +{ + 
"_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-40000/model.safetensors b/checkpoints/checkpoint-40000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..5b6bacfbe7c9f1c4c4a3e0d739a90600dacd1bb4 --- /dev/null +++ b/checkpoints/checkpoint-40000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:63ad6aa3b0aed0428f6536115f2a43f10fbcdc77e88ce482f062629e60c7c43c +size 324662984 diff --git a/checkpoints/checkpoint-40000/training_args.bin b/checkpoints/checkpoint-40000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-40000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-400000/config.json b/checkpoints/checkpoint-400000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-400000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-400000/model.safetensors b/checkpoints/checkpoint-400000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..0ce59c352d1c541a58760923b8cfee88363ed2a7 --- /dev/null +++ b/checkpoints/checkpoint-400000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e5013ed295665b427d16b4845d444b749796bd23f8a7a17c2201a49fa134f78e +size 324662984 diff --git a/checkpoints/checkpoint-400000/training_args.bin b/checkpoints/checkpoint-400000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-400000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-40500/config.json b/checkpoints/checkpoint-40500/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-40500/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-40500/model.safetensors b/checkpoints/checkpoint-40500/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..b437a409184302b0bb39b956cbe85dddad93b53e --- /dev/null +++ b/checkpoints/checkpoint-40500/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8f60160af1317beb0a1f061495e3de1383a6b25f345294f9ad4fb9d623ce9515 +size 324662984 diff --git a/checkpoints/checkpoint-40500/training_args.bin b/checkpoints/checkpoint-40500/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-40500/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-405000/config.json b/checkpoints/checkpoint-405000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-405000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-405000/model.safetensors b/checkpoints/checkpoint-405000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..94b25b5c513ed0339920345d569483f80da18a23 --- /dev/null +++ b/checkpoints/checkpoint-405000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:750a77f084bea5faef90fdfe7f352b5feb02a689486b5f9241600d419826ab29 +size 324662984 diff 
--git a/checkpoints/checkpoint-405000/training_args.bin b/checkpoints/checkpoint-405000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-405000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-4100/config.json b/checkpoints/checkpoint-4100/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-4100/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-4100/model.safetensors b/checkpoints/checkpoint-4100/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..bf3061819b51a33ea69a8b4a488b076478f83fe6 --- /dev/null +++ b/checkpoints/checkpoint-4100/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b8d8387c10427e51f258543a7760259042a35fc50bda1a7ae3375ef415d5a2c2 +size 324662984 diff --git a/checkpoints/checkpoint-4100/training_args.bin b/checkpoints/checkpoint-4100/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-4100/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-41000/config.json b/checkpoints/checkpoint-41000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-41000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-41000/model.safetensors b/checkpoints/checkpoint-41000/model.safetensors new 
file mode 100644 index 0000000000000000000000000000000000000000..8e8a53b2cf7f44c9c0c2dbe4221a6804a87ee5ca --- /dev/null +++ b/checkpoints/checkpoint-41000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b7a051f3db26c04066f0556e03e8df36095e91a4bf47d0a740ccb8745c1af61d +size 324662984 diff --git a/checkpoints/checkpoint-41000/training_args.bin b/checkpoints/checkpoint-41000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-41000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-410000/config.json b/checkpoints/checkpoint-410000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-410000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-410000/model.safetensors b/checkpoints/checkpoint-410000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..b5d31be0a3a741327b40024457b376aa26f646a3 --- /dev/null +++ b/checkpoints/checkpoint-410000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a14a436b1902ebc49d373f71ffd1966f1195843ec113edd0343887824f7786f6 +size 324662984 diff --git a/checkpoints/checkpoint-410000/training_args.bin b/checkpoints/checkpoint-410000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-410000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-41500/config.json b/checkpoints/checkpoint-41500/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-41500/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + 
"rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-41500/model.safetensors b/checkpoints/checkpoint-41500/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..b1f863c8c4d8057f87183727b01de17b4d9a2234 --- /dev/null +++ b/checkpoints/checkpoint-41500/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:dea005d0b4c5a7d1c9ad40e73735fd4fbe735e70dbcf4864d962211d69c82693 +size 324662984 diff --git a/checkpoints/checkpoint-41500/training_args.bin b/checkpoints/checkpoint-41500/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-41500/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-415000/config.json b/checkpoints/checkpoint-415000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-415000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-415000/model.safetensors b/checkpoints/checkpoint-415000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..2ea0a1dec0545be3f74ca7f4f63b66cd8e80decd --- /dev/null +++ b/checkpoints/checkpoint-415000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6bf817d25dcb890d3b0b0f4674a38b4263dfb9cdcc589cd179966cd8f397067e +size 324662984 diff --git a/checkpoints/checkpoint-415000/training_args.bin b/checkpoints/checkpoint-415000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-415000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-4200/config.json b/checkpoints/checkpoint-4200/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-4200/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + 
"classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-4200/model.safetensors b/checkpoints/checkpoint-4200/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..f38e6d4b338c5f147dbbff24f61613dc1ff7bf7e --- /dev/null +++ b/checkpoints/checkpoint-4200/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5cc7ccab38a7413e867ccd4f97f4a9b5102e99c6c811a7499690e9ce358d9deb +size 324662984 diff --git a/checkpoints/checkpoint-4200/training_args.bin b/checkpoints/checkpoint-4200/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-4200/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-42000/config.json b/checkpoints/checkpoint-42000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-42000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-42000/model.safetensors b/checkpoints/checkpoint-42000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..b12d85508024962d8f620ca872765a03b14d8449 --- /dev/null +++ b/checkpoints/checkpoint-42000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:18eb81a1b704ae264326ebd299e8e0838f951d1ff38392971b0375e765b22f6b +size 324662984 diff --git a/checkpoints/checkpoint-42000/training_args.bin b/checkpoints/checkpoint-42000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-42000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-420000/config.json b/checkpoints/checkpoint-420000/config.json new file mode 100644 index 
0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-420000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-420000/model.safetensors b/checkpoints/checkpoint-420000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..02a2e7c7af7fc9ad103986150315e7d3c12005ff --- /dev/null +++ b/checkpoints/checkpoint-420000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ca808da0cc3e62a3077a3f248be564f3a7051a903a2c749ca29f1ca76322a453 +size 324662984 diff --git a/checkpoints/checkpoint-420000/training_args.bin b/checkpoints/checkpoint-420000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-420000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-42500/config.json b/checkpoints/checkpoint-42500/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-42500/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-42500/model.safetensors b/checkpoints/checkpoint-42500/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..e1bc02babfcf2317e3c1003b2c692fb825c3dde4 --- /dev/null +++ b/checkpoints/checkpoint-42500/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:74f241003308584f1dfc69fab385c1ce0d0dc1e014457282cec2afe3628e8ed5 +size 324662984 diff --git a/checkpoints/checkpoint-42500/training_args.bin b/checkpoints/checkpoint-42500/training_args.bin new file mode 100644 index 
0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-42500/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-425000/config.json b/checkpoints/checkpoint-425000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-425000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-425000/model.safetensors b/checkpoints/checkpoint-425000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..e26b43bc0f1b1d6cba851f82461b42bacaffed36 --- /dev/null +++ b/checkpoints/checkpoint-425000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7f9c73c92f9da9addb55ae3c3e2bfecc1ea0e2f2956ec4479ee732dc4854c680 +size 324662984 diff --git a/checkpoints/checkpoint-425000/training_args.bin b/checkpoints/checkpoint-425000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-425000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-4300/config.json b/checkpoints/checkpoint-4300/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-4300/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-4300/model.safetensors b/checkpoints/checkpoint-4300/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..c92ca0d237810ecafcfb054ec5a4eac6ac2bd32e --- /dev/null 
+++ b/checkpoints/checkpoint-4300/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6038fd7b87254fd4bd5911299ebfcd174577eb665daa13deb24581446fcea7de +size 324662984 diff --git a/checkpoints/checkpoint-4300/training_args.bin b/checkpoints/checkpoint-4300/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-4300/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-43000/config.json b/checkpoints/checkpoint-43000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-43000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-43000/model.safetensors b/checkpoints/checkpoint-43000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..01c934b35cb68c3bd0861226f6b2ab93ee139fd9 --- /dev/null +++ b/checkpoints/checkpoint-43000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c6b2678b53434235401e0ffbbcf82bb933fb2d445415453b325774c90d8b94e0 +size 324662984 diff --git a/checkpoints/checkpoint-43000/training_args.bin b/checkpoints/checkpoint-43000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-43000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-430000/config.json b/checkpoints/checkpoint-430000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-430000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + 
"transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-430000/model.safetensors b/checkpoints/checkpoint-430000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..edd88cfde9687996f7a84d7275cf0062784fb344 --- /dev/null +++ b/checkpoints/checkpoint-430000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ee1077f009bd68f7c664916d4d20887598d13f2b39fc36afda840293e9590f10 +size 324662984 diff --git a/checkpoints/checkpoint-430000/training_args.bin b/checkpoints/checkpoint-430000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-430000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-43500/config.json b/checkpoints/checkpoint-43500/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-43500/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-43500/model.safetensors b/checkpoints/checkpoint-43500/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..97b95978999f50eab039fa413ba2fcd7fc70fca3 --- /dev/null +++ b/checkpoints/checkpoint-43500/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:181023b2c19cb3b9d36c0850a48d694f868843e2a602f1aeef00072bbcc84e9b +size 324662984 diff --git a/checkpoints/checkpoint-43500/training_args.bin b/checkpoints/checkpoint-43500/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-43500/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-435000/config.json b/checkpoints/checkpoint-435000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-435000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 
0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-435000/model.safetensors b/checkpoints/checkpoint-435000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..63ebdd978175b756e7a6cc5caae06d0a7bb041c9 --- /dev/null +++ b/checkpoints/checkpoint-435000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:90b5649df450ee433f0bfa12c43c9e9a479758bc0b225fa601e1cc612a745bb6 +size 324662984 diff --git a/checkpoints/checkpoint-435000/training_args.bin b/checkpoints/checkpoint-435000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-435000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-4400/config.json b/checkpoints/checkpoint-4400/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-4400/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-4400/model.safetensors b/checkpoints/checkpoint-4400/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..28dcc1bf15e4c9aa5f13dee1672a561ce730e12a --- /dev/null +++ b/checkpoints/checkpoint-4400/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0be54e407c7c5aff2676e40feb8e4cb1267396971e5f251cbd03341d6d72c8c3 +size 324662984 diff --git a/checkpoints/checkpoint-4400/training_args.bin b/checkpoints/checkpoint-4400/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-4400/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-44000/config.json b/checkpoints/checkpoint-44000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-44000/config.json @@ -0,0 
+1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-44000/model.safetensors b/checkpoints/checkpoint-44000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..f65c309f7244583866eb4da1ce285f144b1ef2cd --- /dev/null +++ b/checkpoints/checkpoint-44000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8b36c096f99f767e5d31c978e416dff17a8a5f143f45ae1d4f15150fcd559f1d +size 324662984 diff --git a/checkpoints/checkpoint-44000/training_args.bin b/checkpoints/checkpoint-44000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-44000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-440000/config.json b/checkpoints/checkpoint-440000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-440000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-440000/model.safetensors b/checkpoints/checkpoint-440000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..3cf3ce6d962c261b2dbabe65a932c3bf77050a71 --- /dev/null +++ b/checkpoints/checkpoint-440000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:cab354d13986c4a8b44af6358b477a96359021447706d10347c1625676f0f6ba +size 324662984 diff --git a/checkpoints/checkpoint-440000/training_args.bin b/checkpoints/checkpoint-440000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-440000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-44500/config.json b/checkpoints/checkpoint-44500/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-44500/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-44500/model.safetensors b/checkpoints/checkpoint-44500/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..b5ae536eb81254b7b67c22cf1ddca6b64fd294c9 --- /dev/null +++ b/checkpoints/checkpoint-44500/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:74dfff29c33ab30f3de09128cbd22d87e42797141b961a5d0c9265a33944703a +size 324662984 diff --git a/checkpoints/checkpoint-44500/training_args.bin b/checkpoints/checkpoint-44500/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-44500/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-445000/config.json b/checkpoints/checkpoint-445000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-445000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-445000/model.safetensors b/checkpoints/checkpoint-445000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..6a34ae1170d5e8cc5964ca78d72d4c17f25c9e5d --- /dev/null +++ b/checkpoints/checkpoint-445000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:71852632f553ff9a6bd891c351793870d83c5dd0f364cc3d2d416def2f775e18 +size 324662984 diff 
--git a/checkpoints/checkpoint-445000/training_args.bin b/checkpoints/checkpoint-445000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-445000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-4500/config.json b/checkpoints/checkpoint-4500/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-4500/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-4500/model.safetensors b/checkpoints/checkpoint-4500/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..d4cff7dfb22f316ea7faab96f2393179404bebcb --- /dev/null +++ b/checkpoints/checkpoint-4500/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:fd74a2f0e1ec23aa1053b6574c5fc40435f547314143467cf3aedbdade5d844c +size 324662984 diff --git a/checkpoints/checkpoint-4500/training_args.bin b/checkpoints/checkpoint-4500/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-4500/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-45000/config.json b/checkpoints/checkpoint-45000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-45000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-45000/model.safetensors b/checkpoints/checkpoint-45000/model.safetensors new 
file mode 100644 index 0000000000000000000000000000000000000000..7dfe6ce8f79c81ab3463a7265e0e6d572de570b0 --- /dev/null +++ b/checkpoints/checkpoint-45000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b2f5ba2a730672043f54cc7b557a4d125ca5e12280b50f848ed99a9c5083777c +size 324662984 diff --git a/checkpoints/checkpoint-45000/training_args.bin b/checkpoints/checkpoint-45000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-45000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-450000/config.json b/checkpoints/checkpoint-450000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-450000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-450000/model.safetensors b/checkpoints/checkpoint-450000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..b2bad735447215f62d35c0afb117b4f8ef5f0790 --- /dev/null +++ b/checkpoints/checkpoint-450000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6a1cea49f24e8041d5ffa71eb1d575b845f7d1f94db466ea4aa839cd6ab06474 +size 324662984 diff --git a/checkpoints/checkpoint-450000/training_args.bin b/checkpoints/checkpoint-450000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-450000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-45500/config.json b/checkpoints/checkpoint-45500/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-45500/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + 
"rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-45500/model.safetensors b/checkpoints/checkpoint-45500/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..56bfba1b5115fe12771131d1886e0195836c703c --- /dev/null +++ b/checkpoints/checkpoint-45500/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8005780993dd3aaaf75869c771f5109b12e5a6491b27021965644d3a22072b8c +size 324662984 diff --git a/checkpoints/checkpoint-45500/training_args.bin b/checkpoints/checkpoint-45500/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-45500/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-455000/config.json b/checkpoints/checkpoint-455000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-455000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-455000/model.safetensors b/checkpoints/checkpoint-455000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..fdfd6a1c91a79d35aa32be624ec316dd3e30dc38 --- /dev/null +++ b/checkpoints/checkpoint-455000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:2bfc1e78f8ba2fcbdef8c27a58859722aa6dca0edf005d6ce0db60301d0ba947 +size 324662984 diff --git a/checkpoints/checkpoint-455000/training_args.bin b/checkpoints/checkpoint-455000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-455000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-4600/config.json b/checkpoints/checkpoint-4600/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-4600/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + 
"classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-4600/model.safetensors b/checkpoints/checkpoint-4600/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..5f4cd5dc451fbf1a46b187eb40136ddfb8504202 --- /dev/null +++ b/checkpoints/checkpoint-4600/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f62e030d435be0f26cf6b72dbf326d9e0405707fa3b2c22a3b52db1bf47d6c40 +size 324662984 diff --git a/checkpoints/checkpoint-4600/training_args.bin b/checkpoints/checkpoint-4600/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-4600/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-46000/config.json b/checkpoints/checkpoint-46000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-46000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-46000/model.safetensors b/checkpoints/checkpoint-46000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..353ca2336023f6d417e92dc3e87af5d91d88de78 --- /dev/null +++ b/checkpoints/checkpoint-46000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:888c502ab1f78034a264a14a346f13ecc8d0c76e64c3d3b6ce6ff16b0b25bdfc +size 324662984 diff --git a/checkpoints/checkpoint-46000/training_args.bin b/checkpoints/checkpoint-46000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-46000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-460000/config.json b/checkpoints/checkpoint-460000/config.json new file mode 100644 index 
0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-460000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-460000/model.safetensors b/checkpoints/checkpoint-460000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..60d9f920d48b21c6d2fb860a0379e455790b3a3e --- /dev/null +++ b/checkpoints/checkpoint-460000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f19f057d5838fbfaab61b9c6bd2f12aac714f2e7aebb9bdcdfe992e21afc7900 +size 324662984 diff --git a/checkpoints/checkpoint-460000/training_args.bin b/checkpoints/checkpoint-460000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-460000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-46500/config.json b/checkpoints/checkpoint-46500/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-46500/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-46500/model.safetensors b/checkpoints/checkpoint-46500/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..79c2b5ae431496f027ccc5a8d2e34e128fdd64be --- /dev/null +++ b/checkpoints/checkpoint-46500/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:da5483c5f5b93fdd876c993669036b0a368e66e4af0b4a9f557f7aba48229152 +size 324662984 diff --git a/checkpoints/checkpoint-46500/training_args.bin b/checkpoints/checkpoint-46500/training_args.bin new file mode 100644 index 
0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-46500/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-465000/config.json b/checkpoints/checkpoint-465000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-465000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-465000/model.safetensors b/checkpoints/checkpoint-465000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..d831b6aab44224b3366a8213c25d196dcb28227a --- /dev/null +++ b/checkpoints/checkpoint-465000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:10c7972143ef9d46bf2b96f72db5ed4aa5608cf34efcea1486b7980001bb50cc +size 324662984 diff --git a/checkpoints/checkpoint-465000/training_args.bin b/checkpoints/checkpoint-465000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-465000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-4700/config.json b/checkpoints/checkpoint-4700/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-4700/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-4700/model.safetensors b/checkpoints/checkpoint-4700/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..cc4198408d91742ebb0c2ebcba364a9f5d804ff8 --- /dev/null 
+++ b/checkpoints/checkpoint-4700/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:43d4a3fedc930d5e7c220d9f18223a7c0cc2610556e61f1e9fea79bdb3e14111 +size 324662984 diff --git a/checkpoints/checkpoint-4700/training_args.bin b/checkpoints/checkpoint-4700/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-4700/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-47000/config.json b/checkpoints/checkpoint-47000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-47000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-47000/model.safetensors b/checkpoints/checkpoint-47000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..ec6ca26d19add0e47d87abd0c1411522126aa342 --- /dev/null +++ b/checkpoints/checkpoint-47000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5a7621041c27cd1110ffeb1445c3c0594315a281e4198c89419b055152d4f157 +size 324662984 diff --git a/checkpoints/checkpoint-47000/training_args.bin b/checkpoints/checkpoint-47000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-47000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-470000/config.json b/checkpoints/checkpoint-470000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-470000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + 
"transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-470000/model.safetensors b/checkpoints/checkpoint-470000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..3d2329a60df338c5b8e16c4d716756eebb480157 --- /dev/null +++ b/checkpoints/checkpoint-470000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:aa01c0eb0182a81cf74b9fc5cb246c7afce26202162be606c0083e4f7e519618 +size 324662984 diff --git a/checkpoints/checkpoint-470000/training_args.bin b/checkpoints/checkpoint-470000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-470000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-47500/config.json b/checkpoints/checkpoint-47500/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-47500/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-47500/model.safetensors b/checkpoints/checkpoint-47500/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..c76e5932ad8ac292797cdb98ea1dd93727aa5e0e --- /dev/null +++ b/checkpoints/checkpoint-47500/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c3aacafb7aabaa7d9c12c033a32dc5d5bad75468ad00b7d75ae0904b29e569ad +size 324662984 diff --git a/checkpoints/checkpoint-47500/training_args.bin b/checkpoints/checkpoint-47500/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-47500/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-475000/config.json b/checkpoints/checkpoint-475000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-475000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 
0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-475000/model.safetensors b/checkpoints/checkpoint-475000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..bf2f611c7d31f559375c8a8972edff2cde72d37d --- /dev/null +++ b/checkpoints/checkpoint-475000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:14cad8e64959513320b6526f407cb1a772197a026acdefe939a3b18c34af8967 +size 324662984 diff --git a/checkpoints/checkpoint-475000/training_args.bin b/checkpoints/checkpoint-475000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-475000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-4800/config.json b/checkpoints/checkpoint-4800/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-4800/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-4800/model.safetensors b/checkpoints/checkpoint-4800/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..56d911d4ac1eb061dd6bc803849273305e5a7942 --- /dev/null +++ b/checkpoints/checkpoint-4800/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:87890c62fe0f0a48e7a3ae70cfdb4d8846f576b61a268005d5358f4c8d3a3106 +size 324662984 diff --git a/checkpoints/checkpoint-4800/training_args.bin b/checkpoints/checkpoint-4800/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-4800/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-48000/config.json b/checkpoints/checkpoint-48000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-48000/config.json @@ -0,0 
+1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-48000/model.safetensors b/checkpoints/checkpoint-48000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..915083dfe33e42822cbd900cb240a989214117aa --- /dev/null +++ b/checkpoints/checkpoint-48000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:51e18848014203ae87b2aede228465ac6f1385fe4e26db39e400798755845c34 +size 324662984 diff --git a/checkpoints/checkpoint-48000/training_args.bin b/checkpoints/checkpoint-48000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-48000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-480000/config.json b/checkpoints/checkpoint-480000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-480000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-480000/model.safetensors b/checkpoints/checkpoint-480000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..45d300d0943fb247c31c76d327805ad6f8fb5d3d --- /dev/null +++ b/checkpoints/checkpoint-480000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e8a0ae92c1c3067777c78be4e65fa15def30901a7829dc513e7a4d068471f8bb +size 324662984 diff --git a/checkpoints/checkpoint-480000/training_args.bin b/checkpoints/checkpoint-480000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-480000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-48500/config.json b/checkpoints/checkpoint-48500/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-48500/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-48500/model.safetensors b/checkpoints/checkpoint-48500/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..98050bdbe459ce796502973d636618bce908a747 --- /dev/null +++ b/checkpoints/checkpoint-48500/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:67e6c79c39d69c8bf17676490f50392bbdbcbd56f47b3629f59e2b4b63ba04cc +size 324662984 diff --git a/checkpoints/checkpoint-48500/training_args.bin b/checkpoints/checkpoint-48500/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-48500/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-485000/config.json b/checkpoints/checkpoint-485000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-485000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-485000/model.safetensors b/checkpoints/checkpoint-485000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..e79a49a6ec92d5ca4467489ff47bd6bfff508a42 --- /dev/null +++ b/checkpoints/checkpoint-485000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f7a058f4b867ed7f9680d6564a15432a627c9d50a93d5263648957fbc00c765d +size 324662984 diff 
--git a/checkpoints/checkpoint-485000/training_args.bin b/checkpoints/checkpoint-485000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-485000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-4900/config.json b/checkpoints/checkpoint-4900/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-4900/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-4900/model.safetensors b/checkpoints/checkpoint-4900/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..5e014f40139473c1a8c73c1a7292e2ae0c838099 --- /dev/null +++ b/checkpoints/checkpoint-4900/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:083ada258f8e280300bacf896069af21f2a62846df1fd3affc95b39f0f109092 +size 324662984 diff --git a/checkpoints/checkpoint-4900/training_args.bin b/checkpoints/checkpoint-4900/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-4900/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-49000/config.json b/checkpoints/checkpoint-49000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-49000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-49000/model.safetensors b/checkpoints/checkpoint-49000/model.safetensors new 
file mode 100644 index 0000000000000000000000000000000000000000..7f3b17bc1806bb06d9cd06ee1f2ac573e500decb --- /dev/null +++ b/checkpoints/checkpoint-49000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9f9abe43e36028ed21fdd12fbf1d13a5dcab9ca8f04f9fea1c7f6d9e8efcc90d +size 324662984 diff --git a/checkpoints/checkpoint-49000/training_args.bin b/checkpoints/checkpoint-49000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-49000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-490000/config.json b/checkpoints/checkpoint-490000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-490000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-490000/model.safetensors b/checkpoints/checkpoint-490000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..a593b68985b7beab1f4c25df13318e5469e39248 --- /dev/null +++ b/checkpoints/checkpoint-490000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f78f01aa1c0cb49bb83531d11a0978a3d5a38ac5a99518b493b6f55a957e098a +size 324662984 diff --git a/checkpoints/checkpoint-490000/training_args.bin b/checkpoints/checkpoint-490000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-490000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-49500/config.json b/checkpoints/checkpoint-49500/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-49500/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + 
"rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-49500/model.safetensors b/checkpoints/checkpoint-49500/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..48968aa744081ab3485a3012b50bc98a9ec86030 --- /dev/null +++ b/checkpoints/checkpoint-49500/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4e7260512dcb3d55b2bd79f57a12161f6f5e493ce46aa5aafc932ddde6c9893b +size 324662984 diff --git a/checkpoints/checkpoint-49500/training_args.bin b/checkpoints/checkpoint-49500/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-49500/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-495000/config.json b/checkpoints/checkpoint-495000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-495000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-495000/model.safetensors b/checkpoints/checkpoint-495000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..669e762e80a663fb07bc5cf3c1b85eb1eef25596 --- /dev/null +++ b/checkpoints/checkpoint-495000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6271964026eae7c33519214af23af21afc6ae50d119f96f40e3b1af752718050 +size 324662984 diff --git a/checkpoints/checkpoint-495000/training_args.bin b/checkpoints/checkpoint-495000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-495000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-500/config.json b/checkpoints/checkpoint-500/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-500/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + 
"classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-500/model.safetensors b/checkpoints/checkpoint-500/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..0c483336d14996dcb9ac24cd065ff067c5a3be79 --- /dev/null +++ b/checkpoints/checkpoint-500/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d027767f78675c23ad56d91eaae13513c659d2588bcbe2eb07b323c5a68161dd +size 324662984 diff --git a/checkpoints/checkpoint-500/training_args.bin b/checkpoints/checkpoint-500/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-500/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-5000/config.json b/checkpoints/checkpoint-5000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-5000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-5000/model.safetensors b/checkpoints/checkpoint-5000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..a4b30e414aac3975504b09b1211489e617322e2a --- /dev/null +++ b/checkpoints/checkpoint-5000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7a0e78923b2244d79e2765c3377ae47afabf987582c61845855cb406ffc7482d +size 324662984 diff --git a/checkpoints/checkpoint-5000/training_args.bin b/checkpoints/checkpoint-5000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-5000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-50000/config.json b/checkpoints/checkpoint-50000/config.json new file mode 100644 index 
0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-50000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-50000/model.safetensors b/checkpoints/checkpoint-50000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..bed65700c8d4f15b19c1e7aa0a22bb06b3552e02 --- /dev/null +++ b/checkpoints/checkpoint-50000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:16e48a68b0bb99b686c4ce3afa883874b97f52ef39b34a94d6b658955a3b4830 +size 324662984 diff --git a/checkpoints/checkpoint-50000/training_args.bin b/checkpoints/checkpoint-50000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-50000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-500000/config.json b/checkpoints/checkpoint-500000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-500000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-500000/model.safetensors b/checkpoints/checkpoint-500000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..1c6799538a201ba56a7c38c2d9a1c54721830c27 --- /dev/null +++ b/checkpoints/checkpoint-500000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0dcc857835165f18879b621d6e3b4fd05b7198fb4f3987c37294af9003325ee5 +size 324662984 diff --git a/checkpoints/checkpoint-500000/training_args.bin b/checkpoints/checkpoint-500000/training_args.bin new file mode 100644 index 
0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-500000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-505000/config.json b/checkpoints/checkpoint-505000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-505000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-505000/model.safetensors b/checkpoints/checkpoint-505000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..1194d6e1393d17024436124bdd9f6193d84bcb2b --- /dev/null +++ b/checkpoints/checkpoint-505000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8db586517d47eb4e44d7f55d74a4a523ef3110d4c0a5dd5567f47a142ae1658c +size 324662984 diff --git a/checkpoints/checkpoint-505000/training_args.bin b/checkpoints/checkpoint-505000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-505000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-5100/config.json b/checkpoints/checkpoint-5100/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-5100/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-5100/model.safetensors b/checkpoints/checkpoint-5100/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..b5446d4281cb65c9f624fb3fa0309bbf86bd7d1d --- /dev/null 
+++ b/checkpoints/checkpoint-5100/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5feec909bd476e8bfa7d18cb7b1578ab2e397fbd054612a5f1ecbd5be6bc8975 +size 324662984 diff --git a/checkpoints/checkpoint-5100/training_args.bin b/checkpoints/checkpoint-5100/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-5100/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-51000/config.json b/checkpoints/checkpoint-51000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-51000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-51000/model.safetensors b/checkpoints/checkpoint-51000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..f34026a723580e5614a526738d8832a883707aac --- /dev/null +++ b/checkpoints/checkpoint-51000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:266faf91de0926766552710cd905dba90b7fc5ce8ae942c3389fd093c15ac37b +size 324662984 diff --git a/checkpoints/checkpoint-51000/training_args.bin b/checkpoints/checkpoint-51000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-51000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-510000/config.json b/checkpoints/checkpoint-510000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-510000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + 
"transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-510000/model.safetensors b/checkpoints/checkpoint-510000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..ef1bf9a4a815bab273dbed606e0e9b8cc9b3958e --- /dev/null +++ b/checkpoints/checkpoint-510000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:2d39a638130a936477ec60c4d5954b048285ab1c7b5b968b548934988a6cd8fe +size 324662984 diff --git a/checkpoints/checkpoint-510000/training_args.bin b/checkpoints/checkpoint-510000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-510000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-515000/config.json b/checkpoints/checkpoint-515000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-515000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-515000/model.safetensors b/checkpoints/checkpoint-515000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..3782615d8d0bdaf0561f02f110afebf8940e45e5 --- /dev/null +++ b/checkpoints/checkpoint-515000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9cc8fd06bfd13ffb6e6e1e35765c5c934dec5a40278ea2d572f325daed2df90f +size 324662984 diff --git a/checkpoints/checkpoint-515000/training_args.bin b/checkpoints/checkpoint-515000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-515000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-5200/config.json b/checkpoints/checkpoint-5200/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-5200/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 
0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-5200/model.safetensors b/checkpoints/checkpoint-5200/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..44d7c4e88620ac5dac58286f8bc136d77bdf13c1 --- /dev/null +++ b/checkpoints/checkpoint-5200/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d8402fddc690b9917a1f93cef9da1ae973835739f6af6df23d6b49c1853680a7 +size 324662984 diff --git a/checkpoints/checkpoint-5200/training_args.bin b/checkpoints/checkpoint-5200/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-5200/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-52000/config.json b/checkpoints/checkpoint-52000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-52000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-52000/model.safetensors b/checkpoints/checkpoint-52000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..4f52e1292cd7658a48b40b65141b4f0c264132c1 --- /dev/null +++ b/checkpoints/checkpoint-52000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:67932fb6761ac321490719e93e95dac3337624b3727473d6264e600cb9a74104 +size 324662984 diff --git a/checkpoints/checkpoint-52000/training_args.bin b/checkpoints/checkpoint-52000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-52000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-520000/config.json b/checkpoints/checkpoint-520000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-520000/config.json @@ -0,0 
+1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-520000/model.safetensors b/checkpoints/checkpoint-520000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..962b7731c6806212ef4ef6feb554fcf5a1dbbe52 --- /dev/null +++ b/checkpoints/checkpoint-520000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:36e2486624cc67db2eeb015ecf8b40ce797dc023b1830b28ba7d0fc88e5a28b9 +size 324662984 diff --git a/checkpoints/checkpoint-520000/training_args.bin b/checkpoints/checkpoint-520000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-520000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-525000/config.json b/checkpoints/checkpoint-525000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-525000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-525000/model.safetensors b/checkpoints/checkpoint-525000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..8310716401bfd35c4fc04e2bb98da392bcfbfbe8 --- /dev/null +++ b/checkpoints/checkpoint-525000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0d190885849dabf1d438af76f996425062f0dda1e82be712eabef350129ce7db +size 324662984 diff --git a/checkpoints/checkpoint-525000/training_args.bin b/checkpoints/checkpoint-525000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-525000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-5300/config.json b/checkpoints/checkpoint-5300/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-5300/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-5300/model.safetensors b/checkpoints/checkpoint-5300/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..7d8d470f8aa3e8f5d9f8d549a7acadf31b9aa9b7 --- /dev/null +++ b/checkpoints/checkpoint-5300/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e7a0a13f72a8119d53681debc91549057f03ededf1642d45da6d318ada16e048 +size 324662984 diff --git a/checkpoints/checkpoint-5300/training_args.bin b/checkpoints/checkpoint-5300/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-5300/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-53000/config.json b/checkpoints/checkpoint-53000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-53000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-53000/model.safetensors b/checkpoints/checkpoint-53000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..aee1004f184b4e3400608902c0774f8d08da4990 --- /dev/null +++ b/checkpoints/checkpoint-53000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0cebdc7c3e23ccdeefcad679328c13069c4e7b5e38eb67ca893ef625dd565191 +size 324662984 diff --git 
a/checkpoints/checkpoint-53000/training_args.bin b/checkpoints/checkpoint-53000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-53000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-530000/config.json b/checkpoints/checkpoint-530000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-530000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-530000/model.safetensors b/checkpoints/checkpoint-530000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..66b917db79e51a8746f9c404b82c45f4fb9523ca --- /dev/null +++ b/checkpoints/checkpoint-530000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:180955d639e357794f999ae0911b0473a282e11ec30da1686e951652861f3c09 +size 324662984 diff --git a/checkpoints/checkpoint-530000/training_args.bin b/checkpoints/checkpoint-530000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-530000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-535000/config.json b/checkpoints/checkpoint-535000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-535000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-535000/model.safetensors 
b/checkpoints/checkpoint-535000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..5d78ffeadcec91bfc6dbd35f3817e1293f478488 --- /dev/null +++ b/checkpoints/checkpoint-535000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:2bfbaf10575e922182f0a70550090ec51add05f4e8353b9d79083655c2ccf524 +size 324662984 diff --git a/checkpoints/checkpoint-535000/training_args.bin b/checkpoints/checkpoint-535000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-535000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-5400/config.json b/checkpoints/checkpoint-5400/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-5400/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-5400/model.safetensors b/checkpoints/checkpoint-5400/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..b7af28fcd842e1918d1b976865fcb1cdf6d70b60 --- /dev/null +++ b/checkpoints/checkpoint-5400/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:dba866178ac53ecff78954f46050bc42e2ca058fdf0b7fb97785fb2c64e3a849 +size 324662984 diff --git a/checkpoints/checkpoint-5400/training_args.bin b/checkpoints/checkpoint-5400/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-5400/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-54000/config.json b/checkpoints/checkpoint-54000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-54000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + 
"num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-54000/model.safetensors b/checkpoints/checkpoint-54000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..9c5af75c35510b94cbdaf31980ddf9295347d5bd --- /dev/null +++ b/checkpoints/checkpoint-54000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:680821e9ac8a115368c0041ccbc66029aac8f70c4acdfc3be6ae1c831d11134b +size 324662984 diff --git a/checkpoints/checkpoint-54000/training_args.bin b/checkpoints/checkpoint-54000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-54000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-540000/config.json b/checkpoints/checkpoint-540000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-540000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-540000/model.safetensors b/checkpoints/checkpoint-540000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..8fcc1858631a706839bcb3f75e78144cee139c51 --- /dev/null +++ b/checkpoints/checkpoint-540000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0e04ea18ca3bd56bdeaaadbeb841184554c79d23cfb3082651a11bb51865c34f +size 324662984 diff --git a/checkpoints/checkpoint-540000/training_args.bin b/checkpoints/checkpoint-540000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-540000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-545000/config.json b/checkpoints/checkpoint-545000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-545000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + 
"bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-545000/model.safetensors b/checkpoints/checkpoint-545000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..870a86c68f797d06e42b4285666695095fa4da0d --- /dev/null +++ b/checkpoints/checkpoint-545000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4c81ad550c0a53d46389c13260cb603a5baaa05729cbdfc59cfd8fcea23dde7f +size 324662984 diff --git a/checkpoints/checkpoint-545000/training_args.bin b/checkpoints/checkpoint-545000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-545000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-5500/config.json b/checkpoints/checkpoint-5500/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-5500/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-5500/model.safetensors b/checkpoints/checkpoint-5500/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..76d8da4649639b4f6ee8075cf2153ce68f7a8172 --- /dev/null +++ b/checkpoints/checkpoint-5500/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:88fd85c8f069ce2be6e5cd8b604f9feb17dad72d1518916c010fcc34de76ecb0 +size 324662984 diff --git a/checkpoints/checkpoint-5500/training_args.bin b/checkpoints/checkpoint-5500/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-5500/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-55000/config.json b/checkpoints/checkpoint-55000/config.json new file mode 
100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-55000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-55000/model.safetensors b/checkpoints/checkpoint-55000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..a7fd16b2b8f7e554f4361ce0180997213ef0623a --- /dev/null +++ b/checkpoints/checkpoint-55000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:793c749594fd16de76b4a0e977e4fc6655960571bc85067c5213b7cf644609dd +size 324662984 diff --git a/checkpoints/checkpoint-55000/training_args.bin b/checkpoints/checkpoint-55000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-55000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-550000/config.json b/checkpoints/checkpoint-550000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-550000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-550000/model.safetensors b/checkpoints/checkpoint-550000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..9403d204bb4d91ab8b1b5c32ff96ef898af68902 --- /dev/null +++ b/checkpoints/checkpoint-550000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b33958073cb228ad8961d915d3195fd0aac1f4fcfebc322b3728d4d51524ceec +size 324662984 diff --git a/checkpoints/checkpoint-550000/training_args.bin b/checkpoints/checkpoint-550000/training_args.bin new file mode 100644 index 
0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-550000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-555000/config.json b/checkpoints/checkpoint-555000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-555000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-555000/model.safetensors b/checkpoints/checkpoint-555000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..3d8166bf9f7e1617f9118151d2f4a078108888b7 --- /dev/null +++ b/checkpoints/checkpoint-555000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:228e06603173020e320c6f5675afd399a858b6cb198cc4c86fd1ac7dd2a57054 +size 324662984 diff --git a/checkpoints/checkpoint-555000/training_args.bin b/checkpoints/checkpoint-555000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-555000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-5600/config.json b/checkpoints/checkpoint-5600/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-5600/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-5600/model.safetensors b/checkpoints/checkpoint-5600/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..5196c6c9b9e68edf8ba4b8942561309c84905922 --- /dev/null 
+++ b/checkpoints/checkpoint-5600/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:663de1c451dfb2d824aab7b8083fa4c554cf032a29fc605e922f933b4422af30 +size 324662984 diff --git a/checkpoints/checkpoint-5600/training_args.bin b/checkpoints/checkpoint-5600/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-5600/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-56000/config.json b/checkpoints/checkpoint-56000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-56000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-56000/model.safetensors b/checkpoints/checkpoint-56000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..00cb71985a064203b3a90080bdf7000d459ced2c --- /dev/null +++ b/checkpoints/checkpoint-56000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e9965aa780f74d7849c35e46b707b3a591901dbf6b4a43493bb08ad7873424be +size 324662984 diff --git a/checkpoints/checkpoint-56000/training_args.bin b/checkpoints/checkpoint-56000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-56000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-560000/config.json b/checkpoints/checkpoint-560000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-560000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + 
"transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-560000/model.safetensors b/checkpoints/checkpoint-560000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..3c7939724585b7416aa93f0afa02d781eb6bfcb9 --- /dev/null +++ b/checkpoints/checkpoint-560000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f272d383ae733bc8b242a83db21e6170eeaa48dcbf43c63c22c11b7dfef7d6fe +size 324662984 diff --git a/checkpoints/checkpoint-560000/training_args.bin b/checkpoints/checkpoint-560000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-560000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-565000/config.json b/checkpoints/checkpoint-565000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-565000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-565000/model.safetensors b/checkpoints/checkpoint-565000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..28c2615e4093fd00d41f38532b428851e01594d9 --- /dev/null +++ b/checkpoints/checkpoint-565000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:84ad2e8c16b6019c20c0ab3e86725aa3a2de0eaffcbec573088678a70f34b7d7 +size 324662984 diff --git a/checkpoints/checkpoint-565000/training_args.bin b/checkpoints/checkpoint-565000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-565000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-5700/config.json b/checkpoints/checkpoint-5700/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-5700/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 
0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-5700/model.safetensors b/checkpoints/checkpoint-5700/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..f2442b305b711bf7025293a1d01b95c5569aa34a --- /dev/null +++ b/checkpoints/checkpoint-5700/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:164c47c1238c58dd15c2145fed9cc72786fe3341460485bccae9980a5e19d13b +size 324662984 diff --git a/checkpoints/checkpoint-5700/training_args.bin b/checkpoints/checkpoint-5700/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-5700/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-57000/config.json b/checkpoints/checkpoint-57000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-57000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-57000/model.safetensors b/checkpoints/checkpoint-57000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..e0c7ebf9df26ea5cd93ae04d4168e2125f919752 --- /dev/null +++ b/checkpoints/checkpoint-57000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:13922f18a5fbd2419a3698e0586a272224220cc993dd6955ac119ab25b94e0ec +size 324662984 diff --git a/checkpoints/checkpoint-57000/training_args.bin b/checkpoints/checkpoint-57000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-57000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-570000/config.json b/checkpoints/checkpoint-570000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-570000/config.json @@ -0,0 
+1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-570000/model.safetensors b/checkpoints/checkpoint-570000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..7e7651a7a0e9c2fa99a7aa49782a6b86df54b8e7 --- /dev/null +++ b/checkpoints/checkpoint-570000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:78b73e49426672e1ccdef22ac8c2ec7cf681ef48e7fb56fcbad21eac8906cad0 +size 324662984 diff --git a/checkpoints/checkpoint-570000/training_args.bin b/checkpoints/checkpoint-570000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-570000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-575000/config.json b/checkpoints/checkpoint-575000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-575000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-575000/model.safetensors b/checkpoints/checkpoint-575000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..bb1cb7e15c2f295ddc1e0f712e205b00713395c0 --- /dev/null +++ b/checkpoints/checkpoint-575000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:56826835612829dfa5c02a24d7606787ecd953719019dfa48a7a46f74b1efb31 +size 324662984 diff --git a/checkpoints/checkpoint-575000/training_args.bin b/checkpoints/checkpoint-575000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-575000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-5800/config.json b/checkpoints/checkpoint-5800/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-5800/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-5800/model.safetensors b/checkpoints/checkpoint-5800/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..f1457136c1f727e1b630e1ad9f620d5c3c70e7eb --- /dev/null +++ b/checkpoints/checkpoint-5800/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6facabe57946821ab6bdfb06710e52ee8543d230002aa031dd6cbccf93712a08 +size 324662984 diff --git a/checkpoints/checkpoint-5800/training_args.bin b/checkpoints/checkpoint-5800/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-5800/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-58000/config.json b/checkpoints/checkpoint-58000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-58000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-58000/model.safetensors b/checkpoints/checkpoint-58000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..ea32fb3a05554c4a2b37db52397a16ccbea86afe --- /dev/null +++ b/checkpoints/checkpoint-58000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:74ebea7015560f5c5724eacdc9d821c866e1970b963a0f54facb3621cacf7279 +size 324662984 diff --git 
a/checkpoints/checkpoint-58000/training_args.bin b/checkpoints/checkpoint-58000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-58000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-580000/config.json b/checkpoints/checkpoint-580000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-580000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-580000/model.safetensors b/checkpoints/checkpoint-580000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..ea45c26cfe1ea24e09502e7790c03c4bad28e747 --- /dev/null +++ b/checkpoints/checkpoint-580000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ec78a917294525c1d2f1d2339824bb8794c0b9471190507630b527ff0819648a +size 324662984 diff --git a/checkpoints/checkpoint-580000/training_args.bin b/checkpoints/checkpoint-580000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-580000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-585000/config.json b/checkpoints/checkpoint-585000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-585000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-585000/model.safetensors 
b/checkpoints/checkpoint-585000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..52658784aa780da9853b5c11b89f35204d773c49 --- /dev/null +++ b/checkpoints/checkpoint-585000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:cc3911f10f8e32d9aa20eb87266c6224839266d53c8bbbe09fdf08da55056029 +size 324662984 diff --git a/checkpoints/checkpoint-585000/training_args.bin b/checkpoints/checkpoint-585000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-585000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-5900/config.json b/checkpoints/checkpoint-5900/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-5900/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-5900/model.safetensors b/checkpoints/checkpoint-5900/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..4764dd2799d5db8884e8d10d4639c45d65171bf8 --- /dev/null +++ b/checkpoints/checkpoint-5900/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:13435c0362d09999b2f1c79a767866e19540c202730035b3fa85886ad9496a34 +size 324662984 diff --git a/checkpoints/checkpoint-5900/training_args.bin b/checkpoints/checkpoint-5900/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-5900/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-59000/config.json b/checkpoints/checkpoint-59000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-59000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + 
"num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-59000/model.safetensors b/checkpoints/checkpoint-59000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..69dd0efad8a9e414eb8326f9f3f5110cf0299f78 --- /dev/null +++ b/checkpoints/checkpoint-59000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:66f01085530156bf158ec51fe4d183cffe675428ee528845326ad718118b9f38 +size 324662984 diff --git a/checkpoints/checkpoint-59000/training_args.bin b/checkpoints/checkpoint-59000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-59000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-590000/config.json b/checkpoints/checkpoint-590000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-590000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-590000/model.safetensors b/checkpoints/checkpoint-590000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..7cf80e830ccbb7c3517ff18ddcea3b25465a9430 --- /dev/null +++ b/checkpoints/checkpoint-590000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:87ac55e684fa1e9d6e42b86afb85a874cbc5697605a8d20bcfcc787d6f44baa1 +size 324662984 diff --git a/checkpoints/checkpoint-590000/training_args.bin b/checkpoints/checkpoint-590000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-590000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-595000/config.json b/checkpoints/checkpoint-595000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-595000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + 
"bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-595000/model.safetensors b/checkpoints/checkpoint-595000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..45625cca964245438f71218ed3e6f3ed14b303c6 --- /dev/null +++ b/checkpoints/checkpoint-595000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:474d83f726eada7ef491f32a8f9d7aebd1c568f42e829cfebb7f7ccb966c5c39 +size 324662984 diff --git a/checkpoints/checkpoint-595000/training_args.bin b/checkpoints/checkpoint-595000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-595000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-600/config.json b/checkpoints/checkpoint-600/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-600/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-600/model.safetensors b/checkpoints/checkpoint-600/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..4c2bae45d8d17d05fea00a933764384178cc8ea8 --- /dev/null +++ b/checkpoints/checkpoint-600/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1df95885c04a9bfa0c3541f4f05768991b37db08696df862e0b28a0053c9df2b +size 324662984 diff --git a/checkpoints/checkpoint-600/training_args.bin b/checkpoints/checkpoint-600/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-600/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-6000/config.json b/checkpoints/checkpoint-6000/config.json new file mode 100644 index 
0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-6000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-6000/model.safetensors b/checkpoints/checkpoint-6000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..e57fd7e706f411b67c85a4e5f13e1e237ece06f6 --- /dev/null +++ b/checkpoints/checkpoint-6000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:033eb94fd4d49376d874e42ab6f634602421c70a42ed8a33167db5aabe6f18f0 +size 324662984 diff --git a/checkpoints/checkpoint-6000/training_args.bin b/checkpoints/checkpoint-6000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-6000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-60000/config.json b/checkpoints/checkpoint-60000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-60000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-60000/model.safetensors b/checkpoints/checkpoint-60000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..481e71909168104a5288a4c3a848351978b8cd65 --- /dev/null +++ b/checkpoints/checkpoint-60000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:dddbf9318eb71560fdb1221b242dabf34c4f7b51678ec553ec681139b0897376 +size 324662984 diff --git a/checkpoints/checkpoint-60000/training_args.bin b/checkpoints/checkpoint-60000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- 
/dev/null +++ b/checkpoints/checkpoint-60000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-600000/config.json b/checkpoints/checkpoint-600000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-600000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-600000/model.safetensors b/checkpoints/checkpoint-600000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..5425a84d56b91ce4795db9c585423bf1da6b0451 --- /dev/null +++ b/checkpoints/checkpoint-600000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3f18cb007dab3b932be1648401b47be5a89f35bce76d9e9567094e268b4f04c7 +size 324662984 diff --git a/checkpoints/checkpoint-600000/training_args.bin b/checkpoints/checkpoint-600000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-600000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-605000/config.json b/checkpoints/checkpoint-605000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-605000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-605000/model.safetensors b/checkpoints/checkpoint-605000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..225b7893876d8dacc372bfe058a4ba70b0d5a8ab --- /dev/null +++ b/checkpoints/checkpoint-605000/model.safetensors @@ -0,0 +1,3 @@ 
+version https://git-lfs.github.com/spec/v1 +oid sha256:835465d150c9ef548d1efc6f4386d3596d0483fd11f45e7e2065da66033ba2fe +size 324662984 diff --git a/checkpoints/checkpoint-605000/training_args.bin b/checkpoints/checkpoint-605000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-605000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-6100/config.json b/checkpoints/checkpoint-6100/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-6100/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-6100/model.safetensors b/checkpoints/checkpoint-6100/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..790e127772d84a320842df26c312defcc1405325 --- /dev/null +++ b/checkpoints/checkpoint-6100/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0723399cddf0caf5bcf6032cf09532c0c2dee3f614cd384efbab7a76d29178bb +size 324662984 diff --git a/checkpoints/checkpoint-6100/training_args.bin b/checkpoints/checkpoint-6100/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-6100/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-61000/config.json b/checkpoints/checkpoint-61000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-61000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": 
true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-61000/model.safetensors b/checkpoints/checkpoint-61000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..b828a62842d76750aa6b76633e55e7c8cec9c058 --- /dev/null +++ b/checkpoints/checkpoint-61000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:26846c9c0f31637d42ab26f53161e3dc031197656e3908922da585d031417795 +size 324662984 diff --git a/checkpoints/checkpoint-61000/training_args.bin b/checkpoints/checkpoint-61000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-61000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-610000/config.json b/checkpoints/checkpoint-610000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-610000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-610000/model.safetensors b/checkpoints/checkpoint-610000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..fdd296b4b4e38c19716f8929462b184ce8e3e8b1 --- /dev/null +++ b/checkpoints/checkpoint-610000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e2811b047e0e8d012b9ba4f4d72320d6b9885f63b12051c7698cc8561b1fca81 +size 324662984 diff --git a/checkpoints/checkpoint-610000/training_args.bin b/checkpoints/checkpoint-610000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-610000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-615000/config.json b/checkpoints/checkpoint-615000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-615000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + 
"layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-615000/model.safetensors b/checkpoints/checkpoint-615000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..03ac4b73e1555b6f57c1ae2435a6f507c32b81e3 --- /dev/null +++ b/checkpoints/checkpoint-615000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:588b3c1c9ad068a3d3d5314ce87600933e0119574e01118f66767cc78963f624 +size 324662984 diff --git a/checkpoints/checkpoint-615000/training_args.bin b/checkpoints/checkpoint-615000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-615000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-6200/config.json b/checkpoints/checkpoint-6200/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-6200/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-6200/model.safetensors b/checkpoints/checkpoint-6200/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..27af4ad1e241265dbdda8303c16be50f4ba7af88 --- /dev/null +++ b/checkpoints/checkpoint-6200/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:76d2b9b6e5a98eb31288233484074c7d8053a9609d10dfe3cdb0c8070a4d323e +size 324662984 diff --git a/checkpoints/checkpoint-6200/training_args.bin b/checkpoints/checkpoint-6200/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-6200/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-62000/config.json b/checkpoints/checkpoint-62000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-62000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": 
"georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-62000/model.safetensors b/checkpoints/checkpoint-62000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..f52527d71c9822a5cd1877a60c3f576798f4c83d --- /dev/null +++ b/checkpoints/checkpoint-62000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3e3bc8d5c5a204a8469794b73d7f8148ad4064c35704b0e4e6425e26feeccf4b +size 324662984 diff --git a/checkpoints/checkpoint-62000/training_args.bin b/checkpoints/checkpoint-62000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-62000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-620000/config.json b/checkpoints/checkpoint-620000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-620000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-620000/model.safetensors b/checkpoints/checkpoint-620000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..0df270a4463e8f421b91cc12d3695a178adee912 --- /dev/null +++ b/checkpoints/checkpoint-620000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:55f660cb8d11c55f0bd19f7f804278b2e477d3dd6c66521655c05d8bd49e21f3 +size 324662984 diff --git a/checkpoints/checkpoint-620000/training_args.bin b/checkpoints/checkpoint-620000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-620000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-625000/config.json b/checkpoints/checkpoint-625000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-625000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-625000/model.safetensors b/checkpoints/checkpoint-625000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..0a713e7fe8066c02372995a1e1e27533fe1ef147 --- /dev/null +++ b/checkpoints/checkpoint-625000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:71fdf37371810734805ddaa91e1f88db0dc5802e80b6eed96c5b61a03eaf974c +size 324662984 diff --git a/checkpoints/checkpoint-625000/training_args.bin b/checkpoints/checkpoint-625000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-625000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-6300/config.json b/checkpoints/checkpoint-6300/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-6300/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-6300/model.safetensors b/checkpoints/checkpoint-6300/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..ec12d8168df6c77143298c31bf18c390a3409196 --- /dev/null +++ b/checkpoints/checkpoint-6300/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0a25d64204ae34460a28378e8dedc52c9505de1ecd30ffeffa646761bc0287dc +size 324662984 diff --git 
a/checkpoints/checkpoint-6300/training_args.bin b/checkpoints/checkpoint-6300/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-6300/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-63000/config.json b/checkpoints/checkpoint-63000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-63000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-63000/model.safetensors b/checkpoints/checkpoint-63000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..4b96676f8e8099db91ecfee9a0838fc8c7defd08 --- /dev/null +++ b/checkpoints/checkpoint-63000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e177709d7a209fed9443ee2b2d3477efce1058838fb46a2791419c534ccde609 +size 324662984 diff --git a/checkpoints/checkpoint-63000/training_args.bin b/checkpoints/checkpoint-63000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-63000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-630000/config.json b/checkpoints/checkpoint-630000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-630000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-630000/model.safetensors b/checkpoints/checkpoint-630000/model.safetensors 
new file mode 100644 index 0000000000000000000000000000000000000000..bd3aba3bedc36f0531ccb6c567a1db77bf5b13f0 --- /dev/null +++ b/checkpoints/checkpoint-630000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:07a70ca881b3b799b3f758ad84c2bf0cfdc1fe02558719e8f208f383e53b7ae3 +size 324662984 diff --git a/checkpoints/checkpoint-630000/training_args.bin b/checkpoints/checkpoint-630000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-630000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-635000/config.json b/checkpoints/checkpoint-635000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-635000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-635000/model.safetensors b/checkpoints/checkpoint-635000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..a152cb2d8f784d7147692a3091e5bf6f2b22a00f --- /dev/null +++ b/checkpoints/checkpoint-635000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:236f2160e23130950f4519de06732260c0834ae026aebc1bcad6685e56c842a3 +size 324662984 diff --git a/checkpoints/checkpoint-635000/training_args.bin b/checkpoints/checkpoint-635000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-635000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-6400/config.json b/checkpoints/checkpoint-6400/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-6400/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + 
"rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-6400/model.safetensors b/checkpoints/checkpoint-6400/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..55d21129c7f22eab72e69c438674d256ae064461 --- /dev/null +++ b/checkpoints/checkpoint-6400/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c4685d58c061bce312a76fdc32d5e6f3a2d20e13a547f70c3ebbda78b2447ce1 +size 324662984 diff --git a/checkpoints/checkpoint-6400/training_args.bin b/checkpoints/checkpoint-6400/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-6400/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-64000/config.json b/checkpoints/checkpoint-64000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-64000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-64000/model.safetensors b/checkpoints/checkpoint-64000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..18386f5915e5cbaa3abef091bf25e167c2d13d84 --- /dev/null +++ b/checkpoints/checkpoint-64000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:cdf3fe653d0cd69209313891c07c9bf56f481132fb42b60c42bee0d3866a04b2 +size 324662984 diff --git a/checkpoints/checkpoint-64000/training_args.bin b/checkpoints/checkpoint-64000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-64000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-640000/config.json b/checkpoints/checkpoint-640000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-640000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 
0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-640000/model.safetensors b/checkpoints/checkpoint-640000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..423d8e462d5ec6317857b75cccd7744c0fab50e6 --- /dev/null +++ b/checkpoints/checkpoint-640000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:16e5c3b59b846cb4340d51123c8710d77692fd0b2b5a9313a3a5033996e5313d +size 324662984 diff --git a/checkpoints/checkpoint-640000/training_args.bin b/checkpoints/checkpoint-640000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-640000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-645000/config.json b/checkpoints/checkpoint-645000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-645000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-645000/model.safetensors b/checkpoints/checkpoint-645000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..dbd18e342745f20282addaea12d9a4d42c399cda --- /dev/null +++ b/checkpoints/checkpoint-645000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b7fda6db1fd124142765dfdbb4949d949068d83d133138a166a53c5388adf754 +size 324662984 diff --git a/checkpoints/checkpoint-645000/training_args.bin b/checkpoints/checkpoint-645000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-645000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-6500/config.json b/checkpoints/checkpoint-6500/config.json new file mode 100644 index 
0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-6500/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-6500/model.safetensors b/checkpoints/checkpoint-6500/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..c6e366e42cb8de8949be7e197a6cc585eea630b5 --- /dev/null +++ b/checkpoints/checkpoint-6500/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:da8c50b1eb101b74867c45d8ebd625434377395f15f9e32727a33c32d6895873 +size 324662984 diff --git a/checkpoints/checkpoint-6500/training_args.bin b/checkpoints/checkpoint-6500/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-6500/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-65000/config.json b/checkpoints/checkpoint-65000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-65000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-65000/model.safetensors b/checkpoints/checkpoint-65000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..4dfee4b4acc03db04560fd1978c5e27c3b76baa5 --- /dev/null +++ b/checkpoints/checkpoint-65000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f5aa10800ae66bc8ffb1a581a4c29f84d2fa5204069edddd5cd48e4b42c68335 +size 324662984 diff --git a/checkpoints/checkpoint-65000/training_args.bin b/checkpoints/checkpoint-65000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- 
/dev/null +++ b/checkpoints/checkpoint-65000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-650000/config.json b/checkpoints/checkpoint-650000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-650000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-650000/model.safetensors b/checkpoints/checkpoint-650000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..e5f6d43dcc7804de54ae305ac185ed50e852ee02 --- /dev/null +++ b/checkpoints/checkpoint-650000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:fd56553c088fe281a5da4e3c13c23af99dd3af2c97fe08fdabd94d9d9cfb2e6e +size 324662984 diff --git a/checkpoints/checkpoint-650000/training_args.bin b/checkpoints/checkpoint-650000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-650000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-655000/config.json b/checkpoints/checkpoint-655000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-655000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-655000/model.safetensors b/checkpoints/checkpoint-655000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..5ad5bb161867aedb141ff8c940f26df87f1ac79d --- /dev/null +++ b/checkpoints/checkpoint-655000/model.safetensors @@ -0,0 +1,3 @@ 
+version https://git-lfs.github.com/spec/v1 +oid sha256:1c2573c5b35c9f1957485eae27016e9133b0cf3d812af77c58e6dddf047250c6 +size 324662984 diff --git a/checkpoints/checkpoint-655000/training_args.bin b/checkpoints/checkpoint-655000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-655000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-6600/config.json b/checkpoints/checkpoint-6600/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-6600/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-6600/model.safetensors b/checkpoints/checkpoint-6600/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..ac7f1040331c9f56713933edc123259eac231a50 --- /dev/null +++ b/checkpoints/checkpoint-6600/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:80e9af2b49f8bff8ab58e944e2b399a526eefe4af06ee96db92d42a9e3a1622d +size 324662984 diff --git a/checkpoints/checkpoint-6600/training_args.bin b/checkpoints/checkpoint-6600/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-6600/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-66000/config.json b/checkpoints/checkpoint-66000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-66000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": 
true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-66000/model.safetensors b/checkpoints/checkpoint-66000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..02226458f7c538df87bdd3fe2c3cf9b3c4e64af8 --- /dev/null +++ b/checkpoints/checkpoint-66000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:2ae94c1832843f0a666497c512177801df3b3a51a2db44ca1d91a8303aedf5ac +size 324662984 diff --git a/checkpoints/checkpoint-66000/training_args.bin b/checkpoints/checkpoint-66000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-66000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-660000/config.json b/checkpoints/checkpoint-660000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-660000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-660000/model.safetensors b/checkpoints/checkpoint-660000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..85c2edae54eaf3a30f0aa2e74c3a8a8c5d26ed2e --- /dev/null +++ b/checkpoints/checkpoint-660000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:65affa1af12388a7923d90998884a5d541bf8330e993d339056c45dc720e82f9 +size 324662984 diff --git a/checkpoints/checkpoint-660000/training_args.bin b/checkpoints/checkpoint-660000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-660000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-665000/config.json b/checkpoints/checkpoint-665000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-665000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + 
"layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-665000/model.safetensors b/checkpoints/checkpoint-665000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..3d48c9cdc375239ff4bff18a0645dc7dc3e63bad --- /dev/null +++ b/checkpoints/checkpoint-665000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d67e401a38f9c4060ff98aefad0162a92e5ba4057328079f13aa06c99960250c +size 324662984 diff --git a/checkpoints/checkpoint-665000/training_args.bin b/checkpoints/checkpoint-665000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-665000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-6700/config.json b/checkpoints/checkpoint-6700/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-6700/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-6700/model.safetensors b/checkpoints/checkpoint-6700/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..7f81605221709a48706b78b29d5540f9208ef97d --- /dev/null +++ b/checkpoints/checkpoint-6700/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:09a42e500dcba181646e1fade8fc076a29336b20ac1235bb591d441cb8c0ee12 +size 324662984 diff --git a/checkpoints/checkpoint-6700/training_args.bin b/checkpoints/checkpoint-6700/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-6700/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-67000/config.json b/checkpoints/checkpoint-67000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-67000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": 
"georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-67000/model.safetensors b/checkpoints/checkpoint-67000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..948c40a35ac19af93ff2f8f2ecad6d9d066c67c0 --- /dev/null +++ b/checkpoints/checkpoint-67000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:87292f0327d0b0d2b229f0bcfc3629c2f6c3c7462ca394ccb57e3f9795c79d78 +size 324662984 diff --git a/checkpoints/checkpoint-67000/training_args.bin b/checkpoints/checkpoint-67000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-67000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-670000/config.json b/checkpoints/checkpoint-670000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-670000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-670000/model.safetensors b/checkpoints/checkpoint-670000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..d0f9bb08711e9ed43409d2894933fa56a7fcb0e2 --- /dev/null +++ b/checkpoints/checkpoint-670000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:fc2ac727c35f833dce8cfadafbc4af738b982ff2db51d98ce9d4c8bd73fe284a +size 324662984 diff --git a/checkpoints/checkpoint-670000/training_args.bin b/checkpoints/checkpoint-670000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-670000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-675000/config.json b/checkpoints/checkpoint-675000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-675000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-675000/model.safetensors b/checkpoints/checkpoint-675000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..78bb8e1b5bd5b7ee0f1b45f4a60653f653d0bb93 --- /dev/null +++ b/checkpoints/checkpoint-675000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8d93ae06c3aa37edf97d112e9018a716643892625816ed213eec4e7313d1600d +size 324662984 diff --git a/checkpoints/checkpoint-675000/training_args.bin b/checkpoints/checkpoint-675000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-675000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-6800/config.json b/checkpoints/checkpoint-6800/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-6800/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-6800/model.safetensors b/checkpoints/checkpoint-6800/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..e73f3c1e83c03e002f31514ac82b34b944d1fd5f --- /dev/null +++ b/checkpoints/checkpoint-6800/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c8640eb1ae67f4d49c06c72481626b8edec304924dac667e939eb557c7d1caa6 +size 324662984 diff --git 
a/checkpoints/checkpoint-6800/training_args.bin b/checkpoints/checkpoint-6800/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-6800/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-68000/config.json b/checkpoints/checkpoint-68000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-68000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-68000/model.safetensors b/checkpoints/checkpoint-68000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..95b8ad9fd682a2a79b0c85af70e40fb65de1d31f --- /dev/null +++ b/checkpoints/checkpoint-68000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f29569976b845d13f7852de7d345fc093fff7d96b55ccd39f171671d12132b74 +size 324662984 diff --git a/checkpoints/checkpoint-68000/training_args.bin b/checkpoints/checkpoint-68000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-68000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-680000/config.json b/checkpoints/checkpoint-680000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-680000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-680000/model.safetensors b/checkpoints/checkpoint-680000/model.safetensors 
new file mode 100644 index 0000000000000000000000000000000000000000..e2209a458eb449bd70880aed4e3316e764560472 --- /dev/null +++ b/checkpoints/checkpoint-680000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b2b58adb5464630ba134d9a101e9b2f1fa513a292bc8331499d857d7c6dd8feb +size 324662984 diff --git a/checkpoints/checkpoint-680000/training_args.bin b/checkpoints/checkpoint-680000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-680000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-685000/config.json b/checkpoints/checkpoint-685000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-685000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-685000/model.safetensors b/checkpoints/checkpoint-685000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..0dce3618efa9e11b1f8b26b2f296bdd4f2622d1e --- /dev/null +++ b/checkpoints/checkpoint-685000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3c8040eb9caaa8a210d3a2bef4b1ce79ef46f945cfbf852084cf71539129d981 +size 324662984 diff --git a/checkpoints/checkpoint-685000/training_args.bin b/checkpoints/checkpoint-685000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-685000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-6900/config.json b/checkpoints/checkpoint-6900/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-6900/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + 
"rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-6900/model.safetensors b/checkpoints/checkpoint-6900/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..843e4236fd25604c704139d310d64b96433f1926 --- /dev/null +++ b/checkpoints/checkpoint-6900/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:35f53e1c25388cc427cda9bae51fb3cbd3512accce6989d50676dcb9fe0d3788 +size 324662984 diff --git a/checkpoints/checkpoint-6900/training_args.bin b/checkpoints/checkpoint-6900/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-6900/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-69000/config.json b/checkpoints/checkpoint-69000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-69000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-69000/model.safetensors b/checkpoints/checkpoint-69000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..5130b02744a80fd426bd1f09d9b55f259c15da5e --- /dev/null +++ b/checkpoints/checkpoint-69000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:32cfcb271713e012b32a7ac34ad734bbe1388e67cb94a1e0d7a27c3d983208fe +size 324662984 diff --git a/checkpoints/checkpoint-69000/training_args.bin b/checkpoints/checkpoint-69000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-69000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-690000/config.json b/checkpoints/checkpoint-690000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-690000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 
0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-690000/model.safetensors b/checkpoints/checkpoint-690000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..dd509c0139ea7b08df60d4dd72cf258a52ba2335 --- /dev/null +++ b/checkpoints/checkpoint-690000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:31a74b919ab2f1cbe0dfc9bf40372d29974776d7e0779dd46d19f322ad95eaf5 +size 324662984 diff --git a/checkpoints/checkpoint-690000/training_args.bin b/checkpoints/checkpoint-690000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-690000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-695000/config.json b/checkpoints/checkpoint-695000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-695000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-695000/model.safetensors b/checkpoints/checkpoint-695000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..bb2f48f8ce19445c0ac24804c284094cee2f0438 --- /dev/null +++ b/checkpoints/checkpoint-695000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f347400ee0140a8613cce14dc893c7d225f38c608bcd64f37c91d2e72b1e3c05 +size 324662984 diff --git a/checkpoints/checkpoint-695000/training_args.bin b/checkpoints/checkpoint-695000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-695000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-700/config.json b/checkpoints/checkpoint-700/config.json new file mode 100644 index 
0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-700/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-700/model.safetensors b/checkpoints/checkpoint-700/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..6621939083d883842d5c3c4cf1cd58d44992e03e --- /dev/null +++ b/checkpoints/checkpoint-700/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7ec7e13fc80f82a5b039d1f21ef14123638b5e0cbaf80f659f3231fcaea16f04 +size 324662984 diff --git a/checkpoints/checkpoint-700/training_args.bin b/checkpoints/checkpoint-700/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-700/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-7000/config.json b/checkpoints/checkpoint-7000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-7000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-7000/model.safetensors b/checkpoints/checkpoint-7000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..d395a2f9b136a558500a4b9079e4e65ba1f34f8b --- /dev/null +++ b/checkpoints/checkpoint-7000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:48f83ef1e0b7b21d481fe36e418865569ee24d2dccd7f5d4b32c7cb8128cc599 +size 324662984 diff --git a/checkpoints/checkpoint-7000/training_args.bin b/checkpoints/checkpoint-7000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ 
b/checkpoints/checkpoint-7000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-70000/config.json b/checkpoints/checkpoint-70000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-70000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-70000/model.safetensors b/checkpoints/checkpoint-70000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..9b1d03e5ec7e54c9110f8546a570998b5d2b5668 --- /dev/null +++ b/checkpoints/checkpoint-70000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:76d6a72557fc2f2a6e15893acc6041c922e51e29ca4c10257b16c6012446d5ad +size 324662984 diff --git a/checkpoints/checkpoint-70000/training_args.bin b/checkpoints/checkpoint-70000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-70000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-700000/config.json b/checkpoints/checkpoint-700000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-700000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-700000/model.safetensors b/checkpoints/checkpoint-700000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..1c918dc8ec5458719827505515434c54818c741a --- /dev/null +++ b/checkpoints/checkpoint-700000/model.safetensors @@ -0,0 +1,3 @@ +version 
https://git-lfs.github.com/spec/v1 +oid sha256:c5bb345646be276b06b6d3466193decd07ee4bb182dc50575105b001ebbcd8e3 +size 324662984 diff --git a/checkpoints/checkpoint-700000/training_args.bin b/checkpoints/checkpoint-700000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-700000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-705000/config.json b/checkpoints/checkpoint-705000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-705000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-705000/model.safetensors b/checkpoints/checkpoint-705000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..3d8223e7886d986f4e11bc6bdc9688433e5f62db --- /dev/null +++ b/checkpoints/checkpoint-705000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:373bddf37f4a5225644f23d1021f9b5aff60b1e1f974f0f08c4dfcd76327a278 +size 324662984 diff --git a/checkpoints/checkpoint-705000/training_args.bin b/checkpoints/checkpoint-705000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-705000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-7100/config.json b/checkpoints/checkpoint-7100/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-7100/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + 
"use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-7100/model.safetensors b/checkpoints/checkpoint-7100/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..1bf2b80e7828cc61fb815a7ef41c0c6b0d4fb908 --- /dev/null +++ b/checkpoints/checkpoint-7100/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0b32b5ff7b43133c7fb3d3ef4429ea01a0957e14e9c73097cd1b964dcfff0871 +size 324662984 diff --git a/checkpoints/checkpoint-7100/training_args.bin b/checkpoints/checkpoint-7100/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-7100/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-71000/config.json b/checkpoints/checkpoint-71000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-71000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-71000/model.safetensors b/checkpoints/checkpoint-71000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..f6d931b462601ec6440b7586b4aa9362b04b8d79 --- /dev/null +++ b/checkpoints/checkpoint-71000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ae87430bd6ec7bbdc70d9a92723818235e816ae3caca4406dbf09039919e91a9 +size 324662984 diff --git a/checkpoints/checkpoint-71000/training_args.bin b/checkpoints/checkpoint-71000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-71000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-710000/config.json b/checkpoints/checkpoint-710000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-710000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + 
"layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-710000/model.safetensors b/checkpoints/checkpoint-710000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..d6f336e5606a958728cf7bec1f2ef1a33f8f9a14 --- /dev/null +++ b/checkpoints/checkpoint-710000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5c4d3d1424ee5220b5e15475e2bc4e6ac7ad2efea8414beb644813133fc7025f +size 324662984 diff --git a/checkpoints/checkpoint-710000/training_args.bin b/checkpoints/checkpoint-710000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-710000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-715000/config.json b/checkpoints/checkpoint-715000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-715000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-715000/model.safetensors b/checkpoints/checkpoint-715000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..f1e8585d534b862d69edf974d421a6bf7dbca214 --- /dev/null +++ b/checkpoints/checkpoint-715000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9144eddb7f473873b0fbcc9f7de9a8fe14ebf2ccb16d3f5b521b7fc29c433798 +size 324662984 diff --git a/checkpoints/checkpoint-715000/training_args.bin b/checkpoints/checkpoint-715000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-715000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-7200/config.json b/checkpoints/checkpoint-7200/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-7200/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": 
"georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-7200/model.safetensors b/checkpoints/checkpoint-7200/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..d167de5126c7ef7d0872d53a61afe83b6647f769 --- /dev/null +++ b/checkpoints/checkpoint-7200/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7b090c4643a06d85aa4e68ad6db544c0881d11668e352d38eb43e9d112839812 +size 324662984 diff --git a/checkpoints/checkpoint-7200/training_args.bin b/checkpoints/checkpoint-7200/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-7200/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-72000/config.json b/checkpoints/checkpoint-72000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-72000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-72000/model.safetensors b/checkpoints/checkpoint-72000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..d816e00e911f6699447c31e2dbd33bab7f308567 --- /dev/null +++ b/checkpoints/checkpoint-72000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f98ec0eedd07fcff41b9d96af22f80b32d1bc00f4c50740cdb39254be3f878d5 +size 324662984 diff --git a/checkpoints/checkpoint-72000/training_args.bin b/checkpoints/checkpoint-72000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-72000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-720000/config.json b/checkpoints/checkpoint-720000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-720000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-720000/model.safetensors b/checkpoints/checkpoint-720000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..af55fda1e554f7cd4737b46c6520c02f6232bbd2 --- /dev/null +++ b/checkpoints/checkpoint-720000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e106256da8f43a029be9bf35bd5ba33e8e73fb9684dade3cf832b76dca28b23d +size 324662984 diff --git a/checkpoints/checkpoint-720000/training_args.bin b/checkpoints/checkpoint-720000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-720000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-725000/config.json b/checkpoints/checkpoint-725000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-725000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-725000/model.safetensors b/checkpoints/checkpoint-725000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..6442f4a4bfd98dc1b2750236bd71981b19a85319 --- /dev/null +++ b/checkpoints/checkpoint-725000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a48bcea8a51e4ed0a3299afefd251bf858aac048fe0b3f769e4ac0cefaa4f355 +size 
324662984 diff --git a/checkpoints/checkpoint-725000/training_args.bin b/checkpoints/checkpoint-725000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-725000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-7300/config.json b/checkpoints/checkpoint-7300/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-7300/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-7300/model.safetensors b/checkpoints/checkpoint-7300/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..f9718f46a31de311fdd09529ae83f4e2de8e0c28 --- /dev/null +++ b/checkpoints/checkpoint-7300/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8f399bbdc6e4f4833a5425cf4fad8ed8d5aeddbf5a1f38e37af848b786f6fbcf +size 324662984 diff --git a/checkpoints/checkpoint-7300/training_args.bin b/checkpoints/checkpoint-7300/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-7300/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-73000/config.json b/checkpoints/checkpoint-73000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-73000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-73000/model.safetensors 
b/checkpoints/checkpoint-73000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..c6dcdc2cefae8c4431e891f7549a2e4c8cc79bb4 --- /dev/null +++ b/checkpoints/checkpoint-73000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d5aca52d8a908a59c2ff66e9dadd51072ef5ec3b598b986e4279f963f16c654a +size 324662984 diff --git a/checkpoints/checkpoint-73000/training_args.bin b/checkpoints/checkpoint-73000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-73000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-730000/config.json b/checkpoints/checkpoint-730000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-730000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-730000/model.safetensors b/checkpoints/checkpoint-730000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..3b1e9927ba2934d1e8e053dc412fe12182d2d4e0 --- /dev/null +++ b/checkpoints/checkpoint-730000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:52787bddb198b0925c76ff1fb1c9dd1794df25565c9366a44e8ab4f106580576 +size 324662984 diff --git a/checkpoints/checkpoint-730000/training_args.bin b/checkpoints/checkpoint-730000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-730000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-735000/config.json b/checkpoints/checkpoint-735000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-735000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + 
"num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-735000/model.safetensors b/checkpoints/checkpoint-735000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..564c6f0cea19a4919a00ac0b99ed17cc9e1753cd --- /dev/null +++ b/checkpoints/checkpoint-735000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5d376d54f745067189add2934f9abe4a46063258307dd0e39f4fa7968e29d0d9 +size 324662984 diff --git a/checkpoints/checkpoint-735000/training_args.bin b/checkpoints/checkpoint-735000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-735000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-7400/config.json b/checkpoints/checkpoint-7400/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-7400/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-7400/model.safetensors b/checkpoints/checkpoint-7400/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..3d7d99301b3f961d26002b1f513abf01ba023631 --- /dev/null +++ b/checkpoints/checkpoint-7400/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3d3d12f2cf90d7f3050791873d25d30a9b837bb18f873d44c5a89dbffea1fd99 +size 324662984 diff --git a/checkpoints/checkpoint-7400/training_args.bin b/checkpoints/checkpoint-7400/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-7400/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-74000/config.json b/checkpoints/checkpoint-74000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-74000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + 
"attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-74000/model.safetensors b/checkpoints/checkpoint-74000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..b0b053bf99406d9e434a0d1373c987a0e51cf14c --- /dev/null +++ b/checkpoints/checkpoint-74000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9be32172dbf52ae47c1845ee1b05fa7743609a495e003ee29baf1eaf860bc054 +size 324662984 diff --git a/checkpoints/checkpoint-74000/training_args.bin b/checkpoints/checkpoint-74000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-74000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-740000/config.json b/checkpoints/checkpoint-740000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-740000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-740000/model.safetensors b/checkpoints/checkpoint-740000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..d7166fb40d380d3131dd269ec7b4b7373d39215c --- /dev/null +++ b/checkpoints/checkpoint-740000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:19933cf80b4ab862b5e69b9f02427390c50fa66f09fcdb1ca7b8cc8b9f6b1b16 +size 324662984 diff --git a/checkpoints/checkpoint-740000/training_args.bin b/checkpoints/checkpoint-740000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-740000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-745000/config.json 
b/checkpoints/checkpoint-745000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-745000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-745000/model.safetensors b/checkpoints/checkpoint-745000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..7b5b1c08f90aa73b5defd653c3faac365db81598 --- /dev/null +++ b/checkpoints/checkpoint-745000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f54ae7c9cd7ea42ec45547e6a859cb8093a66ba195365acaeffe2d7bc3ae1f81 +size 324662984 diff --git a/checkpoints/checkpoint-745000/training_args.bin b/checkpoints/checkpoint-745000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-745000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-7500/config.json b/checkpoints/checkpoint-7500/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-7500/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-7500/model.safetensors b/checkpoints/checkpoint-7500/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..5b3a7f06716798a038a3558e7f1f9b0ce69933a8 --- /dev/null +++ b/checkpoints/checkpoint-7500/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f66db13508939d84a1de7d109e1e04b35d1618fff71459b3bae6d2d393c927ce +size 324662984 diff --git a/checkpoints/checkpoint-7500/training_args.bin b/checkpoints/checkpoint-7500/training_args.bin new file mode 100644 index 
0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-7500/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-75000/config.json b/checkpoints/checkpoint-75000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-75000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-75000/model.safetensors b/checkpoints/checkpoint-75000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..31a76b71760d06e27cf9b90f496a7c3709ae28da --- /dev/null +++ b/checkpoints/checkpoint-75000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:be02b3566be393382ca50b0177201f4f13d04ffc6cc60d73ccf1254295662571 +size 324662984 diff --git a/checkpoints/checkpoint-75000/training_args.bin b/checkpoints/checkpoint-75000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-75000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-750000/config.json b/checkpoints/checkpoint-750000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-750000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-750000/model.safetensors b/checkpoints/checkpoint-750000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..ae16346766869221ce08046f00a6c447a89dec39 --- /dev/null 
Each of the remaining checkpoint directories in this snapshot adds the same three files, and two of them never change: config.json is byte-identical everywhere (git blob b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916, the content shown above), and training_args.bin is everywhere the same 6520-byte LFS object (oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5). Only model.safetensors varies from step to step; every one is 324662984 bytes and is distinguished solely by its LFS oid. Note that the directories sort lexicographically rather than numerically, which is why checkpoint-7600 follows checkpoint-755000. The oids for the checkpoints covered here:

directory           model.safetensors sha256
checkpoint-750000   f227b1234a74520f32b0985d92e52dec865be21d97fc3d319c8edaa0145dd2e5
checkpoint-755000   37fd681530e357b33e6bdfa023dfd84b20ee9648ddce1e7a16c60701e886a1be
checkpoint-7600     e36fcd70a5d0cb365c733a3c1151c51f09e102dd3f7b562ac5e54819b1519a81
checkpoint-76000    7b5d87605f13e6dd7fae7c5a84061df79922f6b6ff4579d3954b370f9203278b
checkpoint-760000   a4968738aa1d2c46e1a9d9c94cdc8741cc1d757e7032c07b27d584da7fdcd613
checkpoint-765000   401731e06eefba9123e9687fda35fa108ae5169adfa78cdf4f09b19a15b257b8
checkpoint-7700     41f9bbe8ae907428b3314fe33ae894a5be50e824916a6a8b4263550a3463762f
checkpoint-77000    887a56916d1ac788d75888348dbe4266a9f07f62c1acd8bc7af58e7b0cbee770
checkpoint-770000   3829449783ffceb02d5e1f7b85c7c1222c076acd8999a9bc141e904bddf79c11
checkpoint-775000   48303234c92af56dc739de98f61be9e9223761219a20a64e02dc9c60a0931f47
checkpoint-7800     d7485996f94050cb7d8eb922a7302c01836f24bec4ca0150bc9651de14db4d75
checkpoint-78000    2dc2a314a0854f24182f4b41b3673d3dc77933c617567f923bdd0aaa4142ca0d
checkpoint-780000   3c5c94018412f85640a79fd4e5cf21ff96c7f25a11f270039d52d40302ee1d42
checkpoint-785000   c841c32156e2a894c33c377003cfb89b928a179a73f1c78ba5e58657cc597771
checkpoint-7900     471f7fdc1146ad6271af86ba2bf826fad674dc42b6478064e620b9454d638352
checkpoint-79000    44928f1e2877c72ab317f14614b360d9bdb35dd7ea7c9a52ce96fa872c9a4767
checkpoint-790000   0ae7d067b2c22a52afde5a62d9dde9aada198ca576a1aaded56bea50f46dea93
checkpoint-795000   edf055a8ea62b39006ae8f08bd5df7f64bc7b9251695fb09be5ef5d6967af2fd
checkpoint-800      bec81e57834083c82810a82e36c69c5271fccde9cbb095d0192482cf06080d7d
checkpoint-8000     1afe0fe7c41021871d758489fdcd99b8851b78671e9e1964104bdbe18d367ee5
checkpoint-80000    60ea33cbe9714ddeaef1f5b62754875595d2463cdf556430766a27a5687ba593
checkpoint-800000   aa7848bb2b344f8826b3dc007532b059d143388b2f81d8d92117e915791e1f2e
checkpoint-8100     8b808a1611ef1e8149ea8226b4708e9380c9003f016caf92ab4c511748cf7570
checkpoint-81000    826607ddf069313dcfa9a4292c8afe577d5789670241d2ceca35b01405d5a44c
checkpoint-8200     8b0facd231718c91bac33edebc37f751d04b80f5f7fd6e05cd7a396260f6656e
checkpoint-82000    a129a1fbf0c648355e36b9e111f20d0e93d821769cbe37ca69bb7f39b4ab8961
checkpoint-8300     d9dcbb91a37195fa4fa2c914c8bfbee19748e9c0b0037a90ff4c7c9c1c8c1bf7
checkpoint-83000    6822674bdb6ae87641cd418fc76c9f7aa4c2c95c5a44f22be186ab6d3eab16a4
checkpoint-8400     83a5fcaa78535baa594331a38700894998a8fccad53ab81d736ba9a2ce9c44ae
checkpoint-84000    d373677ff6fd2a258b8901bd00d635d32bf8b85cd72aaa16bec43fda934e79c0
checkpoint-8500     74516f196fb9bd0b1090a78cb0d23b23e8fad64be8ab0489fa9e685dc5fddef5
checkpoint-85000    4aca3ec6ec532d0e5a8c12bf501b92b3a2d405e260d65ab6c16655048280db3a
checkpoint-8600     cb0e8de754dfb1bf1275de8bf9134b409f6f31e0eb0a03512fac66df11d6f613
checkpoint-86000    e9bf3470f91e5ca041b9a5cb8018d45a7d667c3d15596afd71d930d6d43c5c59
checkpoint-8700     118a0b27d0b4ba7bc27752a2e7c2f0572be2ebe9e0ad1cd9afaba753e2e8c991
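Given those oids, a downloaded blob can be checked locally. An LFS pointer is just three text lines (version, oid, size), so parsing and verification take only a few lines; the paths here are illustrative and the sketch assumes the pointer text has been saved to its own file, rather than being the repository's own tooling:

```python
import hashlib

def parse_lfs_pointer(path):
    """Parse a Git LFS pointer file into its version, oid, and size fields."""
    fields = {}
    with open(path) as f:
        for line in f:
            key, _, value = line.strip().partition(" ")
            fields[key] = value
    return {
        "version": fields["version"],
        "oid": fields["oid"].removeprefix("sha256:"),  # Python 3.9+
        "size": int(fields["size"]),
    }

def verify_blob(pointer_path, blob_path):
    """Check a downloaded blob against its pointer: size and sha256 must match."""
    meta = parse_lfs_pointer(pointer_path)
    h, size = hashlib.sha256(), 0
    with open(blob_path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):  # stream in 1 MiB chunks
            h.update(chunk)
            size += len(chunk)
    return size == meta["size"] and h.hexdigest() == meta["oid"]
```

The same check works for any row of the table above, since the table records exactly the oid and the (constant) size that the pointer files carry.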
"rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-86000/model.safetensors b/checkpoints/checkpoint-86000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..c7a6f67fc40a3931a88a1a8678d143c6102b197e --- /dev/null +++ b/checkpoints/checkpoint-86000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e9bf3470f91e5ca041b9a5cb8018d45a7d667c3d15596afd71d930d6d43c5c59 +size 324662984 diff --git a/checkpoints/checkpoint-86000/training_args.bin b/checkpoints/checkpoint-86000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-86000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-8700/config.json b/checkpoints/checkpoint-8700/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-8700/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-8700/model.safetensors b/checkpoints/checkpoint-8700/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..192f2cc9dde20bde8d637c19e2076704d9bff791 --- /dev/null +++ b/checkpoints/checkpoint-8700/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:118a0b27d0b4ba7bc27752a2e7c2f0572be2ebe9e0ad1cd9afaba753e2e8c991 +size 324662984 diff --git a/checkpoints/checkpoint-8700/training_args.bin b/checkpoints/checkpoint-8700/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-8700/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-87000/config.json b/checkpoints/checkpoint-87000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-87000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + 
"hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-87000/model.safetensors b/checkpoints/checkpoint-87000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..0081fe2796b8c79f568bb7bc4291b46baf90907b --- /dev/null +++ b/checkpoints/checkpoint-87000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:bfa38be856d886d66daf011541ef39805d90699c92a8eb575a7451d170f2c404 +size 324662984 diff --git a/checkpoints/checkpoint-87000/training_args.bin b/checkpoints/checkpoint-87000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-87000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-8800/config.json b/checkpoints/checkpoint-8800/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-8800/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-8800/model.safetensors b/checkpoints/checkpoint-8800/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..d5181cf4d65940ef47979d6075b7b0d1a9a3aeb9 --- /dev/null +++ b/checkpoints/checkpoint-8800/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5b481445a3510f9fbe2fd7658719cad5db3e9ff5017655c27912e95be4c77951 +size 324662984 diff --git a/checkpoints/checkpoint-8800/training_args.bin b/checkpoints/checkpoint-8800/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-8800/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-88000/config.json b/checkpoints/checkpoint-88000/config.json new file mode 100644 index 
0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-88000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-88000/model.safetensors b/checkpoints/checkpoint-88000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..c004e82e159cde9094a8f2d80e7656b4f785570f --- /dev/null +++ b/checkpoints/checkpoint-88000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:cc943fc06a3977743b96760cac53b52099d4c8c0d1043d689b9e9dc1cf4bbea4 +size 324662984 diff --git a/checkpoints/checkpoint-88000/training_args.bin b/checkpoints/checkpoint-88000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-88000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-8900/config.json b/checkpoints/checkpoint-8900/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-8900/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-8900/model.safetensors b/checkpoints/checkpoint-8900/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..012290890ed5ac85e65001f448f0678397a62a86 --- /dev/null +++ b/checkpoints/checkpoint-8900/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7694e8bc0eefd756f86eee595b4cc16ef94cbbe78eec55feb8ebd27133cf2586 +size 324662984 diff --git a/checkpoints/checkpoint-8900/training_args.bin b/checkpoints/checkpoint-8900/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- 
/dev/null +++ b/checkpoints/checkpoint-8900/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-89000/config.json b/checkpoints/checkpoint-89000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-89000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-89000/model.safetensors b/checkpoints/checkpoint-89000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..51c55b0d75a3ab8bf70b2ed28b7c93711d4111ff --- /dev/null +++ b/checkpoints/checkpoint-89000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:60edb1d6aff172bf914066986b05a63778890e37148f5ab324ec6fbf095f9666 +size 324662984 diff --git a/checkpoints/checkpoint-89000/training_args.bin b/checkpoints/checkpoint-89000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-89000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-900/config.json b/checkpoints/checkpoint-900/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-900/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-900/model.safetensors b/checkpoints/checkpoint-900/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..e10ac8b2f8fa0863673bcbc6a7f02b086b42b05a --- /dev/null +++ b/checkpoints/checkpoint-900/model.safetensors @@ -0,0 +1,3 @@ +version 
https://git-lfs.github.com/spec/v1 +oid sha256:b5a2b8b8702b0b13aee8185d6ed8452e0fe024224dc4e529f02b04a2a67f2116 +size 324662984 diff --git a/checkpoints/checkpoint-900/training_args.bin b/checkpoints/checkpoint-900/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-900/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-9000/config.json b/checkpoints/checkpoint-9000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-9000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-9000/model.safetensors b/checkpoints/checkpoint-9000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..7ac44c9022820f5a2f46bccc1681eaaa4fa8786d --- /dev/null +++ b/checkpoints/checkpoint-9000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:60d14f505c690250e0993e8b7803435e6943bc83c13cd17fc414744d1d764c97 +size 324662984 diff --git a/checkpoints/checkpoint-9000/training_args.bin b/checkpoints/checkpoint-9000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-9000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-90000/config.json b/checkpoints/checkpoint-90000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-90000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + 
"vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-90000/model.safetensors b/checkpoints/checkpoint-90000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..b6d12ff81de21cf9adf5adbe487f0ef2aa4847a4 --- /dev/null +++ b/checkpoints/checkpoint-90000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:44aa1a9d69117535ad644d4168b7a164797e48b02b22583dc6486a52766b2a25 +size 324662984 diff --git a/checkpoints/checkpoint-90000/training_args.bin b/checkpoints/checkpoint-90000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-90000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-9100/config.json b/checkpoints/checkpoint-9100/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-9100/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-9100/model.safetensors b/checkpoints/checkpoint-9100/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..ce585f5afa236db0f46902f166c3bd7d1538df0f --- /dev/null +++ b/checkpoints/checkpoint-9100/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4d7714550bdb7939592ade2afc74de6f9614500f6d68f6fc56ac5eab6e561d4f +size 324662984 diff --git a/checkpoints/checkpoint-9100/training_args.bin b/checkpoints/checkpoint-9100/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-9100/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-91000/config.json b/checkpoints/checkpoint-91000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-91000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + 
"max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-91000/model.safetensors b/checkpoints/checkpoint-91000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..8ea06d863de684c9917700d8fc8bd3056b6d89df --- /dev/null +++ b/checkpoints/checkpoint-91000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1bb075c1e0b310891511873bd5ddbb16bc6b85467fd19dd18a7533c13f28ba64 +size 324662984 diff --git a/checkpoints/checkpoint-91000/training_args.bin b/checkpoints/checkpoint-91000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-91000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-9200/config.json b/checkpoints/checkpoint-9200/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-9200/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-9200/model.safetensors b/checkpoints/checkpoint-9200/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..5cc93813eab925b12da03f0f2cb40d9d4fc13d65 --- /dev/null +++ b/checkpoints/checkpoint-9200/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e64356097b132e0e22344fd1319f575107ed582bc370c955affbfab92cf4282e +size 324662984 diff --git a/checkpoints/checkpoint-9200/training_args.bin b/checkpoints/checkpoint-9200/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-9200/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-92000/config.json b/checkpoints/checkpoint-92000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-92000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + 
"GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-92000/model.safetensors b/checkpoints/checkpoint-92000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..232994e24d302a382bd166e09418b225b5421c5a --- /dev/null +++ b/checkpoints/checkpoint-92000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e731da38bc0e319c880dfdb284c2b47ed7188cb6e239b1ee81a83e4e3e588af4 +size 324662984 diff --git a/checkpoints/checkpoint-92000/training_args.bin b/checkpoints/checkpoint-92000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-92000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-9300/config.json b/checkpoints/checkpoint-9300/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-9300/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-9300/model.safetensors b/checkpoints/checkpoint-9300/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..1af0b96f3ed547be49cbe98d39c7d50cf806f54b --- /dev/null +++ b/checkpoints/checkpoint-9300/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:fffd918ec47277d4215e56f2531bc10e0a2f7c227cb5f3ee1dab5a97f7f28917 +size 324662984 diff --git a/checkpoints/checkpoint-9300/training_args.bin b/checkpoints/checkpoint-9300/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-9300/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git 
a/checkpoints/checkpoint-93000/config.json b/checkpoints/checkpoint-93000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-93000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-93000/model.safetensors b/checkpoints/checkpoint-93000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..8ddba403a3ad458655a5f08356d160233f47c3d8 --- /dev/null +++ b/checkpoints/checkpoint-93000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5034cb0495e6df6cd04887f4764336e05d26a4576f70da4b2ef5dd4ff344bad9 +size 324662984 diff --git a/checkpoints/checkpoint-93000/training_args.bin b/checkpoints/checkpoint-93000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-93000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-9400/config.json b/checkpoints/checkpoint-9400/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-9400/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-9400/model.safetensors b/checkpoints/checkpoint-9400/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..f68aa34479714d4d6125654204d42d354c918af7 --- /dev/null +++ b/checkpoints/checkpoint-9400/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9343ddcb7dec3a117eb10c2201e7db802d48f9af9da689b0a3e53f70b93bca66 +size 324662984 diff --git a/checkpoints/checkpoint-9400/training_args.bin b/checkpoints/checkpoint-9400/training_args.bin new 
file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-9400/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-94000/config.json b/checkpoints/checkpoint-94000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-94000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-94000/model.safetensors b/checkpoints/checkpoint-94000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..9f7a00c991a1a87bfea7e9b58488e7c13216878a --- /dev/null +++ b/checkpoints/checkpoint-94000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1e398e06e5e3d6c7c262b3eb316ff793d52b76dc55ef22c0f6463171f47fdc23 +size 324662984 diff --git a/checkpoints/checkpoint-94000/training_args.bin b/checkpoints/checkpoint-94000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-94000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-9500/config.json b/checkpoints/checkpoint-9500/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-9500/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-9500/model.safetensors b/checkpoints/checkpoint-9500/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..9ea43e2b9c79db7f0913bced079a4a062c42ba4e 
--- /dev/null +++ b/checkpoints/checkpoint-9500/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:45a16754973ce6f09c28cf5e04164d99e92923310a544fbc139f87336b80a1da +size 324662984 diff --git a/checkpoints/checkpoint-9500/training_args.bin b/checkpoints/checkpoint-9500/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-9500/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-95000/config.json b/checkpoints/checkpoint-95000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-95000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-95000/model.safetensors b/checkpoints/checkpoint-95000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..b08c2c7e02f935b0f2b9502378c605875b6925e7 --- /dev/null +++ b/checkpoints/checkpoint-95000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:2b5c6ad358b6cafee37e392122607e93a40acb24922fdcad38e463ed5e71e7be +size 324662984 diff --git a/checkpoints/checkpoint-95000/training_args.bin b/checkpoints/checkpoint-95000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-95000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-9600/config.json b/checkpoints/checkpoint-9600/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-9600/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + 
"transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-9600/model.safetensors b/checkpoints/checkpoint-9600/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..225fb5d3adbf502c733bc6fb8e99f1037e495f17 --- /dev/null +++ b/checkpoints/checkpoint-9600/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:20a5eb0ecf9bf8cfa4b7c2c8ac7d3ff0dca22f7fb666921cd42ec0aea9b99dad +size 324662984 diff --git a/checkpoints/checkpoint-9600/training_args.bin b/checkpoints/checkpoint-9600/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-9600/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-96000/config.json b/checkpoints/checkpoint-96000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-96000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-96000/model.safetensors b/checkpoints/checkpoint-96000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..7cb242fd11c03da6cffd2d9128583145da30f73b --- /dev/null +++ b/checkpoints/checkpoint-96000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6f7bb06c40bbd07f01edff0c9cbb4e031d524608206186b21b959111792b8a23 +size 324662984 diff --git a/checkpoints/checkpoint-96000/training_args.bin b/checkpoints/checkpoint-96000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-96000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-9700/config.json b/checkpoints/checkpoint-9700/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-9700/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + 
"intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-9700/model.safetensors b/checkpoints/checkpoint-9700/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..5e4f2453fb28e3b0188dd5f7e1277d785b560965 --- /dev/null +++ b/checkpoints/checkpoint-9700/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b52cb8a2ad042c2b47a6c5e2deaa810bffbd7b722e78ae3188b5cabd2381af7c +size 324662984 diff --git a/checkpoints/checkpoint-9700/training_args.bin b/checkpoints/checkpoint-9700/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-9700/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-97000/config.json b/checkpoints/checkpoint-97000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-97000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-97000/model.safetensors b/checkpoints/checkpoint-97000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..61b765b3690f1b3b48427e6fbf33db4d7c656200 --- /dev/null +++ b/checkpoints/checkpoint-97000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:57879185a21dcdc333a8f65e379f5493dd7fd9d9689cff8e9907d6b5117eaa4a +size 324662984 diff --git a/checkpoints/checkpoint-97000/training_args.bin b/checkpoints/checkpoint-97000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-97000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-9800/config.json b/checkpoints/checkpoint-9800/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-9800/config.json @@ -0,0 +1,31 @@ +{ + 
"_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-9800/model.safetensors b/checkpoints/checkpoint-9800/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..abaaff3bcfef41f4486e42b4c1ad97582d74c25d --- /dev/null +++ b/checkpoints/checkpoint-9800/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:47b3dc51254f80e4ae92df7e9377dcc37920cef3c963fc365eed36d7d6fa9caa +size 324662984 diff --git a/checkpoints/checkpoint-9800/training_args.bin b/checkpoints/checkpoint-9800/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-9800/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-98000/config.json b/checkpoints/checkpoint-98000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-98000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-98000/model.safetensors b/checkpoints/checkpoint-98000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..ca3650318b754672fae9fed7bfe63cdfe356070c --- /dev/null +++ b/checkpoints/checkpoint-98000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:03e7e4cc0e2f227ea43189664068ab0b41596e079f561f22bf516a186246dc17 +size 324662984 diff --git a/checkpoints/checkpoint-98000/training_args.bin b/checkpoints/checkpoint-98000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-98000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-9900/config.json b/checkpoints/checkpoint-9900/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-9900/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-9900/model.safetensors b/checkpoints/checkpoint-9900/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..5e76234abe178791a075fce87492abe0048260cf --- /dev/null +++ b/checkpoints/checkpoint-9900/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e6fc948dc1b990f59f4a90efb5075f28467dc04bd2833b618cde45a44bb83306 +size 324662984 diff --git a/checkpoints/checkpoint-9900/training_args.bin b/checkpoints/checkpoint-9900/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-9900/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520 diff --git a/checkpoints/checkpoint-99000/config.json b/checkpoints/checkpoint-99000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b59b0b4c67b30baa7b62a3a87fc086e8dd1f8916 --- /dev/null +++ b/checkpoints/checkpoint-99000/config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "georgeyw/gpt-2-small-init-seed-5", + "architectures": [ + "GPTNeoXForCausalLM" + ], + "attention_bias": true, + "attention_dropout": 0.0, + "bos_token_id": 0, + "classifier_dropout": 0.1, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout": 0.0, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "layer_norm_epsilon": 1e-05, + "max_position_embeddings": 1024, + "model_type": "gpt_neox", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "rope_scaling": null, + "rotary_emb_base": 10000, + "rotary_pct": 0.25, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.38.2", + "use_cache": true, + "use_parallel_residual": true, + "vocab_size": 50304 +} diff --git a/checkpoints/checkpoint-99000/model.safetensors b/checkpoints/checkpoint-99000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..1979b0c2e657af842ded70c52ca5c01f68e37c47 --- /dev/null +++ b/checkpoints/checkpoint-99000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:2dc78b5e8fd06053bf0a893d46e69b76ed28ed6c44e898cb170840d14edc890f +size 324662984 diff --git 
a/checkpoints/checkpoint-99000/training_args.bin b/checkpoints/checkpoint-99000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..b13eeefe91ec34fffd57226cb30f75034f303d5f --- /dev/null +++ b/checkpoints/checkpoint-99000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07671a2ec06f1e2e1209ff63c69f68592015234ed38d9978ac4e0899bcfabf5 +size 6520
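
Every model.safetensors and training_args.bin blob added above is a Git LFS pointer file (spec version line, sha256 oid, byte size) rather than the payload itself, so a plain `git clone` must be followed by `git lfs pull` before any checkpoint is usable. Below is a minimal loading sketch, not part of the repository itself: it assumes a local clone with LFS objects fetched and the transformers release pinned in each config.json (4.38.2); the checkpoint directory name is one of those added in this diff, and any other checkpoint-* directory has the same layout.

    from transformers import GPTNeoXForCausalLM  # architecture named in each config.json

    # Load one checkpoint directly from the local clone. config.json describes
    # a 12-layer GPT-NeoX model with hidden_size 768, so this instantiates a
    # GPT-2-small-sized network and fills it from model.safetensors.
    model = GPTNeoXForCausalLM.from_pretrained("checkpoints/checkpoint-8400")
    print(model.config.vocab_size)  # 50304, matching config.json

Note that the repo ships no tokenizer files, so generation additionally requires loading a compatible tokenizer from elsewhere (the configs point back to the base model georgeyw/gpt-2-small-init-seed-5).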