Training in progress, step 500
Files changed:
- config.json +3 -3
- model.safetensors +2 -2
- runs/Dec05_17-59-04_d9d9476b9d67/events.out.tfevents.1733421547.d9d9476b9d67.401.0 +3 -0
- runs/Dec05_17-59-44_d9d9476b9d67/events.out.tfevents.1733421585.d9d9476b9d67.401.1 +3 -0
- runs/Dec05_17-59-54_d9d9476b9d67/events.out.tfevents.1733421594.d9d9476b9d67.401.2 +3 -0
- tokenizer.json +10 -1
- training_args.bin +2 -2
config.json
CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "google/long-t5-
+  "_name_or_path": "google/long-t5-tglobal-base",
   "architectures": [
     "LongT5ForConditionalGeneration"
   ],
@@ -9,7 +9,7 @@
   "decoder_start_token_id": 0,
   "dense_act_fn": "gelu_new",
   "dropout_rate": 0.1,
-  "encoder_attention_type": "
+  "encoder_attention_type": "transient-global",
   "eos_token_id": 1,
   "feed_forward_proj": "gated-gelu",
   "global_block_size": 16,
@@ -29,7 +29,7 @@
   "relative_attention_num_buckets": 32,
   "tie_word_embeddings": false,
   "torch_dtype": "float32",
-  "transformers_version": "4.46.
+  "transformers_version": "4.46.3",
   "use_cache": true,
   "vocab_size": 32128
 }
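The config now records the base checkpoint (google/long-t5-tglobal-base), its transient-global encoder attention, and the transformers version used (4.46.3). A minimal sketch of loading this checkpoint with transformers; the repo id below is a placeholder, not a name taken from this commit:

```python
from transformers import AutoConfig, AutoTokenizer, LongT5ForConditionalGeneration

# Placeholder repo id (assumption); use the actual Hub repo or a local checkout instead.
repo_id = "your-username/long-t5-tglobal-base-step-500"

config = AutoConfig.from_pretrained(repo_id)
print(config.encoder_attention_type)  # "transient-global", per the diff above

tokenizer = AutoTokenizer.from_pretrained(repo_id)
model = LongT5ForConditionalGeneration.from_pretrained(repo_id)
```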
model.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:5262ebc1956a5e5e53b4a46ffdb877dee40648cb4599bb18c1b98aef9b7da640
+size 1187780840
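model.safetensors is tracked as a Git LFS pointer; the hash and size above describe the actual weight file. A rough sketch for inspecting the weights once they have been pulled (e.g. with `git lfs pull`):

```python
from safetensors.torch import load_file

# Assumes the real file has been fetched, not just the LFS pointer.
state_dict = load_file("model.safetensors")
print(len(state_dict), "tensors")

# With torch_dtype float32, parameter count is roughly file size / 4 bytes.
print(sum(t.numel() for t in state_dict.values()), "parameters")
```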
runs/Dec05_17-59-04_d9d9476b9d67/events.out.tfevents.1733421547.d9d9476b9d67.401.0
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:bb5d75ca27173e129d58bce63f1b5ee5ace78fff67aca569b064a68bfd71fb56
+size 5445
runs/Dec05_17-59-44_d9d9476b9d67/events.out.tfevents.1733421585.d9d9476b9d67.401.1
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b6cffe45ba9c0481edceb681385037e080189360c9b0a1f86d28f78a6f5cd7a0
+size 5445
runs/Dec05_17-59-54_d9d9476b9d67/events.out.tfevents.1733421594.d9d9476b9d67.401.2
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5e2c05b0cb99a7cce77bf89926b74453c083178b1d37bbb26e62fe2bc97af0fd
+size 5656
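The three runs/ files are TensorBoard event logs written during this training run. They can be viewed with `tensorboard --logdir runs`, or read programmatically; a sketch below, where the scalar tag name is an assumption (the Trainer typically logs tags such as train/loss):

```python
from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

# Point at one run directory (assumes the event files have been pulled locally).
acc = EventAccumulator("runs/Dec05_17-59-54_d9d9476b9d67")
acc.Reload()

print(acc.Tags()["scalars"])             # list the scalar tags actually present
for event in acc.Scalars("train/loss"):  # tag name is an assumption
    print(event.step, event.value)
```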
tokenizer.json
CHANGED
@@ -6,7 +6,16 @@
     "strategy": "LongestFirst",
     "stride": 0
   },
-  "padding": 
+  "padding": {
+    "strategy": {
+      "Fixed": 512
+    },
+    "direction": "Right",
+    "pad_to_multiple_of": null,
+    "pad_id": 0,
+    "pad_type_id": 0,
+    "pad_token": "<pad>"
+  },
   "added_tokens": [
     {
       "id": 0,
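The new "padding" block makes the tokenizer pad every sequence on the right to a fixed length of 512 tokens, using <pad> (id 0). A sketch of setting the same behaviour on a tokenizers Tokenizer loaded from this file:

```python
from tokenizers import Tokenizer

tok = Tokenizer.from_file("tokenizer.json")

# Mirrors the "padding" block added above: fixed length 512, right padding, <pad> id 0.
tok.enable_padding(direction="right", pad_id=0, pad_type_id=0, pad_token="<pad>", length=512)

enc = tok.encode("a short input")
print(len(enc.ids))  # 512
```

With a transformers fast tokenizer, the roughly equivalent call is `tokenizer(text, padding="max_length", max_length=512)`.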
training_args.bin
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:5cf3645329d5931c4d7daa763a415555ae15d0f7cd1141faadbb249ecf09de18
+size 5496
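training_args.bin is the pickled TrainingArguments object saved by the Trainer. A hedged sketch for inspecting it; the attributes printed are standard TrainingArguments fields, not values read from this commit:

```python
import torch

# Pickled transformers.TrainingArguments; weights_only=False is needed on recent
# PyTorch versions because this is not a plain tensor file.
args = torch.load("training_args.bin", weights_only=False)
print(args.per_device_train_batch_size, args.learning_rate, args.num_train_epochs)
```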