ar5entum committed 4c706c8 (verified) · 1 Parent(s): a2d6c70

Training in progress, epoch 2

model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:34e841c12f03ea804feb76b3c2ba0f213bd1da939d3dc30ab60f181f123168aa
+oid sha256:d2fd6319f74363ad27ef97566cc4242931e1d6be346e7aa018a4c0642baaaf3e
 size 197603580
runs/Aug30_09-47-56_abb4112c7cf2/events.out.tfevents.1725011600.abb4112c7cf2.192.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ea3d3aa226152032508535f487baeee2e1674a3140a5b17bcbcea6d7e8f754fc
+size 891034
tokenizer.json CHANGED
@@ -1,6 +1,11 @@
 {
   "version": "1.0",
-  "truncation": null,
+  "truncation": {
+    "direction": "Right",
+    "max_length": 128,
+    "strategy": "LongestFirst",
+    "stride": 0
+  },
   "padding": null,
   "added_tokens": [
     {
@@ -298,7 +303,7 @@
   },
   {
     "SpecialToken": {
-      "id": "hi_IN",
+      "id": "en",
       "type_id": 0
     }
   }
@@ -324,7 +329,7 @@
   },
   {
     "SpecialToken": {
-      "id": "hi_IN",
+      "id": "en",
       "type_id": 0
     }
   }
@@ -339,13 +344,13 @@
       "</s>"
     ]
   },
-  "hi_IN": {
-    "id": "hi_IN",
+  "en": {
+    "id": "en",
     "ids": [
-      65545
+      304
     ],
     "tokens": [
-      "hi_IN"
+      "en"
     ]
   }
 }
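The tokenizer.json change above bakes a truncation policy into the serialized tokenizer: truncate on the right to 128 tokens, longest-first strategy, no stride. A minimal sketch of applying the same setting through the tokenizers library, assuming a locally available tokenizer.json (the file path is a placeholder, not part of this commit):

from tokenizers import Tokenizer

# Placeholder path to this repository's tokenizer.json.
tok = Tokenizer.from_file("tokenizer.json")

# Mirrors the serialized settings: direction "Right", max_length 128,
# strategy "LongestFirst", stride 0.
tok.enable_truncation(max_length=128, stride=0,
                      strategy="longest_first", direction="right")

print(tok.truncation)  # shows the active truncation parameters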
tokenizer_config.json CHANGED
@@ -279,8 +279,8 @@
   "model_max_length": 1024,
   "pad_token": "<pad>",
   "sep_token": "</s>",
-  "src_lang": "hi_IN",
-  "tgt_lang": "hi_IN",
+  "src_lang": "en",
+  "tgt_lang": "hi",
   "tokenizer_class": "MBartTokenizer",
   "trim_offsets": true,
   "unk_token": "<unk>"
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:b9ab7eba83a79e87db243724a12249417762143b24d1e6428dc9f11c29fa98ab
-size 5368
+oid sha256:b5cefd13d7e36f598c53e3da03fa705e93566caa007b9457506e837333b7d903
+size 4923