Tommert25 committed on
Commit de1bbf5 · 1 Parent(s): 59024ab

Training in progress, epoch 1

config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "Tommert25/RobBERTBestModelOct11",
+  "_name_or_path": "pdelobelle/robbert-v2-dutch-base",
   "architectures": [
     "RobertaForTokenClassification"
   ],
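The config change points _name_or_path back to the base checkpoint pdelobelle/robbert-v2-dutch-base while keeping the RobertaForTokenClassification architecture. A minimal sketch (not part of this commit; the label set is left at the library default) of loading that base checkpoint for token classification:

# Sketch only: load the base Dutch RobBERT checkpoint referenced by the
# updated _name_or_path. The token-classification head is freshly initialized,
# since the base model was trained for masked language modelling.
from transformers import AutoTokenizer, RobertaForTokenClassification

model = RobertaForTokenClassification.from_pretrained("pdelobelle/robbert-v2-dutch-base")
tokenizer = AutoTokenizer.from_pretrained("pdelobelle/robbert-v2-dutch-base")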
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:d5eeb39e92ee52836c23bb275ca406fe48db70fc4d3905d9bcd0dfe66016edc0
+oid sha256:6b29c1d92893fd1659ddbf995ab894d70fb52b045a63fc06666be7933a836645
 size 464775913
special_tokens_map.json CHANGED
@@ -1,11 +1,4 @@
 {
-  "additional_special_tokens": [
-    "<s>",
-    "<pad>",
-    "</s>",
-    "<unk>",
-    "<mask>"
-  ],
   "bos_token": "<s>",
   "cls_token": "<s>",
   "eos_token": "</s>",
tokenizer_config.json CHANGED
@@ -42,30 +42,17 @@
     "special": true
   }
 },
-  "additional_special_tokens": [
-    "<s>",
-    "<pad>",
-    "</s>",
-    "<unk>",
-    "<mask>"
-  ],
+  "additional_special_tokens": [],
   "bos_token": "<s>",
   "clean_up_tokenization_spaces": true,
   "cls_token": "<s>",
   "eos_token": "</s>",
   "errors": "replace",
   "mask_token": "<mask>",
-  "max_length": 512,
   "model_max_length": 512,
-  "pad_to_multiple_of": null,
   "pad_token": "<pad>",
-  "pad_token_type_id": 0,
-  "padding_side": "right",
   "sep_token": "</s>",
-  "stride": 0,
   "tokenizer_class": "RobertaTokenizer",
   "trim_offsets": true,
-  "truncation_side": "right",
-  "truncation_strategy": "longest_first",
   "unk_token": "<unk>"
 }
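Together with the special_tokens_map.json change, this trims the tokenizer config: additional_special_tokens is now an empty list, and keys such as max_length, padding_side, stride, and truncation_strategy are no longer pinned in the file. A quick check, assuming the updated files sit in a local checkpoint directory (the "checkpoint" path below is hypothetical):

# Sketch only: verify the trimmed tokenizer settings after this commit.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("checkpoint")   # hypothetical local path holding the files above
print(tok.model_max_length)            # 512
print(tok.additional_special_tokens)   # [] after this commit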
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:37badb99ac87cee8a53f194cae92da78cdc56f627c79197670c31661eb2704fa
+oid sha256:01e9c07d9d6b67a9d08fcbf059c852d8b862293b8ddf83681fb0e572a5c255fa
 size 4091