Upload 4 files
- special_tokens_map.json +1 -0
- tokenizer.json +0 -0
- tokenizer_config.json +6 -0
- trainer_config.yaml +11 -0
special_tokens_map.json
ADDED
@@ -0,0 +1 @@
+{}
tokenizer.json
ADDED
The diff for this file is too large to render. See raw diff.
tokenizer_config.json
ADDED
@@ -0,0 +1,6 @@
+{
+  "added_tokens_decoder": {},
+  "clean_up_tokenization_spaces": false,
+  "model_max_length": 1000000000000000019884624838656,
+  "tokenizer_class": "PreTrainedTokenizerFast"
+}
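
Since tokenizer_config.json declares a plain `PreTrainedTokenizerFast` with no added tokens and an effectively unbounded `model_max_length`, the uploaded tokenizer can be loaded with `AutoTokenizer`. A minimal sketch; the repository id below is a placeholder, since it is not named in this commit:

```python
from transformers import AutoTokenizer

# "your-namespace/your-tokenizer" is a placeholder for the repository this
# commit was pushed to; the diff does not show the actual repo id.
tokenizer = AutoTokenizer.from_pretrained("your-namespace/your-tokenizer")

print(type(tokenizer).__name__)                # PreTrainedTokenizerFast
print(tokenizer.model_max_length)              # ~1e30, i.e. effectively no limit
print(tokenizer.clean_up_tokenization_spaces)  # False
```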
trainer_config.yaml
ADDED
@@ -0,0 +1,11 @@
+cls: HF
+base_tokenizer_path: microsoft/Phi-3-mini-128k-instruct
+dataset:
+  path: allenai/c4
+  data_dir: fr
+  name: c4_fr
+  split: train
+  column: text
+target_num_hyper_token: 64
+batch_size: 1000
+total_training_size: 100000
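
The `dataset` block maps naturally onto `datasets.load_dataset` arguments for the French portion of allenai/c4, while `batch_size` and `total_training_size` bound how much text is fed to training. A rough sketch of how such a config could be consumed, under those assumptions; the actual trainer selected by `cls: HF`, and the roles of `target_num_hyper_token` and `name: c4_fr`, are not part of this commit:

```python
import yaml
from datasets import load_dataset
from transformers import AutoTokenizer

# Read the training configuration added in this commit.
with open("trainer_config.yaml") as f:
    cfg = yaml.safe_load(f)

# Base tokenizer the new tokenizer is derived from (Phi-3-mini-128k-instruct).
base_tokenizer = AutoTokenizer.from_pretrained(cfg["base_tokenizer_path"])

# Stream the French C4 data described by the `dataset` section.
dataset = load_dataset(
    cfg["dataset"]["path"],
    data_dir=cfg["dataset"]["data_dir"],
    split=cfg["dataset"]["split"],
    streaming=True,
)

def batch_iterator():
    """Yield lists of `batch_size` texts, stopping after `total_training_size` examples."""
    batch, seen = [], 0
    for example in dataset:
        batch.append(example[cfg["dataset"]["column"]])
        seen += 1
        if len(batch) == cfg["batch_size"]:
            yield batch
            batch = []
        if seen >= cfg["total_training_size"]:
            break
    if batch:
        yield batch

for texts in batch_iterator():
    # The actual training step (and how `target_num_hyper_token: 64` is used)
    # lives in the project's `cls: HF` trainer, which is not shown in this diff.
    pass
```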