End of training

- README.md (+6 -6)
- emissions.csv (+1 -1)
- model.safetensors (+1 -1)
- tokenizer.json (+16 -2)
README.md CHANGED

@@ -16,7 +16,7 @@ should probably proofread and complete it, then remove this comment. -->
 
 This model is a fine-tuned version of [gpt2](https://huggingface.co/gpt2) on an unknown dataset.
 It achieves the following results on the evaluation set:
-- Loss:
+- Loss: 1.8101
 
 ## Model description
 
@@ -46,11 +46,11 @@ The following hyperparameters were used during training:
 
 ### Training results
 
-| Training Loss | Epoch | Step
-
-
-
-
+| Training Loss | Epoch | Step | Validation Loss |
+|:-------------:|:-----:|:-----:|:---------------:|
+| 1.0011 | 1.0 | 24037 | 1.9369 |
+| 0.9472 | 2.0 | 48074 | 1.8378 |
+| 0.9145 | 3.0 | 72111 | 1.8101 |
 
 
 ### Framework versions
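The evaluation loss reported in the updated model card is the usual token-level cross-entropy of the causal LM head, so it maps to perplexity via exp(loss). A minimal sketch of using the numbers above, assuming a placeholder repo id `your-username/gpt2-finetuned` (not the actual repository name):

```python
import math

from transformers import AutoModelForCausalLM, AutoTokenizer

# Final evaluation loss from the updated model card; perplexity = exp(cross-entropy).
eval_loss = 1.8101
print(f"eval perplexity ≈ {math.exp(eval_loss):.2f}")  # ≈ 6.11

# Loading the fine-tuned checkpoint (the repo id below is a placeholder).
tokenizer = AutoTokenizer.from_pretrained("your-username/gpt2-finetuned")
model = AutoModelForCausalLM.from_pretrained("your-username/gpt2-finetuned")
```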
emissions.csv CHANGED

@@ -1,2 +1,2 @@
 timestamp,project_name,run_id,experiment_id,duration,emissions,emissions_rate,cpu_power,gpu_power,ram_power,cpu_energy,gpu_energy,ram_energy,energy_consumed,country_name,country_iso_code,region,cloud_provider,cloud_region,os,python_version,codecarbon_version,cpu_count,cpu_model,gpu_count,gpu_model,longitude,latitude,ram_total_size,tracking_mode,on_cloud,pue
-2025-02-
+2025-02-25T12:05:29,codecarbon,e0942870-54e4-4907-aedf-43a2d2ad0215,5b0fa12a-3dd7-45bb-9766-cc326314d9f1,21151.06853593886,0.3749755749400021,1.772844593183848e-05,42.5,263.6295483964576,94.34470081329346,0.2495480891734155,2.7587781606318345,0.553947055534731,3.562273305339978,Luxembourg,LUX,luxembourg,,,Linux-6.8.0-48-generic-x86_64-with-glibc2.39,3.12.3,2.8.3,64,AMD EPYC 9124 16-Core Processor,2,2 x NVIDIA L40S,6.1294,49.6113,251.58586883544922,machine,N,1.0
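The single data row in emissions.csv is the per-run record that CodeCarbon (version 2.8.3, per the row itself) appends when an EmissionsTracker wraps the training job. A minimal sketch of how such a row is typically produced; `train()` is a placeholder for the actual fine-tuning run:

```python
from codecarbon import EmissionsTracker

# One row per tracked run is appended to emissions.csv (the default output file).
tracker = EmissionsTracker(project_name="codecarbon", output_file="emissions.csv")
tracker.start()
try:
    train()  # placeholder for the actual fine-tuning run (e.g. Trainer.train())
finally:
    emissions_kg = tracker.stop()  # estimated emissions for the run, in kg CO2eq
    print(f"Estimated emissions: {emissions_kg:.4f} kg CO2eq")
```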
model.safetensors CHANGED

@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:6946ed62dd5eed0f49cceb439be2032d0d2a2231d66023b07cab9a055156a5bf
 size 497774208
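The model.safetensors entry is a Git LFS pointer rather than the weights themselves: it records the SHA-256 digest (`oid`) and byte size of the real file. A small sketch, under the assumption that the weights have already been downloaded locally, for checking a file against the new pointer:

```python
import hashlib

# Hash the downloaded weights in chunks and compare against the oid in the LFS pointer.
expected = "6946ed62dd5eed0f49cceb439be2032d0d2a2231d66023b07cab9a055156a5bf"

h = hashlib.sha256()
with open("model.safetensors", "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        h.update(chunk)

print(h.hexdigest() == expected)
```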
tokenizer.json CHANGED

@@ -1,7 +1,21 @@
 {
   "version": "1.0",
-  "truncation":
-
+  "truncation": {
+    "direction": "Right",
+    "max_length": 512,
+    "strategy": "LongestFirst",
+    "stride": 0
+  },
+  "padding": {
+    "strategy": {
+      "Fixed": 512
+    },
+    "direction": "Right",
+    "pad_to_multiple_of": null,
+    "pad_id": 50256,
+    "pad_type_id": 0,
+    "pad_token": "<|endoftext|>"
+  },
   "added_tokens": [
     {
       "id": 50256,
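The new `truncation` and `padding` objects are what the Hugging Face tokenizers library serializes once both features are enabled on the tokenizer before saving. A minimal sketch that would produce an equivalent tokenizer.json (file paths are illustrative):

```python
from tokenizers import Tokenizer

tok = Tokenizer.from_file("tokenizer.json")

# Right-side truncation to 512 tokens with the "LongestFirst" strategy and no stride.
tok.enable_truncation(max_length=512, stride=0, strategy="longest_first", direction="right")

# Fixed-length padding to 512, reusing GPT-2's end-of-text token (id 50256) as the pad token.
tok.enable_padding(length=512, direction="right", pad_id=50256, pad_type_id=0,
                   pad_token="<|endoftext|>")

tok.save("tokenizer.json")
```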