Factral committed on
Commit
de487b2
·
verified ·
1 Parent(s): cdea4b3

Training in progress epoch 0

Browse files
Files changed (4) hide show
  1. .gitignore +2 -0
  2. all_results.json +1 -0
  3. config.json +51 -0
  4. model.safetensors +3 -0
.gitignore ADDED
@@ -0,0 +1,2 @@
 
 
 
1
+ step_*
2
+ epoch_*
all_results.json ADDED
@@ -0,0 +1 @@
 
 
1
+ {"eval_accuracy": 0.9287257019438445, "eval_precision": 0.9015978057889823, "eval_recall": 0.81784848506806, "eval_f1": 0.8523381967826412, "eval_specificity": 0.9481347199481774, "eval_train_loss": 0.0594653713292089, "eval_epoch": 99, "eval_step": 11600}
config.json ADDED
@@ -0,0 +1,51 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "_name_or_path": "timm/vit_base_patch16_224.augreg_in21k_ft_in1k",
3
+ "architecture": "vit_base_patch16_224",
4
+ "architectures": [
5
+ "TimmWrapperForImageClassification"
6
+ ],
7
+ "do_pooling": true,
8
+ "finetuning_task": "image-classification",
9
+ "global_pool": "token",
10
+ "initializer_range": 0.02,
11
+ "label_names": [
12
+ "fermentado",
13
+ "hongo",
14
+ "insecto",
15
+ "insufi_fermen",
16
+ "pizarroso",
17
+ "violeta"
18
+ ],
19
+ "model_type": "timm_wrapper",
20
+ "num_classes": 6,
21
+ "num_features": 768,
22
+ "pretrained_cfg": {
23
+ "classifier": "head",
24
+ "crop_mode": "center",
25
+ "crop_pct": 0.9,
26
+ "custom_load": true,
27
+ "first_conv": "patch_embed.proj",
28
+ "fixed_input_size": true,
29
+ "input_size": [
30
+ 3,
31
+ 224,
32
+ 224
33
+ ],
34
+ "interpolation": "bicubic",
35
+ "mean": [
36
+ 0.5,
37
+ 0.5,
38
+ 0.5
39
+ ],
40
+ "pool_size": null,
41
+ "std": [
42
+ 0.5,
43
+ 0.5,
44
+ 0.5
45
+ ],
46
+ "tag": "augreg_in21k_ft_in1k"
47
+ },
48
+ "problem_type": "single_label_classification",
49
+ "torch_dtype": "float32",
50
+ "transformers_version": "4.48.0"
51
+ }
model.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:fa193702912feb0892e0d63344c803aaafd6452c48ba3fa65e6da9a9352d1a60
3
+ size 343227200