Transformers · remyx
salma-remyx committed on
Commit a804134 · verified · 1 parent: 8e18da9

Upload all files and subdirectories

checkpoints/latest-checkpoint.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:99338d823afb26cc19e521f91177b2e74854f6c5fa2b78f8e6e92eb7a3b52d2a
+oid sha256:35a3bc428feb88af6961ed0ab962dafa99ba7bb59daf2fac52deaeaebab5e94e
 size 32226047291
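Only the LFS object id changed: the new checkpoint is a different blob with the same 32 GB size, so a size check alone won't catch a stale download. A minimal verification sketch (standard library only; `sha256_of` is a hypothetical helper, and the path assumes the weights were fetched with `git lfs pull`):

```python
import hashlib

# Expected oid from the updated LFS pointer above.
EXPECTED = "35a3bc428feb88af6961ed0ab962dafa99ba7bb59daf2fac52deaeaebab5e94e"

def sha256_of(path: str, chunk_size: int = 1 << 20) -> str:
    """Hash the file in 1 MiB chunks so the ~32 GB checkpoint never sits in RAM."""
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()

assert sha256_of("checkpoints/latest-checkpoint.pt") == EXPECTED
```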
config.json CHANGED
@@ -1,16 +1,16 @@
 {
   "dataset": {
     "align_stage_components": [
-      "/home/ubuntu/spacellava_data/data/dataset.json",
-      "/home/ubuntu/spacellava_data/data"
+      "download/llava-laion-cc-sbu-558k/chat.json",
+      "download/llava-laion-cc-sbu-558k"
     ],
-    "dataset_id": "spacellava",
-    "dataset_root_dir": "/home/ubuntu/spacellava_data/data",
+    "dataset_id": "llava-lrv-spacellava",
+    "dataset_root_dir": "/home/ubuntu/prismatic-vlms",
     "finetune_stage_components": [
-      "/home/ubuntu/spacellava_data/data/dataset.json",
-      "/home/ubuntu/spacellava_data/data"
+      "download/llava-v1.5-instruct/llava_v1_5_lrv_mix1008k_spacellava.json",
+      "download/llava-v1.5-instruct"
     ],
-    "type": "spacellava"
+    "type": "llava-lrv-spacellava"
   },
   "hf_token": ".hf_token",
   "model": {
@@ -40,13 +40,13 @@
     "image_resize_strategy": "letterbox",
     "llm_backbone_id": "llama3-1-8b-pure",
     "llm_max_length": 2048,
-    "model_id": "llama3-based-224-4epoch",
+    "model_id": "llama3-based",
     "reduce_in_full_precision": false,
     "type": "one-stage+7b",
     "vision_backbone_id": "dinosiglip-vit-so-224px"
   },
   "pretrained_checkpoint": null,
-  "run_id": "spacellava+llama3-based-224-4epoch+stage-finetune+x7",
+  "run_id": "llava-lrv-spacellava+llama3-based+stage-finetune+x7",
   "run_root_dir": "runs",
   "seed": 7,
   "stage": "finetune",
@@ -56,4 +56,4 @@
   ],
   "wandb_entity": "smellslikeml",
   "wandb_project": "prismatic"
-}
+}
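The dataset block now resolves the llava-lrv-spacellava mixture via relative `download/` paths under the new `dataset_root_dir` (/home/ubuntu/prismatic-vlms) rather than absolute spacellava paths, and the model and run ids drop the `-224-4epoch` suffix. A quick sanity check, sketched with the standard library only, that a local clone picked up the new values:

```python
import json

with open("config.json") as f:
    cfg = json.load(f)

# Expected values are taken from the "+" lines in the diff above.
assert cfg["dataset"]["dataset_id"] == "llava-lrv-spacellava"
assert cfg["dataset"]["dataset_root_dir"] == "/home/ubuntu/prismatic-vlms"
assert cfg["model"]["model_id"] == "llama3-based"
assert cfg["run_id"] == "llava-lrv-spacellava+llama3-based+stage-finetune+x7"
```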
config.yaml CHANGED
@@ -1,13 +1,13 @@
 dataset:
   align_stage_components:
-  - /home/ubuntu/spacellava_data/data/dataset.json
-  - /home/ubuntu/spacellava_data/data
-  dataset_id: spacellava
-  dataset_root_dir: /home/ubuntu/spacellava_data/data
+  - download/llava-laion-cc-sbu-558k/chat.json
+  - download/llava-laion-cc-sbu-558k
+  dataset_id: llava-lrv-spacellava
+  dataset_root_dir: /home/ubuntu/prismatic-vlms
   finetune_stage_components:
-  - /home/ubuntu/spacellava_data/data/dataset.json
-  - /home/ubuntu/spacellava_data/data
-  type: spacellava
+  - download/llava-v1.5-instruct/llava_v1_5_lrv_mix1008k_spacellava.json
+  - download/llava-v1.5-instruct
+  type: llava-lrv-spacellava
 hf_token: .hf_token
 model:
   align_epochs: 1
@@ -36,12 +36,12 @@ model:
   image_resize_strategy: letterbox
   llm_backbone_id: llama3-1-8b-pure
   llm_max_length: 2048
-  model_id: llama3-based-224-4epoch
+  model_id: llama3-based
   reduce_in_full_precision: false
   type: one-stage+7b
   vision_backbone_id: dinosiglip-vit-so-224px
 pretrained_checkpoint: null
-run_id: spacellava+llama3-based-224-4epoch+stage-finetune+x7
+run_id: llava-lrv-spacellava+llama3-based+stage-finetune+x7
 run_root_dir: runs
 seed: 7
 stage: finetune
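config.yaml duplicates the settings in config.json, so the two can silently drift if only one is edited. A small cross-check sketch (assumes PyYAML is available; nothing in this diff shows the repo's own tooling for this):

```python
import json

import yaml  # PyYAML, assumed installed

with open("config.json") as f:
    cfg_json = json.load(f)
with open("config.yaml") as f:
    cfg_yaml = yaml.safe_load(f)

# Both files should agree on the fields this commit touched.
for key in ("dataset", "run_id"):
    assert cfg_yaml[key] == cfg_json[key], f"configs disagree on {key!r}"
assert cfg_yaml["model"]["model_id"] == cfg_json["model"]["model_id"] == "llama3-based"
```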
llava-lrv-spacellava+llama3-based+stage-finetune+x7.jsonl ADDED
The diff for this file is too large to render. See raw diff
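This .jsonl file is the per-step log for the new run (the `trackers` list in run-metrics.jsonl below includes "jsonl"). Its fields aren't visible in this diff, so the sketch below only assumes one JSON object per line and reports the keys of the first record:

```python
import json

path = "llava-lrv-spacellava+llama3-based+stage-finetune+x7.jsonl"
with open(path) as f:
    for line in f:
        record = json.loads(line)     # one JSON object per line (assumed)
        print(sorted(record.keys()))  # inspect field names before plotting anything
        break
```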
 
run-metrics.jsonl CHANGED
@@ -1 +1 @@
-{"hparams": {"dataset": {"align_stage_components": ["/home/ubuntu/spacellava_data/data/dataset.json", "/home/ubuntu/spacellava_data/data"], "dataset_id": "spacellava", "dataset_root_dir": "/home/ubuntu/spacellava_data/data", "finetune_stage_components": ["/home/ubuntu/spacellava_data/data/dataset.json", "/home/ubuntu/spacellava_data/data"], "type": "spacellava"}, "hf_token": ".hf_token", "model": {"align_epochs": 1, "align_global_batch_size": 4, "align_learning_rate": 0.001, "align_lr_scheduler_type": "linear-warmup+cosine-decay", "align_max_grad_norm": 1.0, "align_max_steps": null, "align_per_device_batch_size": 1, "align_train_strategy": "fsdp-shard-grad-op", "align_warmup_ratio": 0.03, "align_weight_decay": 0.0, "arch_specifier": "no-align+gelu-mlp", "enable_gradient_checkpointing": true, "enable_mixed_precision_training": true, "finetune_epochs": 3, "finetune_global_batch_size": 128, "finetune_learning_rate": 2e-06, "finetune_lr_scheduler_type": "linear-warmup+cosine-decay", "finetune_max_grad_norm": 1.0, "finetune_max_steps": null, "finetune_per_device_batch_size": 4, "finetune_train_strategy": "fsdp-full-shard", "finetune_warmup_ratio": 0.03, "finetune_weight_decay": 0.1, "image_resize_strategy": "letterbox", "llm_backbone_id": "llama3-1-8b-pure", "llm_max_length": 2048, "model_id": "llama3-based-224-4epoch", "reduce_in_full_precision": false, "type": "one-stage+7b", "vision_backbone_id": "dinosiglip-vit-so-224px"}, "pretrained_checkpoint": null, "run_id": "spacellava+llama3-based-224-4epoch+stage-finetune+x7", "run_root_dir": "runs", "seed": 7, "stage": "finetune", "trackers": ["jsonl", "wandb"], "wandb_entity": "smellslikeml", "wandb_project": "prismatic"}, "run_id": "spacellava+llama3-based-224-4epoch+stage-finetune+x7"}
+{"hparams": {"dataset": {"align_stage_components": ["download/llava-laion-cc-sbu-558k/chat.json", "download/llava-laion-cc-sbu-558k"], "dataset_id": "llava-lrv-spacellava", "dataset_root_dir": "/home/ubuntu/prismatic-vlms", "finetune_stage_components": ["download/llava-v1.5-instruct/llava_v1_5_lrv_mix1008k_spacellava.json", "download/llava-v1.5-instruct"], "type": "llava-lrv-spacellava"}, "hf_token": ".hf_token", "model": {"align_epochs": 1, "align_global_batch_size": 4, "align_learning_rate": 0.001, "align_lr_scheduler_type": "linear-warmup+cosine-decay", "align_max_grad_norm": 1.0, "align_max_steps": null, "align_per_device_batch_size": 1, "align_train_strategy": "fsdp-shard-grad-op", "align_warmup_ratio": 0.03, "align_weight_decay": 0.0, "arch_specifier": "no-align+gelu-mlp", "enable_gradient_checkpointing": true, "enable_mixed_precision_training": true, "finetune_epochs": 3, "finetune_global_batch_size": 128, "finetune_learning_rate": 2e-06, "finetune_lr_scheduler_type": "linear-warmup+cosine-decay", "finetune_max_grad_norm": 1.0, "finetune_max_steps": null, "finetune_per_device_batch_size": 4, "finetune_train_strategy": "fsdp-full-shard", "finetune_warmup_ratio": 0.03, "finetune_weight_decay": 0.1, "image_resize_strategy": "letterbox", "llm_backbone_id": "llama3-1-8b-pure", "llm_max_length": 2048, "model_id": "llama3-based", "reduce_in_full_precision": false, "type": "one-stage+7b", "vision_backbone_id": "dinosiglip-vit-so-224px"}, "pretrained_checkpoint": null, "run_id": "llava-lrv-spacellava+llama3-based+stage-finetune+x7", "run_root_dir": "runs", "seed": 7, "stage": "finetune", "trackers": ["jsonl", "wandb"], "wandb_entity": "smellslikeml", "wandb_project": "prismatic"}, "run_id": "llava-lrv-spacellava+llama3-based+stage-finetune+x7"}