weiyi01191 committed (verified)
Commit c8360a7 · 1 Parent(s): 4bdf408

Update test_configs/mistral_test_config.yaml

test_configs/mistral_test_config.yaml CHANGED
@@ -2,19 +2,19 @@ model:
  arch: mini_gpt4_llama_v2
  freeze_vit: True
  freeze_qformer: True
- max_txt_len: 512
+ max_txt_len: 384
  low_resource: True
  image_size: 224
  end_sym: "</s>"
  llama_model: "mistralai/Mistral-7B-Instruct-v0.2"
- ckpt: "checkpoints/video_mistral_all_checkpoint_last.pth"
+ ckpt: "checkpoints/video_mistral_checkpoint_last.pth"
  use_grad_checkpoint: True
  chat_template: True
  lora_r: 64
  lora_alpha: 16
- length: 50
+ length: 32
  use_grad_checkpoint_llm: True
- max_context_len: 7200
+ max_context_len: 4096
  architectures: [
  "MiniGPT4_Video"
  ]
@@ -22,9 +22,9 @@ model:
  drop_path_rate: 0
  img_size: 224
  model_type: "minigpt4_video"
- num_query_token: 32
+ num_query_token: 24
  prompt: ""
- torch_dtype: "float32"
+ torch_dtype: "float16"
  transformers_version: "4.42.3"
  vit_precision: "fp16"
  vit_model: "eva_clip_g"
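
For reference, a minimal sketch of reading back the values touched by this commit, assuming the file is loaded with OmegaConf (an assumption for illustration; MiniGPT4_Video may wrap this YAML in its own config loader):

from omegaconf import OmegaConf

# Hypothetical usage: load the test config and inspect the keys changed in this commit.
cfg = OmegaConf.load("test_configs/mistral_test_config.yaml")
model_cfg = cfg.model

print(model_cfg.max_txt_len)      # 384   (was 512)
print(model_cfg.ckpt)             # checkpoints/video_mistral_checkpoint_last.pth
print(model_cfg.length)           # 32    (was 50)
print(model_cfg.max_context_len)  # 4096  (was 7200)
print(model_cfg.num_query_token)  # 24    (was 32)
print(model_cfg.torch_dtype)      # float16 (was float32)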