{
  "_name_or_path": "OpenGVLab/InternVL2_5-1B",
  "architectures": [
    "InternVL2_5ForConditionalGeneration"
  ],
  "downsample_ratio": 0.5,
  "dynamic_image_size": true,
  "eos_token_id": 151645,
  "force_image_size": 448,
  "hidden_size": 896,
  "image_end_token_id": 151666,
  "image_start_token_id": 151665,
  "image_token_id": 151667,
  "max_dynamic_patch": 12,
  "min_dynamic_patch": 1,
  "model_type": "internvl2_5",
  "num_image_token": 256,
  "pixel_shuffle_version": "v2",
  "select_layer": -1,
  "text_config": {
    "_attn_implementation_autoset": true,
    "_name_or_path": "Qwen/Qwen2.5-0.5B-Instruct",
    "architectures": [
      "Qwen2ForCausalLM"
    ],
    "bos_token_id": 151643,
    "eos_token_id": 151645,
    "hidden_size": 896,
    "intermediate_size": 4864,
    "max_window_layers": 21,
    "model_type": "qwen2",
    "num_attention_heads": 14,
    "num_hidden_layers": 24,
    "num_key_value_heads": 2,
    "rope_theta": 1000000.0,
    "torch_dtype": "bfloat16",
    "use_bfloat16": true,
    "vocab_size": 151674
  },
  "tie_word_embeddings": false,
  "torch_dtype": "bfloat16",
  "transformers_version": "4.49.0.dev0",
  "use_thumbnail": true,
  "vision_config": {
    "_attn_implementation_autoset": true,
    "architectures": [
      "InternVL2_5VisionModel"
    ],
    "hidden_size": 1024,
    "image_size": 448,
    "initializer_factor": 1.0,
    "intermediate_size": 4096,
    "model_type": "internvl2_5",
    "norm_type": "layer_norm",
    "num_attention_heads": 16,
    "num_hidden_layers": 24,
    "qk_normalization": false,
    "qkv_bias": true,
    "torch_dtype": "bfloat16"
  }
}