{ "_name_or_path": "Vividbot/vivid-4b", "architectures": [ "MPTForCausalLM" ], "attention_bias": false, "attention_dropout": 0.0, "attn_config": { "alibi": true, "alibi_bias_max": 8, "attn_impl": "torch", "attn_pdrop": 0.0, "attn_type": "multihead_attention", "attn_uses_sequence_id": false, "clip_qkv": null, "prefix_lm": false, "qk_ln": false, "softmax_scale": null }, "auto_map": { "AutoConfig": "vinai/PhoGPT-4B-Chat--configuration_mpt.MPTConfig", "AutoModelForCausalLM": "vinai/PhoGPT-4B-Chat--modeling_mpt.MPTForCausalLM" }, "bos_token_id": 1, "d_model": 3072, "emb_pdrop": 0.0, "embedding_fraction": 1.0, "eos_token_id": 2, "expansion_ratio": 4, "freeze_mm_mlp_adapter": false, "hidden_act": "silu", "hidden_size": 4096, "init_config": { "emb_init_std": null, "emb_init_uniform_lim": null, "fan_mode": "fan_in", "init_div_is_residual": true, "init_gain": 0.0, "init_nonlinearity": "relu", "init_std": null, "name": "kaiming_normal_", "verbose": 0 }, "init_device": "cpu", "initializer_range": 0.02, "intermediate_size": 11008, "learned_pos_emb": true, "logit_scale": null, "max_position_embeddings": 2048, "max_seq_len": 8192, "mlp_bias": false, "mm_hidden_size": 1024, "mm_use_im_start_end": true, "mm_vision_select_layer": -2, "mm_vision_tower": "openai/clip-vit-large-patch14", "model_type": "vivid", "n_heads": 24, "n_layers": 32, "no_bias": false, "norm_type": "low_precision_layernorm", "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 32, "pretraining_tp": 1, "resid_pdrop": 0.0, "rms_norm_eps": 1e-06, "rope_scaling": null, "rope_theta": 10000.0, "sep_image_conv_front": false, "tie_word_embeddings": false, "torch_dtype": "float16", "transformers_version": "4.41.2", "tune_mm_mlp_adapter": true, "use_cache": true, "use_delta_transformer": false, "use_mm_proj": true, "use_patch_importance_pooling": false, "verbose": 0, "vocab_size": 20486 }