{
    "module": "keras_hub.src.models.mixtral.mixtral_causal_lm",
    "class_name": "MixtralCausalLM",
    "config": {
        "backbone": {
            "module": "keras_hub.src.models.mixtral.mixtral_backbone",
            "class_name": "MixtralBackbone",
            "config": {
                "name": "mixtral_backbone",
                "trainable": true,
                "vocabulary_size": 32000,
                "num_layers": 32,
                "num_query_heads": 32,
                "hidden_dim": 4096,
                "intermediate_dim": 14336,
                "num_experts": 8,
                "top_k": 2,
                "router_jitter_noise": 0.0,
                "rope_max_wavelength": 1000000.0,
                "rope_scaling_factor": 1.0,
                "num_key_value_heads": 8,
                "router_aux_loss_coef": 0.02,
                "sliding_window": null,
                "layer_norm_epsilon": 1e-05,
                "dropout": 0
            },
            "registered_name": "keras_hub>MixtralBackbone"
        },
        "preprocessor": {
            "module": "keras_hub.src.models.mixtral.mixtral_causal_lm_preprocessor",
            "class_name": "MixtralCausalLMPreprocessor",
            "config": {
                "name": "mixtral_causal_lm_preprocessor_2",
                "trainable": true,
                "dtype": {
                    "module": "keras",
                    "class_name": "DTypePolicy",
                    "config": {
                        "name": "float32"
                    },
                    "registered_name": null
                },
                "tokenizer": {
                    "module": "keras_hub.src.models.mixtral.mixtral_tokenizer",
                    "class_name": "MixtralTokenizer",
                    "config": {
                        "name": "mixtral_tokenizer",
                        "trainable": true,
                        "dtype": {
                            "module": "keras",
                            "class_name": "DTypePolicy",
                            "config": {
                                "name": "int32"
                            },
                            "registered_name": null
                        },
                        "config_file": "tokenizer.json",
                        "proto": null,
                        "sequence_length": null,
                        "add_bos": false,
                        "add_eos": false
                    },
                    "registered_name": "keras_hub>MixtralTokenizer"
                },
                "config_file": "preprocessor.json",
                "sequence_length": 1024,
                "add_start_token": true,
                "add_end_token": true
            },
            "registered_name": "keras_hub>MixtralCausalLMPreprocessor"
        },
        "name": "mixtral_causal_lm"
    },
    "registered_name": "keras_hub>MixtralCausalLM"
}