Commit 21861c8 (verified) · Parent: 302116c

Synchronizing local compiler cache.

neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.2.0.dev1/inference/mixtral/dacorvo/Mixtral-tiny/678cf23ea51877d5956c.json ADDED
@@ -0,0 +1 @@
+ {"_entry_class": "SingleModelCacheEntry", "_model_id": "dacorvo/Mixtral-tiny", "_task": "text-generation", "architectures": ["MixtralForCausalLM"], "attention_dropout": 0.0, "head_dim": 32, "hidden_act": "silu", "hidden_size": 1024, "initializer_range": 0.02, "intermediate_size": 3584, "max_position_embeddings": 1024, "model_type": "mixtral", "neuron": {"auto_cast_type": "fp16", "batch_size": 4, "checkpoint_id": "dacorvo/Mixtral-tiny", "checkpoint_revision": "c557ba205ddff6ea911f4719e0d543d6c08356b6", "compiler_type": "neuronx-cc", "compiler_version": "2.16.372.0+4a9b2326", "num_cores": 2, "sequence_length": 1024, "task": "text-generation"}, "num_attention_heads": 32, "num_experts_per_tok": 2, "num_hidden_layers": 2, "num_key_value_heads": 8, "num_local_experts": 8, "output_router_logits": false, "rms_norm_eps": 1e-05, "rope_theta": 10000.0, "router_aux_loss_coef": 0.001, "router_jitter_noise": 0.0, "sliding_window": 4096, "tie_word_embeddings": false, "use_cache": true, "vocab_size": 32000}