optimum-internal-testing-user committed on
Commit b8a35ec · verified · 1 Parent(s): 52021f8

Synchronizing local compiler cache.

This view is limited to 50 files because the commit contains too many changes.

Files changed (50)
  1. .gitattributes +36 -0
  2. neuronxcc-2.18.121.0+9e31e41a/0_REGISTRY/0.3.0.dev2/granite/hf-internal-testing/tiny-random-GraniteForCausalLM/509463403db38e0fee4e.json +73 -0
  3. neuronxcc-2.18.121.0+9e31e41a/0_REGISTRY/0.3.0.dev2/granite/hf-internal-testing/tiny-random-GraniteForCausalLM/d31ad1959dd2765771c4.json +73 -0
  4. neuronxcc-2.18.121.0+9e31e41a/0_REGISTRY/0.3.0.dev2/granite/hf-internal-testing/tiny-random-GraniteForCausalLM/e4aca6f4dcde393f9fd2.json +73 -0
  5. neuronxcc-2.18.121.0+9e31e41a/0_REGISTRY/0.3.0.dev2/llama/llamafactory/tiny-random-Llama-3/5968d5257cc3367062f1.json +77 -0
  6. neuronxcc-2.18.121.0+9e31e41a/0_REGISTRY/0.3.0.dev2/llama/llamafactory/tiny-random-Llama-3/78b0302cf20f0aaea136.json +77 -0
  7. neuronxcc-2.18.121.0+9e31e41a/0_REGISTRY/0.3.0.dev2/llama/llamafactory/tiny-random-Llama-3/dc140bf4a30b563525db.json +77 -0
  8. neuronxcc-2.18.121.0+9e31e41a/0_REGISTRY/0.3.0.dev2/llama/unsloth/Llama-3.2-1B-Instruct/0c1a326b7a025a55c9b3.json +78 -0
  9. neuronxcc-2.18.121.0+9e31e41a/0_REGISTRY/0.3.0.dev2/mixtral/dacorvo/Mixtral-tiny/06f279f394a1e3fbaf6f.json +73 -0
  10. neuronxcc-2.18.121.0+9e31e41a/0_REGISTRY/0.3.0.dev2/mixtral/dacorvo/Mixtral-tiny/6b80cf4267240f3f212b.json +73 -0
  11. neuronxcc-2.18.121.0+9e31e41a/0_REGISTRY/0.3.0.dev2/mixtral/dacorvo/Mixtral-tiny/94cc933909cf5aea3d18.json +73 -0
  12. neuronxcc-2.18.121.0+9e31e41a/0_REGISTRY/0.3.0.dev2/phi3/yujiepan/phi-4-tiny-random/277152fa4a9b26bbac30.json +74 -0
  13. neuronxcc-2.18.121.0+9e31e41a/0_REGISTRY/0.3.0.dev2/phi3/yujiepan/phi-4-tiny-random/8b02c02ae4bc0249d20c.json +74 -0
  14. neuronxcc-2.18.121.0+9e31e41a/0_REGISTRY/0.3.0.dev2/phi3/yujiepan/phi-4-tiny-random/dc26fd0754ce079f678e.json +74 -0
  15. neuronxcc-2.18.121.0+9e31e41a/0_REGISTRY/0.3.0.dev2/qwen2/yujiepan/qwen2.5-128k-tiny-random/23feb84373f0fa7bf154.json +75 -0
  16. neuronxcc-2.18.121.0+9e31e41a/0_REGISTRY/0.3.0.dev2/qwen2/yujiepan/qwen2.5-128k-tiny-random/5b1a5843da4e0e4184af.json +75 -0
  17. neuronxcc-2.18.121.0+9e31e41a/0_REGISTRY/0.3.0.dev2/qwen2/yujiepan/qwen2.5-128k-tiny-random/a037fcc35a84f45b912d.json +75 -0
  18. neuronxcc-2.18.121.0+9e31e41a/MODULE_02f045f6902463c49bce+84f3e719/compile_flags.json +1 -0
  19. neuronxcc-2.18.121.0+9e31e41a/MODULE_02f045f6902463c49bce+84f3e719/model.done +0 -0
  20. neuronxcc-2.18.121.0+9e31e41a/MODULE_02f045f6902463c49bce+84f3e719/model.hlo_module.pb +3 -0
  21. neuronxcc-2.18.121.0+9e31e41a/MODULE_02f045f6902463c49bce+84f3e719/model.neff +3 -0
  22. neuronxcc-2.18.121.0+9e31e41a/MODULE_046ebd86c77dc4a94c6c+5be477de/model.neff +1 -1
  23. neuronxcc-2.18.121.0+9e31e41a/MODULE_046ebd86c77dc4a94c6c+5be477de/wrapped_neff.hlo +1 -1
  24. neuronxcc-2.18.121.0+9e31e41a/MODULE_0bbe60dde8eaacbc8218+5be477de/compile_flags.json +1 -0
  25. neuronxcc-2.18.121.0+9e31e41a/MODULE_0bbe60dde8eaacbc8218+5be477de/model.done +0 -0
  26. neuronxcc-2.18.121.0+9e31e41a/MODULE_0bbe60dde8eaacbc8218+5be477de/model.hlo_module.pb +3 -0
  27. neuronxcc-2.18.121.0+9e31e41a/MODULE_0bbe60dde8eaacbc8218+5be477de/model.neff +3 -0
  28. neuronxcc-2.18.121.0+9e31e41a/MODULE_0bbe60dde8eaacbc8218+5be477de/wrapped_neff.hlo +3 -0
  29. neuronxcc-2.18.121.0+9e31e41a/MODULE_0f88705903403514996d+431f5505/compile_flags.json +1 -0
  30. neuronxcc-2.18.121.0+9e31e41a/MODULE_0f88705903403514996d+431f5505/model.done +0 -0
  31. neuronxcc-2.18.121.0+9e31e41a/MODULE_0f88705903403514996d+431f5505/model.hlo_module.pb +3 -0
  32. neuronxcc-2.18.121.0+9e31e41a/MODULE_0f88705903403514996d+431f5505/model.neff +0 -0
  33. neuronxcc-2.18.121.0+9e31e41a/MODULE_1215feca19e3858f9ef6+84f3e719/compile_flags.json +1 -0
  34. neuronxcc-2.18.121.0+9e31e41a/MODULE_1215feca19e3858f9ef6+84f3e719/model.done +0 -0
  35. neuronxcc-2.18.121.0+9e31e41a/MODULE_1215feca19e3858f9ef6+84f3e719/model.hlo_module.pb +3 -0
  36. neuronxcc-2.18.121.0+9e31e41a/MODULE_1215feca19e3858f9ef6+84f3e719/model.neff +3 -0
  37. neuronxcc-2.18.121.0+9e31e41a/MODULE_18642e0fd797db5b7fcb+431f5505/model.neff +1 -1
  38. neuronxcc-2.18.121.0+9e31e41a/MODULE_21d49e164d26352245e5+84f3e719/model.hlo_module.pb +1 -1
  39. neuronxcc-2.18.121.0+9e31e41a/MODULE_21d49e164d26352245e5+84f3e719/model.neff +1 -1
  40. neuronxcc-2.18.121.0+9e31e41a/MODULE_30c8e5dffb371f5a2fc0+5be477de/compile_flags.json +1 -0
  41. neuronxcc-2.18.121.0+9e31e41a/MODULE_30c8e5dffb371f5a2fc0+5be477de/model.done +0 -0
  42. neuronxcc-2.18.121.0+9e31e41a/MODULE_30c8e5dffb371f5a2fc0+5be477de/model.hlo_module.pb +3 -0
  43. neuronxcc-2.18.121.0+9e31e41a/MODULE_30c8e5dffb371f5a2fc0+5be477de/model.neff +3 -0
  44. neuronxcc-2.18.121.0+9e31e41a/MODULE_30c8e5dffb371f5a2fc0+5be477de/wrapped_neff.hlo +3 -0
  45. neuronxcc-2.18.121.0+9e31e41a/MODULE_331276a07386ee77d52e+431f5505/model.neff +0 -0
  46. neuronxcc-2.18.121.0+9e31e41a/MODULE_3688ee5eab5a3273c651+84f3e719/compile_flags.json +1 -0
  47. neuronxcc-2.18.121.0+9e31e41a/MODULE_3688ee5eab5a3273c651+84f3e719/model.done +0 -0
  48. neuronxcc-2.18.121.0+9e31e41a/MODULE_3688ee5eab5a3273c651+84f3e719/model.hlo_module.pb +3 -0
  49. neuronxcc-2.18.121.0+9e31e41a/MODULE_3688ee5eab5a3273c651+84f3e719/model.neff +3 -0
  50. neuronxcc-2.18.121.0+9e31e41a/MODULE_3a1bd8b5ecc619e49cdb+5be477de/model.neff +1 -1
.gitattributes CHANGED
@@ -3203,3 +3203,39 @@ neuronxcc-2.17.194.0+d312836f/MODULE_cb76f3926e2853557294+165e9558/wrapped_neff.
  neuronxcc-2.17.194.0+d312836f/MODULE_dea3fa0fa1232db56e94+26ac6be0/model.neff filter=lfs diff=lfs merge=lfs -text
  neuronxcc-2.17.194.0+d312836f/MODULE_f3e8207126f92d912816+26ac6be0/model.neff filter=lfs diff=lfs merge=lfs -text
  neuronxcc-2.17.194.0+d312836f/MODULE_1cadac86f33fc48d4ed3+bfe5714b/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.18.121.0+9e31e41a/MODULE_02f045f6902463c49bce+84f3e719/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.18.121.0+9e31e41a/MODULE_0bbe60dde8eaacbc8218+5be477de/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.18.121.0+9e31e41a/MODULE_0bbe60dde8eaacbc8218+5be477de/wrapped_neff.hlo filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.18.121.0+9e31e41a/MODULE_1215feca19e3858f9ef6+84f3e719/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.18.121.0+9e31e41a/MODULE_30c8e5dffb371f5a2fc0+5be477de/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.18.121.0+9e31e41a/MODULE_30c8e5dffb371f5a2fc0+5be477de/wrapped_neff.hlo filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.18.121.0+9e31e41a/MODULE_3688ee5eab5a3273c651+84f3e719/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.18.121.0+9e31e41a/MODULE_3ddc835c8aaca5fb3605+84f3e719/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.18.121.0+9e31e41a/MODULE_46b9d2bfbdf1b2752484+431f5505/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.18.121.0+9e31e41a/MODULE_53e7ea3b124fbe95f047+84f3e719/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.18.121.0+9e31e41a/MODULE_57b107bd0499cc4986ac+ca355898/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.18.121.0+9e31e41a/MODULE_57b107bd0499cc4986ac+ca355898/wrapped_neff.hlo filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.18.121.0+9e31e41a/MODULE_5a10198534c5f2725fd7+5be477de/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.18.121.0+9e31e41a/MODULE_5a10198534c5f2725fd7+5be477de/wrapped_neff.hlo filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.18.121.0+9e31e41a/MODULE_5a81b67dd74f9d5520b3+5be477de/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.18.121.0+9e31e41a/MODULE_5a81b67dd74f9d5520b3+5be477de/wrapped_neff.hlo filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.18.121.0+9e31e41a/MODULE_64950c85776a119cdf83+c2248236/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.18.121.0+9e31e41a/MODULE_6bb5680c622866b3b45b+ca355898/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.18.121.0+9e31e41a/MODULE_6bb5680c622866b3b45b+ca355898/wrapped_neff.hlo filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.18.121.0+9e31e41a/MODULE_738e59bf5e3036394abc+84f3e719/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.18.121.0+9e31e41a/MODULE_7af318ed51d57f96cca6+84f3e719/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.18.121.0+9e31e41a/MODULE_85a4070284ef318b7211+84f3e719/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.18.121.0+9e31e41a/MODULE_92d7a6b8bc621dee02b9+c2248236/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.18.121.0+9e31e41a/MODULE_9a53db93d18e769ee7ea+c2248236/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.18.121.0+9e31e41a/MODULE_a0119b05b11378eaad45+84f3e719/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.18.121.0+9e31e41a/MODULE_a24e3ed896dae389d4f2+84f3e719/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.18.121.0+9e31e41a/MODULE_a5ba22f7ec35560de7f4+5be477de/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.18.121.0+9e31e41a/MODULE_a5ba22f7ec35560de7f4+5be477de/wrapped_neff.hlo filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.18.121.0+9e31e41a/MODULE_ae5ca4b91afd03b04b25+ca355898/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.18.121.0+9e31e41a/MODULE_ae5ca4b91afd03b04b25+ca355898/wrapped_neff.hlo filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.18.121.0+9e31e41a/MODULE_bc0dc6318052d18d4f59+5be477de/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.18.121.0+9e31e41a/MODULE_bc0dc6318052d18d4f59+5be477de/wrapped_neff.hlo filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.18.121.0+9e31e41a/MODULE_d7e2548756fae2419754+5be477de/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.18.121.0+9e31e41a/MODULE_d7e2548756fae2419754+5be477de/wrapped_neff.hlo filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.18.121.0+9e31e41a/MODULE_f5eb91ad26a03c048d3d+84f3e719/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.18.121.0+9e31e41a/MODULE_faa4eb59c0e96cbc54b3+84f3e719/model.neff filter=lfs diff=lfs merge=lfs -text
neuronxcc-2.18.121.0+9e31e41a/0_REGISTRY/0.3.0.dev2/granite/hf-internal-testing/tiny-random-GraniteForCausalLM/509463403db38e0fee4e.json ADDED
@@ -0,0 +1,73 @@
+ {
+ "_entry_class": "SingleModelCacheEntry",
+ "_model_id": "hf-internal-testing/tiny-random-GraniteForCausalLM",
+ "_task": "text-generation",
+ "architectures": [
+ "GraniteForCausalLM"
+ ],
+ "attention_bias": false,
+ "attention_dropout": 0.0,
+ "attention_multiplier": 1.0,
+ "embedding_multiplier": 1.0,
+ "hidden_act": "silu",
+ "hidden_size": 32,
+ "initializer_range": 0.02,
+ "intermediate_size": 64,
+ "logits_scaling": 1.0,
+ "max_position_embeddings": 2048,
+ "mlp_bias": false,
+ "model_type": "granite",
+ "neuron": {
+ "_serialized_key": "NxDNeuronConfig",
+ "async_mode": false,
+ "attn_kernel_enabled": false,
+ "batch_size": 2,
+ "capacity_factor": null,
+ "cc_pipeline_tiling_factor": 2,
+ "checkpoint_id": "hf-internal-testing/tiny-random-GraniteForCausalLM",
+ "checkpoint_revision": "c3074ebc0ac2fe545305f5e5f6cce2cc9b2aa0c5",
+ "continuous_batching": true,
+ "enable_bucketing": false,
+ "ep_degree": 1,
+ "flash_decoding_enabled": false,
+ "fused_qkv": true,
+ "glu_mlp": true,
+ "is_chunked_prefill": false,
+ "local_ranks_size": 2,
+ "logical_nc_config": 1,
+ "max_batch_size": 2,
+ "max_context_length": 100,
+ "max_topk": 256,
+ "mlp_kernel_enabled": false,
+ "mlp_kernel_fuse_residual_add": false,
+ "n_active_tokens": 100,
+ "neuronxcc_version": "2.18.121.0+9e31e41a",
+ "num_cores_per_group": 1,
+ "on_device_sampling": false,
+ "optimum_neuron_version": "0.3.0.dev2",
+ "output_logits": false,
+ "padding_side": "right",
+ "pp_degree": 1,
+ "qk_layernorm": false,
+ "qkv_kernel_enabled": false,
+ "rpl_reduce_dtype": "float16",
+ "sequence_length": 100,
+ "sequence_parallel_enabled": false,
+ "speculation_length": 0,
+ "start_rank_id": 0,
+ "target": null,
+ "torch_dtype": "float16",
+ "tp_degree": 2,
+ "vocab_parallel": false
+ },
+ "num_attention_heads": 4,
+ "num_hidden_layers": 2,
+ "num_key_value_heads": 4,
+ "residual_multiplier": 1.0,
+ "rms_norm_eps": 1e-06,
+ "rope_scaling": null,
+ "rope_theta": 10000.0,
+ "tie_word_embeddings": false,
+ "use_cache": true,
+ "vocab_size": 49152
+ }
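Note: each registry entry above pairs the original Hugging Face model configuration with a serialized NxDNeuronConfig recording how the checkpoint was compiled (tensor parallel degree, batch size, sequence length, dtype, compiler version). Below is a minimal sketch of inspecting one of these entries with only the standard library; the chosen path is just one of the files added in this commit, and this is not the API optimum-neuron itself uses to query the cache.

import json

# Illustrative choice: one of the 0_REGISTRY entries added in this commit.
entry_path = (
    "neuronxcc-2.18.121.0+9e31e41a/0_REGISTRY/0.3.0.dev2/granite/"
    "hf-internal-testing/tiny-random-GraniteForCausalLM/509463403db38e0fee4e.json"
)

with open(entry_path) as f:
    entry = json.load(f)

neuron = entry["neuron"]  # the serialized NxDNeuronConfig
print(entry["_model_id"], entry["_task"])
print("neuronxcc:", neuron["neuronxcc_version"], "| optimum-neuron:", neuron["optimum_neuron_version"])
print("tp_degree:", neuron["tp_degree"], "| batch_size:", neuron["batch_size"],
      "| sequence_length:", neuron["sequence_length"], "| dtype:", neuron["torch_dtype"])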
neuronxcc-2.18.121.0+9e31e41a/0_REGISTRY/0.3.0.dev2/granite/hf-internal-testing/tiny-random-GraniteForCausalLM/d31ad1959dd2765771c4.json ADDED
@@ -0,0 +1,73 @@
+ {
+ "_entry_class": "SingleModelCacheEntry",
+ "_model_id": "hf-internal-testing/tiny-random-GraniteForCausalLM",
+ "_task": "text-generation",
+ "architectures": [
+ "GraniteForCausalLM"
+ ],
+ "attention_bias": false,
+ "attention_dropout": 0.0,
+ "attention_multiplier": 1.0,
+ "embedding_multiplier": 1.0,
+ "hidden_act": "silu",
+ "hidden_size": 32,
+ "initializer_range": 0.02,
+ "intermediate_size": 64,
+ "logits_scaling": 1.0,
+ "max_position_embeddings": 2048,
+ "mlp_bias": false,
+ "model_type": "granite",
+ "neuron": {
+ "_serialized_key": "NxDNeuronConfig",
+ "async_mode": false,
+ "attn_kernel_enabled": false,
+ "batch_size": 1,
+ "capacity_factor": null,
+ "cc_pipeline_tiling_factor": 2,
+ "checkpoint_id": "hf-internal-testing/tiny-random-GraniteForCausalLM",
+ "checkpoint_revision": "c3074ebc0ac2fe545305f5e5f6cce2cc9b2aa0c5",
+ "continuous_batching": false,
+ "enable_bucketing": false,
+ "ep_degree": 1,
+ "flash_decoding_enabled": false,
+ "fused_qkv": true,
+ "glu_mlp": true,
+ "is_chunked_prefill": false,
+ "local_ranks_size": 2,
+ "logical_nc_config": 1,
+ "max_batch_size": 1,
+ "max_context_length": 100,
+ "max_topk": 256,
+ "mlp_kernel_enabled": false,
+ "mlp_kernel_fuse_residual_add": false,
+ "n_active_tokens": 100,
+ "neuronxcc_version": "2.18.121.0+9e31e41a",
+ "num_cores_per_group": 1,
+ "on_device_sampling": true,
+ "optimum_neuron_version": "0.3.0.dev2",
+ "output_logits": false,
+ "padding_side": "right",
+ "pp_degree": 1,
+ "qk_layernorm": false,
+ "qkv_kernel_enabled": false,
+ "rpl_reduce_dtype": "float16",
+ "sequence_length": 100,
+ "sequence_parallel_enabled": false,
+ "speculation_length": 0,
+ "start_rank_id": 0,
+ "target": null,
+ "torch_dtype": "float16",
+ "tp_degree": 2,
+ "vocab_parallel": false
+ },
+ "num_attention_heads": 4,
+ "num_hidden_layers": 2,
+ "num_key_value_heads": 4,
+ "residual_multiplier": 1.0,
+ "rms_norm_eps": 1e-06,
+ "rope_scaling": null,
+ "rope_theta": 10000.0,
+ "tie_word_embeddings": false,
+ "use_cache": true,
+ "vocab_size": 49152
+ }
neuronxcc-2.18.121.0+9e31e41a/0_REGISTRY/0.3.0.dev2/granite/hf-internal-testing/tiny-random-GraniteForCausalLM/e4aca6f4dcde393f9fd2.json ADDED
@@ -0,0 +1,73 @@
+ {
+ "_entry_class": "SingleModelCacheEntry",
+ "_model_id": "hf-internal-testing/tiny-random-GraniteForCausalLM",
+ "_task": "text-generation",
+ "architectures": [
+ "GraniteForCausalLM"
+ ],
+ "attention_bias": false,
+ "attention_dropout": 0.0,
+ "attention_multiplier": 1.0,
+ "embedding_multiplier": 1.0,
+ "hidden_act": "silu",
+ "hidden_size": 32,
+ "initializer_range": 0.02,
+ "intermediate_size": 64,
+ "logits_scaling": 1.0,
+ "max_position_embeddings": 2048,
+ "mlp_bias": false,
+ "model_type": "granite",
+ "neuron": {
+ "_serialized_key": "NxDNeuronConfig",
+ "async_mode": false,
+ "attn_kernel_enabled": false,
+ "batch_size": 1,
+ "capacity_factor": null,
+ "cc_pipeline_tiling_factor": 2,
+ "checkpoint_id": "hf-internal-testing/tiny-random-GraniteForCausalLM",
+ "checkpoint_revision": "c3074ebc0ac2fe545305f5e5f6cce2cc9b2aa0c5",
+ "continuous_batching": false,
+ "enable_bucketing": false,
+ "ep_degree": 1,
+ "flash_decoding_enabled": false,
+ "fused_qkv": true,
+ "glu_mlp": true,
+ "is_chunked_prefill": false,
+ "local_ranks_size": 2,
+ "logical_nc_config": 1,
+ "max_batch_size": 1,
+ "max_context_length": 100,
+ "max_topk": 256,
+ "mlp_kernel_enabled": false,
+ "mlp_kernel_fuse_residual_add": false,
+ "n_active_tokens": 100,
+ "neuronxcc_version": "2.18.121.0+9e31e41a",
+ "num_cores_per_group": 1,
+ "on_device_sampling": true,
+ "optimum_neuron_version": "0.3.0.dev2",
+ "output_logits": false,
+ "padding_side": "right",
+ "pp_degree": 1,
+ "qk_layernorm": false,
+ "qkv_kernel_enabled": false,
+ "rpl_reduce_dtype": "bfloat16",
+ "sequence_length": 100,
+ "sequence_parallel_enabled": false,
+ "speculation_length": 0,
+ "start_rank_id": 0,
+ "target": null,
+ "torch_dtype": "bfloat16",
+ "tp_degree": 2,
+ "vocab_parallel": false
+ },
+ "num_attention_heads": 4,
+ "num_hidden_layers": 2,
+ "num_key_value_heads": 4,
+ "residual_multiplier": 1.0,
+ "rms_norm_eps": 1e-06,
+ "rope_scaling": null,
+ "rope_theta": 10000.0,
+ "tie_word_embeddings": false,
+ "use_cache": true,
+ "vocab_size": 49152
+ }
neuronxcc-2.18.121.0+9e31e41a/0_REGISTRY/0.3.0.dev2/llama/llamafactory/tiny-random-Llama-3/5968d5257cc3367062f1.json ADDED
@@ -0,0 +1,77 @@
+ {
+ "_entry_class": "SingleModelCacheEntry",
+ "_model_id": "llamafactory/tiny-random-Llama-3",
+ "_task": "text-generation",
+ "architectures": [
+ "LlamaForCausalLM"
+ ],
+ "attention_bias": false,
+ "attention_dropout": 0.0,
+ "head_dim": 4,
+ "hidden_act": "silu",
+ "hidden_size": 16,
+ "initializer_range": 0.02,
+ "intermediate_size": 64,
+ "max_position_embeddings": 131072,
+ "mlp_bias": false,
+ "model_type": "llama",
+ "neuron": {
+ "_serialized_key": "NxDNeuronConfig",
+ "async_mode": false,
+ "attn_kernel_enabled": false,
+ "batch_size": 2,
+ "capacity_factor": null,
+ "cc_pipeline_tiling_factor": 2,
+ "checkpoint_id": "llamafactory/tiny-random-Llama-3",
+ "checkpoint_revision": "bf2a2e3bf199ad2ee96f02a3c00246c608db22a8",
+ "continuous_batching": true,
+ "enable_bucketing": false,
+ "ep_degree": 1,
+ "flash_decoding_enabled": false,
+ "fused_qkv": true,
+ "glu_mlp": true,
+ "is_chunked_prefill": false,
+ "local_ranks_size": 2,
+ "logical_nc_config": 1,
+ "max_batch_size": 2,
+ "max_context_length": 100,
+ "max_topk": 256,
+ "mlp_kernel_enabled": false,
+ "mlp_kernel_fuse_residual_add": false,
+ "n_active_tokens": 100,
+ "neuronxcc_version": "2.18.121.0+9e31e41a",
+ "num_cores_per_group": 1,
+ "on_device_sampling": false,
+ "optimum_neuron_version": "0.3.0.dev2",
+ "output_logits": false,
+ "padding_side": "right",
+ "pp_degree": 1,
+ "qk_layernorm": false,
+ "qkv_kernel_enabled": false,
+ "rpl_reduce_dtype": "float16",
+ "sequence_length": 100,
+ "sequence_parallel_enabled": false,
+ "speculation_length": 0,
+ "start_rank_id": 0,
+ "target": null,
+ "torch_dtype": "float16",
+ "tp_degree": 2,
+ "vocab_parallel": false
+ },
+ "num_attention_heads": 4,
+ "num_hidden_layers": 2,
+ "num_key_value_heads": 4,
+ "pretraining_tp": 1,
+ "rms_norm_eps": 1e-05,
+ "rope_scaling": {
+ "factor": 8.0,
+ "high_freq_factor": 4.0,
+ "low_freq_factor": 1.0,
+ "original_max_position_embeddings": 8192,
+ "rope_type": "llama3"
+ },
+ "rope_theta": 500000.0,
+ "tie_word_embeddings": false,
+ "use_cache": true,
+ "vocab_size": 128256
+ }
neuronxcc-2.18.121.0+9e31e41a/0_REGISTRY/0.3.0.dev2/llama/llamafactory/tiny-random-Llama-3/78b0302cf20f0aaea136.json ADDED
@@ -0,0 +1,77 @@
+ {
+ "_entry_class": "SingleModelCacheEntry",
+ "_model_id": "llamafactory/tiny-random-Llama-3",
+ "_task": "text-generation",
+ "architectures": [
+ "LlamaForCausalLM"
+ ],
+ "attention_bias": false,
+ "attention_dropout": 0.0,
+ "head_dim": 4,
+ "hidden_act": "silu",
+ "hidden_size": 16,
+ "initializer_range": 0.02,
+ "intermediate_size": 64,
+ "max_position_embeddings": 131072,
+ "mlp_bias": false,
+ "model_type": "llama",
+ "neuron": {
+ "_serialized_key": "NxDNeuronConfig",
+ "async_mode": false,
+ "attn_kernel_enabled": false,
+ "batch_size": 1,
+ "capacity_factor": null,
+ "cc_pipeline_tiling_factor": 2,
+ "checkpoint_id": "llamafactory/tiny-random-Llama-3",
+ "checkpoint_revision": "bf2a2e3bf199ad2ee96f02a3c00246c608db22a8",
+ "continuous_batching": false,
+ "enable_bucketing": false,
+ "ep_degree": 1,
+ "flash_decoding_enabled": false,
+ "fused_qkv": true,
+ "glu_mlp": true,
+ "is_chunked_prefill": false,
+ "local_ranks_size": 2,
+ "logical_nc_config": 1,
+ "max_batch_size": 1,
+ "max_context_length": 100,
+ "max_topk": 256,
+ "mlp_kernel_enabled": false,
+ "mlp_kernel_fuse_residual_add": false,
+ "n_active_tokens": 100,
+ "neuronxcc_version": "2.18.121.0+9e31e41a",
+ "num_cores_per_group": 1,
+ "on_device_sampling": true,
+ "optimum_neuron_version": "0.3.0.dev2",
+ "output_logits": false,
+ "padding_side": "right",
+ "pp_degree": 1,
+ "qk_layernorm": false,
+ "qkv_kernel_enabled": false,
+ "rpl_reduce_dtype": "float16",
+ "sequence_length": 100,
+ "sequence_parallel_enabled": false,
+ "speculation_length": 0,
+ "start_rank_id": 0,
+ "target": null,
+ "torch_dtype": "float16",
+ "tp_degree": 2,
+ "vocab_parallel": false
+ },
+ "num_attention_heads": 4,
+ "num_hidden_layers": 2,
+ "num_key_value_heads": 4,
+ "pretraining_tp": 1,
+ "rms_norm_eps": 1e-05,
+ "rope_scaling": {
+ "factor": 8.0,
+ "high_freq_factor": 4.0,
+ "low_freq_factor": 1.0,
+ "original_max_position_embeddings": 8192,
+ "rope_type": "llama3"
+ },
+ "rope_theta": 500000.0,
+ "tie_word_embeddings": false,
+ "use_cache": true,
+ "vocab_size": 128256
+ }
neuronxcc-2.18.121.0+9e31e41a/0_REGISTRY/0.3.0.dev2/llama/llamafactory/tiny-random-Llama-3/dc140bf4a30b563525db.json ADDED
@@ -0,0 +1,77 @@
+ {
+ "_entry_class": "SingleModelCacheEntry",
+ "_model_id": "llamafactory/tiny-random-Llama-3",
+ "_task": "text-generation",
+ "architectures": [
+ "LlamaForCausalLM"
+ ],
+ "attention_bias": false,
+ "attention_dropout": 0.0,
+ "head_dim": 4,
+ "hidden_act": "silu",
+ "hidden_size": 16,
+ "initializer_range": 0.02,
+ "intermediate_size": 64,
+ "max_position_embeddings": 131072,
+ "mlp_bias": false,
+ "model_type": "llama",
+ "neuron": {
+ "_serialized_key": "NxDNeuronConfig",
+ "async_mode": false,
+ "attn_kernel_enabled": false,
+ "batch_size": 1,
+ "capacity_factor": null,
+ "cc_pipeline_tiling_factor": 2,
+ "checkpoint_id": "llamafactory/tiny-random-Llama-3",
+ "checkpoint_revision": "bf2a2e3bf199ad2ee96f02a3c00246c608db22a8",
+ "continuous_batching": false,
+ "enable_bucketing": false,
+ "ep_degree": 1,
+ "flash_decoding_enabled": false,
+ "fused_qkv": true,
+ "glu_mlp": true,
+ "is_chunked_prefill": false,
+ "local_ranks_size": 2,
+ "logical_nc_config": 1,
+ "max_batch_size": 1,
+ "max_context_length": 100,
+ "max_topk": 256,
+ "mlp_kernel_enabled": false,
+ "mlp_kernel_fuse_residual_add": false,
+ "n_active_tokens": 100,
+ "neuronxcc_version": "2.18.121.0+9e31e41a",
+ "num_cores_per_group": 1,
+ "on_device_sampling": true,
+ "optimum_neuron_version": "0.3.0.dev2",
+ "output_logits": false,
+ "padding_side": "right",
+ "pp_degree": 1,
+ "qk_layernorm": false,
+ "qkv_kernel_enabled": false,
+ "rpl_reduce_dtype": "bfloat16",
+ "sequence_length": 100,
+ "sequence_parallel_enabled": false,
+ "speculation_length": 0,
+ "start_rank_id": 0,
+ "target": null,
+ "torch_dtype": "bfloat16",
+ "tp_degree": 2,
+ "vocab_parallel": false
+ },
+ "num_attention_heads": 4,
+ "num_hidden_layers": 2,
+ "num_key_value_heads": 4,
+ "pretraining_tp": 1,
+ "rms_norm_eps": 1e-05,
+ "rope_scaling": {
+ "factor": 8.0,
+ "high_freq_factor": 4.0,
+ "low_freq_factor": 1.0,
+ "original_max_position_embeddings": 8192,
+ "rope_type": "llama3"
+ },
+ "rope_theta": 500000.0,
+ "tie_word_embeddings": false,
+ "use_cache": true,
+ "vocab_size": 128256
+ }
neuronxcc-2.18.121.0+9e31e41a/0_REGISTRY/0.3.0.dev2/llama/unsloth/Llama-3.2-1B-Instruct/0c1a326b7a025a55c9b3.json ADDED
@@ -0,0 +1,78 @@
+ {
+ "_entry_class": "SingleModelCacheEntry",
+ "_model_id": "unsloth/Llama-3.2-1B-Instruct",
+ "_task": "text-generation",
+ "architectures": [
+ "LlamaForCausalLM"
+ ],
+ "attention_bias": false,
+ "attention_dropout": 0.0,
+ "head_dim": 64,
+ "hidden_act": "silu",
+ "hidden_size": 2048,
+ "initializer_range": 0.02,
+ "intermediate_size": 8192,
+ "max_position_embeddings": 131072,
+ "mlp_bias": false,
+ "model_type": "llama",
+ "neuron": {
+ "_serialized_key": "NxDNeuronConfig",
+ "async_mode": false,
+ "attn_kernel_enabled": false,
+ "batch_size": 4,
+ "capacity_factor": null,
+ "cc_pipeline_tiling_factor": 2,
+ "checkpoint_id": "unsloth/Llama-3.2-1B-Instruct",
+ "checkpoint_revision": "5a8abab4a5d6f164389b1079fb721cfab8d7126c",
+ "continuous_batching": true,
+ "enable_bucketing": false,
+ "ep_degree": 1,
+ "flash_decoding_enabled": false,
+ "fused_qkv": true,
+ "glu_mlp": true,
+ "is_chunked_prefill": false,
+ "local_ranks_size": 2,
+ "logical_nc_config": 1,
+ "max_batch_size": 4,
+ "max_context_length": 4096,
+ "max_topk": 256,
+ "mlp_kernel_enabled": false,
+ "mlp_kernel_fuse_residual_add": false,
+ "n_active_tokens": 4096,
+ "neuronxcc_version": "2.18.121.0+9e31e41a",
+ "num_cores_per_group": 1,
+ "on_device_sampling": false,
+ "optimum_neuron_version": "0.3.0.dev2",
+ "output_logits": false,
+ "padding_side": "right",
+ "pp_degree": 1,
+ "qk_layernorm": false,
+ "qkv_kernel_enabled": false,
+ "rpl_reduce_dtype": "float16",
+ "sequence_length": 4096,
+ "sequence_parallel_enabled": false,
+ "speculation_length": 0,
+ "start_rank_id": 0,
+ "target": null,
+ "torch_dtype": "float16",
+ "tp_degree": 2,
+ "vocab_parallel": false
+ },
+ "num_attention_heads": 32,
+ "num_hidden_layers": 16,
+ "num_key_value_heads": 8,
+ "pretraining_tp": 1,
+ "rms_norm_eps": 1e-05,
+ "rope_scaling": {
+ "factor": 32.0,
+ "high_freq_factor": 4.0,
+ "low_freq_factor": 1.0,
+ "original_max_position_embeddings": 8192,
+ "rope_type": "llama3"
+ },
+ "rope_theta": 500000.0,
+ "tie_word_embeddings": true,
+ "unsloth_fixed": true,
+ "use_cache": true,
+ "vocab_size": 128256
+ }
neuronxcc-2.18.121.0+9e31e41a/0_REGISTRY/0.3.0.dev2/mixtral/dacorvo/Mixtral-tiny/06f279f394a1e3fbaf6f.json ADDED
@@ -0,0 +1,73 @@
+ {
+ "_entry_class": "SingleModelCacheEntry",
+ "_model_id": "dacorvo/Mixtral-tiny",
+ "_task": "text-generation",
+ "architectures": [
+ "MixtralForCausalLM"
+ ],
+ "attention_dropout": 0.0,
+ "head_dim": 32,
+ "hidden_act": "silu",
+ "hidden_size": 1024,
+ "initializer_range": 0.02,
+ "intermediate_size": 3584,
+ "max_position_embeddings": 1024,
+ "model_type": "mixtral",
+ "neuron": {
+ "_serialized_key": "NxDNeuronConfig",
+ "async_mode": false,
+ "attn_kernel_enabled": false,
+ "batch_size": 2,
+ "capacity_factor": null,
+ "cc_pipeline_tiling_factor": 2,
+ "checkpoint_id": "dacorvo/Mixtral-tiny",
+ "checkpoint_revision": "c557ba205ddff6ea911f4719e0d543d6c08356b6",
+ "continuous_batching": false,
+ "enable_bucketing": false,
+ "ep_degree": 1,
+ "flash_decoding_enabled": false,
+ "fused_qkv": false,
+ "glu_mlp": true,
+ "is_chunked_prefill": false,
+ "local_ranks_size": 2,
+ "logical_nc_config": 1,
+ "max_batch_size": 2,
+ "max_context_length": 100,
+ "max_topk": 256,
+ "mlp_kernel_enabled": false,
+ "mlp_kernel_fuse_residual_add": false,
+ "n_active_tokens": 100,
+ "neuronxcc_version": "2.18.121.0+9e31e41a",
+ "num_cores_per_group": 1,
+ "on_device_sampling": false,
+ "optimum_neuron_version": "0.3.0.dev2",
+ "output_logits": false,
+ "padding_side": "right",
+ "pp_degree": 1,
+ "qk_layernorm": false,
+ "qkv_kernel_enabled": false,
+ "rpl_reduce_dtype": "float16",
+ "sequence_length": 100,
+ "sequence_parallel_enabled": false,
+ "speculation_length": 0,
+ "start_rank_id": 0,
+ "target": null,
+ "torch_dtype": "float16",
+ "tp_degree": 2,
+ "vocab_parallel": false
+ },
+ "num_attention_heads": 32,
+ "num_experts_per_tok": 2,
+ "num_hidden_layers": 2,
+ "num_key_value_heads": 8,
+ "num_local_experts": 8,
+ "output_router_logits": false,
+ "rms_norm_eps": 1e-05,
+ "rope_theta": 10000.0,
+ "router_aux_loss_coef": 0.001,
+ "router_jitter_noise": 0.0,
+ "sliding_window": 4096,
+ "tie_word_embeddings": false,
+ "use_cache": true,
+ "vocab_size": 32000
+ }
neuronxcc-2.18.121.0+9e31e41a/0_REGISTRY/0.3.0.dev2/mixtral/dacorvo/Mixtral-tiny/6b80cf4267240f3f212b.json ADDED
@@ -0,0 +1,73 @@
+ {
+ "_entry_class": "SingleModelCacheEntry",
+ "_model_id": "dacorvo/Mixtral-tiny",
+ "_task": "text-generation",
+ "architectures": [
+ "MixtralForCausalLM"
+ ],
+ "attention_dropout": 0.0,
+ "head_dim": 32,
+ "hidden_act": "silu",
+ "hidden_size": 1024,
+ "initializer_range": 0.02,
+ "intermediate_size": 3584,
+ "max_position_embeddings": 1024,
+ "model_type": "mixtral",
+ "neuron": {
+ "_serialized_key": "NxDNeuronConfig",
+ "async_mode": false,
+ "attn_kernel_enabled": false,
+ "batch_size": 1,
+ "capacity_factor": null,
+ "cc_pipeline_tiling_factor": 2,
+ "checkpoint_id": "dacorvo/Mixtral-tiny",
+ "checkpoint_revision": "c557ba205ddff6ea911f4719e0d543d6c08356b6",
+ "continuous_batching": false,
+ "enable_bucketing": false,
+ "ep_degree": 1,
+ "flash_decoding_enabled": false,
+ "fused_qkv": false,
+ "glu_mlp": true,
+ "is_chunked_prefill": false,
+ "local_ranks_size": 2,
+ "logical_nc_config": 1,
+ "max_batch_size": 1,
+ "max_context_length": 100,
+ "max_topk": 256,
+ "mlp_kernel_enabled": false,
+ "mlp_kernel_fuse_residual_add": false,
+ "n_active_tokens": 100,
+ "neuronxcc_version": "2.18.121.0+9e31e41a",
+ "num_cores_per_group": 1,
+ "on_device_sampling": false,
+ "optimum_neuron_version": "0.3.0.dev2",
+ "output_logits": false,
+ "padding_side": "right",
+ "pp_degree": 1,
+ "qk_layernorm": false,
+ "qkv_kernel_enabled": false,
+ "rpl_reduce_dtype": "float16",
+ "sequence_length": 100,
+ "sequence_parallel_enabled": false,
+ "speculation_length": 0,
+ "start_rank_id": 0,
+ "target": null,
+ "torch_dtype": "float16",
+ "tp_degree": 2,
+ "vocab_parallel": false
+ },
+ "num_attention_heads": 32,
+ "num_experts_per_tok": 2,
+ "num_hidden_layers": 2,
+ "num_key_value_heads": 8,
+ "num_local_experts": 8,
+ "output_router_logits": false,
+ "rms_norm_eps": 1e-05,
+ "rope_theta": 10000.0,
+ "router_aux_loss_coef": 0.001,
+ "router_jitter_noise": 0.0,
+ "sliding_window": 4096,
+ "tie_word_embeddings": false,
+ "use_cache": true,
+ "vocab_size": 32000
+ }
neuronxcc-2.18.121.0+9e31e41a/0_REGISTRY/0.3.0.dev2/mixtral/dacorvo/Mixtral-tiny/94cc933909cf5aea3d18.json ADDED
@@ -0,0 +1,73 @@
+ {
+ "_entry_class": "SingleModelCacheEntry",
+ "_model_id": "dacorvo/Mixtral-tiny",
+ "_task": "text-generation",
+ "architectures": [
+ "MixtralForCausalLM"
+ ],
+ "attention_dropout": 0.0,
+ "head_dim": 32,
+ "hidden_act": "silu",
+ "hidden_size": 1024,
+ "initializer_range": 0.02,
+ "intermediate_size": 3584,
+ "max_position_embeddings": 1024,
+ "model_type": "mixtral",
+ "neuron": {
+ "_serialized_key": "NxDNeuronConfig",
+ "async_mode": false,
+ "attn_kernel_enabled": false,
+ "batch_size": 1,
+ "capacity_factor": null,
+ "cc_pipeline_tiling_factor": 2,
+ "checkpoint_id": "dacorvo/Mixtral-tiny",
+ "checkpoint_revision": "c557ba205ddff6ea911f4719e0d543d6c08356b6",
+ "continuous_batching": false,
+ "enable_bucketing": false,
+ "ep_degree": 1,
+ "flash_decoding_enabled": false,
+ "fused_qkv": false,
+ "glu_mlp": true,
+ "is_chunked_prefill": false,
+ "local_ranks_size": 2,
+ "logical_nc_config": 1,
+ "max_batch_size": 1,
+ "max_context_length": 100,
+ "max_topk": 256,
+ "mlp_kernel_enabled": false,
+ "mlp_kernel_fuse_residual_add": false,
+ "n_active_tokens": 100,
+ "neuronxcc_version": "2.18.121.0+9e31e41a",
+ "num_cores_per_group": 1,
+ "on_device_sampling": false,
+ "optimum_neuron_version": "0.3.0.dev2",
+ "output_logits": false,
+ "padding_side": "right",
+ "pp_degree": 1,
+ "qk_layernorm": false,
+ "qkv_kernel_enabled": false,
+ "rpl_reduce_dtype": "bfloat16",
+ "sequence_length": 100,
+ "sequence_parallel_enabled": false,
+ "speculation_length": 0,
+ "start_rank_id": 0,
+ "target": null,
+ "torch_dtype": "bfloat16",
+ "tp_degree": 2,
+ "vocab_parallel": false
+ },
+ "num_attention_heads": 32,
+ "num_experts_per_tok": 2,
+ "num_hidden_layers": 2,
+ "num_key_value_heads": 8,
+ "num_local_experts": 8,
+ "output_router_logits": false,
+ "rms_norm_eps": 1e-05,
+ "rope_theta": 10000.0,
+ "router_aux_loss_coef": 0.001,
+ "router_jitter_noise": 0.0,
+ "sliding_window": 4096,
+ "tie_word_embeddings": false,
+ "use_cache": true,
+ "vocab_size": 32000
+ }
neuronxcc-2.18.121.0+9e31e41a/0_REGISTRY/0.3.0.dev2/phi3/yujiepan/phi-4-tiny-random/277152fa4a9b26bbac30.json ADDED
@@ -0,0 +1,74 @@
+ {
+ "_entry_class": "SingleModelCacheEntry",
+ "_model_id": "yujiepan/phi-4-tiny-random",
+ "_task": "text-generation",
+ "architectures": [
+ "Phi3ForCausalLM"
+ ],
+ "attention_bias": false,
+ "attention_dropout": 0.0,
+ "auto_map": {},
+ "embd_pdrop": 0.0,
+ "hidden_act": "silu",
+ "hidden_size": 16,
+ "initializer_range": 0.02,
+ "intermediate_size": 32,
+ "max_position_embeddings": 16384,
+ "model_type": "phi3",
+ "neuron": {
+ "_serialized_key": "NxDNeuronConfig",
+ "async_mode": false,
+ "attn_kernel_enabled": false,
+ "batch_size": 1,
+ "capacity_factor": null,
+ "cc_pipeline_tiling_factor": 2,
+ "checkpoint_id": "yujiepan/phi-4-tiny-random",
+ "checkpoint_revision": "18a9a1168dc97ac6d128f811925670c275610f5a",
+ "continuous_batching": false,
+ "enable_bucketing": false,
+ "ep_degree": 1,
+ "flash_decoding_enabled": false,
+ "fused_qkv": true,
+ "glu_mlp": true,
+ "is_chunked_prefill": false,
+ "local_ranks_size": 2,
+ "logical_nc_config": 1,
+ "max_batch_size": 1,
+ "max_context_length": 100,
+ "max_topk": 256,
+ "mlp_kernel_enabled": false,
+ "mlp_kernel_fuse_residual_add": false,
+ "n_active_tokens": 100,
+ "neuronxcc_version": "2.18.121.0+9e31e41a",
+ "num_cores_per_group": 1,
+ "on_device_sampling": true,
+ "optimum_neuron_version": "0.3.0.dev2",
+ "output_logits": false,
+ "padding_side": "right",
+ "pp_degree": 1,
+ "qk_layernorm": false,
+ "qkv_kernel_enabled": false,
+ "rpl_reduce_dtype": "bfloat16",
+ "sequence_length": 100,
+ "sequence_parallel_enabled": false,
+ "speculation_length": 0,
+ "start_rank_id": 0,
+ "target": null,
+ "torch_dtype": "bfloat16",
+ "tp_degree": 2,
+ "vocab_parallel": false
+ },
+ "num_attention_heads": 2,
+ "num_hidden_layers": 2,
+ "num_key_value_heads": 1,
+ "original_max_position_embeddings": 16384,
+ "partial_rotary_factor": 1.0,
+ "resid_pdrop": 0.0,
+ "rms_norm_eps": 1e-05,
+ "rope_scaling": null,
+ "rope_theta": 250000,
+ "sliding_window": null,
+ "tie_word_embeddings": false,
+ "use_cache": true,
+ "vocab_size": 100352
+ }
neuronxcc-2.18.121.0+9e31e41a/0_REGISTRY/0.3.0.dev2/phi3/yujiepan/phi-4-tiny-random/8b02c02ae4bc0249d20c.json ADDED
@@ -0,0 +1,74 @@
+ {
+ "_entry_class": "SingleModelCacheEntry",
+ "_model_id": "yujiepan/phi-4-tiny-random",
+ "_task": "text-generation",
+ "architectures": [
+ "Phi3ForCausalLM"
+ ],
+ "attention_bias": false,
+ "attention_dropout": 0.0,
+ "auto_map": {},
+ "embd_pdrop": 0.0,
+ "hidden_act": "silu",
+ "hidden_size": 16,
+ "initializer_range": 0.02,
+ "intermediate_size": 32,
+ "max_position_embeddings": 16384,
+ "model_type": "phi3",
+ "neuron": {
+ "_serialized_key": "NxDNeuronConfig",
+ "async_mode": false,
+ "attn_kernel_enabled": false,
+ "batch_size": 2,
+ "capacity_factor": null,
+ "cc_pipeline_tiling_factor": 2,
+ "checkpoint_id": "yujiepan/phi-4-tiny-random",
+ "checkpoint_revision": "18a9a1168dc97ac6d128f811925670c275610f5a",
+ "continuous_batching": true,
+ "enable_bucketing": false,
+ "ep_degree": 1,
+ "flash_decoding_enabled": false,
+ "fused_qkv": true,
+ "glu_mlp": true,
+ "is_chunked_prefill": false,
+ "local_ranks_size": 2,
+ "logical_nc_config": 1,
+ "max_batch_size": 2,
+ "max_context_length": 100,
+ "max_topk": 256,
+ "mlp_kernel_enabled": false,
+ "mlp_kernel_fuse_residual_add": false,
+ "n_active_tokens": 100,
+ "neuronxcc_version": "2.18.121.0+9e31e41a",
+ "num_cores_per_group": 1,
+ "on_device_sampling": false,
+ "optimum_neuron_version": "0.3.0.dev2",
+ "output_logits": false,
+ "padding_side": "right",
+ "pp_degree": 1,
+ "qk_layernorm": false,
+ "qkv_kernel_enabled": false,
+ "rpl_reduce_dtype": "float16",
+ "sequence_length": 100,
+ "sequence_parallel_enabled": false,
+ "speculation_length": 0,
+ "start_rank_id": 0,
+ "target": null,
+ "torch_dtype": "float16",
+ "tp_degree": 2,
+ "vocab_parallel": false
+ },
+ "num_attention_heads": 2,
+ "num_hidden_layers": 2,
+ "num_key_value_heads": 1,
+ "original_max_position_embeddings": 16384,
+ "partial_rotary_factor": 1.0,
+ "resid_pdrop": 0.0,
+ "rms_norm_eps": 1e-05,
+ "rope_scaling": null,
+ "rope_theta": 250000,
+ "sliding_window": null,
+ "tie_word_embeddings": false,
+ "use_cache": true,
+ "vocab_size": 100352
+ }
neuronxcc-2.18.121.0+9e31e41a/0_REGISTRY/0.3.0.dev2/phi3/yujiepan/phi-4-tiny-random/dc26fd0754ce079f678e.json ADDED
@@ -0,0 +1,74 @@
+ {
+ "_entry_class": "SingleModelCacheEntry",
+ "_model_id": "yujiepan/phi-4-tiny-random",
+ "_task": "text-generation",
+ "architectures": [
+ "Phi3ForCausalLM"
+ ],
+ "attention_bias": false,
+ "attention_dropout": 0.0,
+ "auto_map": {},
+ "embd_pdrop": 0.0,
+ "hidden_act": "silu",
+ "hidden_size": 16,
+ "initializer_range": 0.02,
+ "intermediate_size": 32,
+ "max_position_embeddings": 16384,
+ "model_type": "phi3",
+ "neuron": {
+ "_serialized_key": "NxDNeuronConfig",
+ "async_mode": false,
+ "attn_kernel_enabled": false,
+ "batch_size": 1,
+ "capacity_factor": null,
+ "cc_pipeline_tiling_factor": 2,
+ "checkpoint_id": "yujiepan/phi-4-tiny-random",
+ "checkpoint_revision": "18a9a1168dc97ac6d128f811925670c275610f5a",
+ "continuous_batching": false,
+ "enable_bucketing": false,
+ "ep_degree": 1,
+ "flash_decoding_enabled": false,
+ "fused_qkv": true,
+ "glu_mlp": true,
+ "is_chunked_prefill": false,
+ "local_ranks_size": 2,
+ "logical_nc_config": 1,
+ "max_batch_size": 1,
+ "max_context_length": 100,
+ "max_topk": 256,
+ "mlp_kernel_enabled": false,
+ "mlp_kernel_fuse_residual_add": false,
+ "n_active_tokens": 100,
+ "neuronxcc_version": "2.18.121.0+9e31e41a",
+ "num_cores_per_group": 1,
+ "on_device_sampling": true,
+ "optimum_neuron_version": "0.3.0.dev2",
+ "output_logits": false,
+ "padding_side": "right",
+ "pp_degree": 1,
+ "qk_layernorm": false,
+ "qkv_kernel_enabled": false,
+ "rpl_reduce_dtype": "float16",
+ "sequence_length": 100,
+ "sequence_parallel_enabled": false,
+ "speculation_length": 0,
+ "start_rank_id": 0,
+ "target": null,
+ "torch_dtype": "float16",
+ "tp_degree": 2,
+ "vocab_parallel": false
+ },
+ "num_attention_heads": 2,
+ "num_hidden_layers": 2,
+ "num_key_value_heads": 1,
+ "original_max_position_embeddings": 16384,
+ "partial_rotary_factor": 1.0,
+ "resid_pdrop": 0.0,
+ "rms_norm_eps": 1e-05,
+ "rope_scaling": null,
+ "rope_theta": 250000,
+ "sliding_window": null,
+ "tie_word_embeddings": false,
+ "use_cache": true,
+ "vocab_size": 100352
+ }
neuronxcc-2.18.121.0+9e31e41a/0_REGISTRY/0.3.0.dev2/qwen2/yujiepan/qwen2.5-128k-tiny-random/23feb84373f0fa7bf154.json ADDED
@@ -0,0 +1,75 @@
+ {
+ "_entry_class": "SingleModelCacheEntry",
+ "_model_id": "yujiepan/qwen2.5-128k-tiny-random",
+ "_task": "text-generation",
+ "architectures": [
+ "Qwen2ForCausalLM"
+ ],
+ "attention_dropout": 0.0,
+ "hidden_act": "silu",
+ "hidden_size": 8,
+ "initializer_range": 0.02,
+ "intermediate_size": 16,
+ "max_position_embeddings": 32768,
+ "max_window_layers": 1,
+ "model_type": "qwen2",
+ "neuron": {
+ "_serialized_key": "NxDNeuronConfig",
+ "async_mode": false,
+ "attn_kernel_enabled": false,
+ "batch_size": 1,
+ "capacity_factor": null,
+ "cc_pipeline_tiling_factor": 2,
+ "checkpoint_id": "yujiepan/qwen2.5-128k-tiny-random",
+ "checkpoint_revision": "c8296d4ca3f87782876d2382fbb6481d1beb8ef0",
+ "continuous_batching": false,
+ "enable_bucketing": false,
+ "ep_degree": 1,
+ "flash_decoding_enabled": false,
+ "fused_qkv": false,
+ "glu_mlp": true,
+ "is_chunked_prefill": false,
+ "local_ranks_size": 2,
+ "logical_nc_config": 1,
+ "max_batch_size": 1,
+ "max_context_length": 100,
+ "max_topk": 256,
+ "mlp_kernel_enabled": false,
+ "mlp_kernel_fuse_residual_add": false,
+ "n_active_tokens": 100,
+ "neuronxcc_version": "2.18.121.0+9e31e41a",
+ "num_cores_per_group": 1,
+ "on_device_sampling": true,
+ "optimum_neuron_version": "0.3.0.dev2",
+ "output_logits": false,
+ "padding_side": "right",
+ "pp_degree": 1,
+ "qk_layernorm": false,
+ "qkv_kernel_enabled": false,
+ "rpl_reduce_dtype": "float16",
+ "sequence_length": 100,
+ "sequence_parallel_enabled": false,
+ "speculation_length": 0,
+ "start_rank_id": 0,
+ "target": null,
+ "torch_dtype": "float16",
+ "tp_degree": 2,
+ "vocab_parallel": false
+ },
+ "num_attention_heads": 4,
+ "num_hidden_layers": 2,
+ "num_key_value_heads": 2,
+ "rms_norm_eps": 1e-06,
+ "rope_scaling": {
+ "factor": 4.0,
+ "original_max_position_embeddings": 32768,
+ "rope_type": "yarn",
+ "type": "yarn"
+ },
+ "rope_theta": 1000000.0,
+ "sliding_window": 131072,
+ "tie_word_embeddings": false,
+ "use_cache": true,
+ "use_sliding_window": false,
+ "vocab_size": 152064
+ }
neuronxcc-2.18.121.0+9e31e41a/0_REGISTRY/0.3.0.dev2/qwen2/yujiepan/qwen2.5-128k-tiny-random/5b1a5843da4e0e4184af.json ADDED
@@ -0,0 +1,75 @@
+ {
+ "_entry_class": "SingleModelCacheEntry",
+ "_model_id": "yujiepan/qwen2.5-128k-tiny-random",
+ "_task": "text-generation",
+ "architectures": [
+ "Qwen2ForCausalLM"
+ ],
+ "attention_dropout": 0.0,
+ "hidden_act": "silu",
+ "hidden_size": 8,
+ "initializer_range": 0.02,
+ "intermediate_size": 16,
+ "max_position_embeddings": 32768,
+ "max_window_layers": 1,
+ "model_type": "qwen2",
+ "neuron": {
+ "_serialized_key": "NxDNeuronConfig",
+ "async_mode": false,
+ "attn_kernel_enabled": false,
+ "batch_size": 1,
+ "capacity_factor": null,
+ "cc_pipeline_tiling_factor": 2,
+ "checkpoint_id": "yujiepan/qwen2.5-128k-tiny-random",
+ "checkpoint_revision": "c8296d4ca3f87782876d2382fbb6481d1beb8ef0",
+ "continuous_batching": false,
+ "enable_bucketing": false,
+ "ep_degree": 1,
+ "flash_decoding_enabled": false,
+ "fused_qkv": false,
+ "glu_mlp": true,
+ "is_chunked_prefill": false,
+ "local_ranks_size": 2,
+ "logical_nc_config": 1,
+ "max_batch_size": 1,
+ "max_context_length": 100,
+ "max_topk": 256,
+ "mlp_kernel_enabled": false,
+ "mlp_kernel_fuse_residual_add": false,
+ "n_active_tokens": 100,
+ "neuronxcc_version": "2.18.121.0+9e31e41a",
+ "num_cores_per_group": 1,
+ "on_device_sampling": true,
+ "optimum_neuron_version": "0.3.0.dev2",
+ "output_logits": false,
+ "padding_side": "right",
+ "pp_degree": 1,
+ "qk_layernorm": false,
+ "qkv_kernel_enabled": false,
+ "rpl_reduce_dtype": "bfloat16",
+ "sequence_length": 100,
+ "sequence_parallel_enabled": false,
+ "speculation_length": 0,
+ "start_rank_id": 0,
+ "target": null,
+ "torch_dtype": "bfloat16",
+ "tp_degree": 2,
+ "vocab_parallel": false
+ },
+ "num_attention_heads": 4,
+ "num_hidden_layers": 2,
+ "num_key_value_heads": 2,
+ "rms_norm_eps": 1e-06,
+ "rope_scaling": {
+ "factor": 4.0,
+ "original_max_position_embeddings": 32768,
+ "rope_type": "yarn",
+ "type": "yarn"
+ },
+ "rope_theta": 1000000.0,
+ "sliding_window": 131072,
+ "tie_word_embeddings": false,
+ "use_cache": true,
+ "use_sliding_window": false,
+ "vocab_size": 152064
+ }
neuronxcc-2.18.121.0+9e31e41a/0_REGISTRY/0.3.0.dev2/qwen2/yujiepan/qwen2.5-128k-tiny-random/a037fcc35a84f45b912d.json ADDED
@@ -0,0 +1,75 @@
+ {
+ "_entry_class": "SingleModelCacheEntry",
+ "_model_id": "yujiepan/qwen2.5-128k-tiny-random",
+ "_task": "text-generation",
+ "architectures": [
+ "Qwen2ForCausalLM"
+ ],
+ "attention_dropout": 0.0,
+ "hidden_act": "silu",
+ "hidden_size": 8,
+ "initializer_range": 0.02,
+ "intermediate_size": 16,
+ "max_position_embeddings": 32768,
+ "max_window_layers": 1,
+ "model_type": "qwen2",
+ "neuron": {
+ "_serialized_key": "NxDNeuronConfig",
+ "async_mode": false,
+ "attn_kernel_enabled": false,
+ "batch_size": 2,
+ "capacity_factor": null,
+ "cc_pipeline_tiling_factor": 2,
+ "checkpoint_id": "yujiepan/qwen2.5-128k-tiny-random",
+ "checkpoint_revision": "c8296d4ca3f87782876d2382fbb6481d1beb8ef0",
+ "continuous_batching": true,
+ "enable_bucketing": false,
+ "ep_degree": 1,
+ "flash_decoding_enabled": false,
+ "fused_qkv": false,
+ "glu_mlp": true,
+ "is_chunked_prefill": false,
+ "local_ranks_size": 2,
+ "logical_nc_config": 1,
+ "max_batch_size": 2,
+ "max_context_length": 100,
+ "max_topk": 256,
+ "mlp_kernel_enabled": false,
+ "mlp_kernel_fuse_residual_add": false,
+ "n_active_tokens": 100,
+ "neuronxcc_version": "2.18.121.0+9e31e41a",
+ "num_cores_per_group": 1,
+ "on_device_sampling": false,
+ "optimum_neuron_version": "0.3.0.dev2",
+ "output_logits": false,
+ "padding_side": "right",
+ "pp_degree": 1,
+ "qk_layernorm": false,
+ "qkv_kernel_enabled": false,
+ "rpl_reduce_dtype": "float16",
+ "sequence_length": 100,
+ "sequence_parallel_enabled": false,
+ "speculation_length": 0,
+ "start_rank_id": 0,
+ "target": null,
+ "torch_dtype": "float16",
+ "tp_degree": 2,
+ "vocab_parallel": false
+ },
+ "num_attention_heads": 4,
+ "num_hidden_layers": 2,
+ "num_key_value_heads": 2,
+ "rms_norm_eps": 1e-06,
+ "rope_scaling": {
+ "factor": 4.0,
+ "original_max_position_embeddings": 32768,
+ "rope_type": "yarn",
+ "type": "yarn"
+ },
+ "rope_theta": 1000000.0,
+ "sliding_window": 131072,
+ "tie_word_embeddings": false,
+ "use_cache": true,
+ "use_sliding_window": false,
+ "vocab_size": 152064
+ }
neuronxcc-2.18.121.0+9e31e41a/MODULE_02f045f6902463c49bce+84f3e719/compile_flags.json ADDED
@@ -0,0 +1 @@
+ ["--target=trn1", "--auto-cast=none", "--model-type=transformer", "--tensorizer-options=--enable-ccop-compute-overlap --cc-pipeline-tiling-factor=2 --vectorize-dge-dma --vectorize-strided-dma ", "-O2", "--internal-num-neuroncores-per-sengine=1", "--logfile=/tmp/nxd_model/context_encoding_model/_tp0_bk0/log-neuron-cc.txt"]
neuronxcc-2.18.121.0+9e31e41a/MODULE_02f045f6902463c49bce+84f3e719/model.done ADDED
File without changes
neuronxcc-2.18.121.0+9e31e41a/MODULE_02f045f6902463c49bce+84f3e719/model.hlo_module.pb ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0803cb5e5e23b32611e1d7a9f6868f512ce57b77d3a0a29c8f986b5cce743321
+ size 46622
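Note: the large artifacts in this commit (model.neff, wrapped_neff.hlo, model.hlo_module.pb) are stored as Git LFS pointers, i.e. three lines giving the spec version, the sha256 oid, and the byte size, as shown above. A minimal, illustrative sketch of checking a locally materialized file against such a pointer (a hypothetical helper, not part of this repository):

import hashlib

def lfs_pointer_matches(pointer_path: str, blob_path: str) -> bool:
    # Parse the "key value" lines of the pointer file (version, oid, size).
    fields = dict(line.split(" ", 1) for line in open(pointer_path).read().splitlines() if " " in line)
    expected_oid = fields["oid"].split(":", 1)[1]   # strip the "sha256:" prefix
    expected_size = int(fields["size"])
    data = open(blob_path, "rb").read()
    return len(data) == expected_size and hashlib.sha256(data).hexdigest() == expected_oid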
neuronxcc-2.18.121.0+9e31e41a/MODULE_02f045f6902463c49bce+84f3e719/model.neff ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5230dfb96ab51bf16cb02ac63f3e8ac41c847cb011db0f82f5001847f758f0e6
+ size 144384
neuronxcc-2.18.121.0+9e31e41a/MODULE_046ebd86c77dc4a94c6c+5be477de/model.neff CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:11e4c2fa5f8cc618cb6178a46af3b5804be0727115c0aaf6532f396ac8865c51
+ oid sha256:4b5cd54ec5279c4dde9e6475b7cfaeda2aab62e3e2a92fc242c1a56d05049874
  size 236544
neuronxcc-2.18.121.0+9e31e41a/MODULE_046ebd86c77dc4a94c6c+5be477de/wrapped_neff.hlo CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:0d66316e616936a42d3d682321b2a7b1e3f4a4f7a8e3dac77e244e457c6da082
+ oid sha256:bc1dd826502c3a5e1b83a630e5959c0f82c6ac46c9977cc351feebcc5a0fe5c1
  size 247153
neuronxcc-2.18.121.0+9e31e41a/MODULE_0bbe60dde8eaacbc8218+5be477de/compile_flags.json ADDED
@@ -0,0 +1 @@
+ ["--target=trn1", "--auto-cast=none", "--model-type=transformer", "--tensorizer-options=--enable-ccop-compute-overlap --cc-pipeline-tiling-factor=2 --vectorize-dge-dma --vectorize-strided-dma ", "-O2", "--internal-num-neuroncores-per-sengine=1", "--logfile=/tmp/nxd_model/token_generation_model/_tp0_bk0/log-neuron-cc.txt", "--enable-internal-neff-wrapper"]
neuronxcc-2.18.121.0+9e31e41a/MODULE_0bbe60dde8eaacbc8218+5be477de/model.done ADDED
File without changes
neuronxcc-2.18.121.0+9e31e41a/MODULE_0bbe60dde8eaacbc8218+5be477de/model.hlo_module.pb ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:cec162b6f82a677bf5bad8a8984a403624ed7e7f8405ed1fd2b89ffb70ba3af5
+ size 46541
neuronxcc-2.18.121.0+9e31e41a/MODULE_0bbe60dde8eaacbc8218+5be477de/model.neff ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:60e93f84fcd9ed08889336789a60b971c3c21a507ccc034decf8161453cd443a
+ size 144384
neuronxcc-2.18.121.0+9e31e41a/MODULE_0bbe60dde8eaacbc8218+5be477de/wrapped_neff.hlo ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:885b920ee07701ecbd3ad5016e0df8b9fda3c38ce8ee8418f9978ccc29609f40
+ size 152045
neuronxcc-2.18.121.0+9e31e41a/MODULE_0f88705903403514996d+431f5505/compile_flags.json ADDED
@@ -0,0 +1 @@
+ "--model-type=transformer -O1 --lnc=1 --internal-hlo2tensorizer-options=--experimental-unsafe-fp8e4m3fn-as-fp8e4m3 --logfile=/tmp/nxd_model/layout_opt/log-neuron-cc.txt"
neuronxcc-2.18.121.0+9e31e41a/MODULE_0f88705903403514996d+431f5505/model.done ADDED
File without changes
neuronxcc-2.18.121.0+9e31e41a/MODULE_0f88705903403514996d+431f5505/model.hlo_module.pb ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0991e707bfd4b423cddb443d20b4f5aec4b1c262e379e65b64c27b13445ce083
+ size 7099
neuronxcc-2.18.121.0+9e31e41a/MODULE_0f88705903403514996d+431f5505/model.neff ADDED
Binary file (42 kB)
neuronxcc-2.18.121.0+9e31e41a/MODULE_1215feca19e3858f9ef6+84f3e719/compile_flags.json ADDED
@@ -0,0 +1 @@
+ ["--target=trn1", "--auto-cast=none", "--model-type=transformer", "--tensorizer-options=--enable-ccop-compute-overlap --cc-pipeline-tiling-factor=2 --vectorize-dge-dma --vectorize-strided-dma ", "-O2", "--internal-num-neuroncores-per-sengine=1", "--logfile=/tmp/nxd_model/context_encoding_model/_tp0_bk0/log-neuron-cc.txt"]
neuronxcc-2.18.121.0+9e31e41a/MODULE_1215feca19e3858f9ef6+84f3e719/model.done ADDED
File without changes
neuronxcc-2.18.121.0+9e31e41a/MODULE_1215feca19e3858f9ef6+84f3e719/model.hlo_module.pb ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:527ff38e542de60af8633e078ec3c5c57fbadbe30d74ee1aac584834b1615288
+ size 53803
neuronxcc-2.18.121.0+9e31e41a/MODULE_1215feca19e3858f9ef6+84f3e719/model.neff ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d6869ac00c3e40d1cf9945744471e75dd98b9e6b9571289427d0902e068456fc
+ size 164864
neuronxcc-2.18.121.0+9e31e41a/MODULE_18642e0fd797db5b7fcb+431f5505/model.neff CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:ccd8e60a616162817b781abb8d9324467fe9ef1753a395eebce9fc2aca6fa197
+ oid sha256:ad2f90d77e4a5f3f591bd8b664f2daa39b197f27e7c946650e8776e7a9a5d0fc
  size 103424
neuronxcc-2.18.121.0+9e31e41a/MODULE_21d49e164d26352245e5+84f3e719/model.hlo_module.pb CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:f3ccb7b89f7123461216316af02734622aacbdaaf771430310b55142ec4a8f66
+ oid sha256:b25c984cce04ac74df62cd1dc284c28470313957cf8e49fea854099b9378ffcf
  size 448722
neuronxcc-2.18.121.0+9e31e41a/MODULE_21d49e164d26352245e5+84f3e719/model.neff CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:f309ec2855b27f9f1c7e04499d51dc3a1acc033f86011ba5fa360d32b900575f
+ oid sha256:fafb376e0631f5d62079fe5c78eb54c9982815c07a92e4dd51e53e801f78048d
  size 32257024
neuronxcc-2.18.121.0+9e31e41a/MODULE_30c8e5dffb371f5a2fc0+5be477de/compile_flags.json ADDED
@@ -0,0 +1 @@
+ ["--target=trn1", "--auto-cast=none", "--model-type=transformer", "--tensorizer-options=--enable-ccop-compute-overlap --cc-pipeline-tiling-factor=2 --vectorize-dge-dma --vectorize-strided-dma ", "-O2", "--internal-num-neuroncores-per-sengine=1", "--logfile=/tmp/nxd_model/token_generation_model/_tp0_bk0/log-neuron-cc.txt", "--enable-internal-neff-wrapper"]
neuronxcc-2.18.121.0+9e31e41a/MODULE_30c8e5dffb371f5a2fc0+5be477de/model.done ADDED
File without changes
neuronxcc-2.18.121.0+9e31e41a/MODULE_30c8e5dffb371f5a2fc0+5be477de/model.hlo_module.pb ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2218fdfbf71bf29afa0ed05cbd601bd253eb224b8b68b012acc92cd349a9cfdb
+ size 42250
neuronxcc-2.18.121.0+9e31e41a/MODULE_30c8e5dffb371f5a2fc0+5be477de/model.neff ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:08bd07117299b6a109bbdcc2de25721ecca088a812d7ccf82129137817b69a44
+ size 164864
neuronxcc-2.18.121.0+9e31e41a/MODULE_30c8e5dffb371f5a2fc0+5be477de/wrapped_neff.hlo ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f2b61c4890b4861116cc151d02681a4e10deda4aaf1728a6a24853b73747001f
+ size 172461
neuronxcc-2.18.121.0+9e31e41a/MODULE_331276a07386ee77d52e+431f5505/model.neff CHANGED
Binary files a/neuronxcc-2.18.121.0+9e31e41a/MODULE_331276a07386ee77d52e+431f5505/model.neff and b/neuronxcc-2.18.121.0+9e31e41a/MODULE_331276a07386ee77d52e+431f5505/model.neff differ
 
neuronxcc-2.18.121.0+9e31e41a/MODULE_3688ee5eab5a3273c651+84f3e719/compile_flags.json ADDED
@@ -0,0 +1 @@
+ ["--target=trn1", "--auto-cast=none", "--model-type=transformer", "--tensorizer-options=--enable-ccop-compute-overlap --cc-pipeline-tiling-factor=2 --vectorize-dge-dma --vectorize-strided-dma ", "-O2", "--internal-num-neuroncores-per-sengine=1", "--logfile=/tmp/nxd_model/context_encoding_model/_tp0_bk0/log-neuron-cc.txt"]
neuronxcc-2.18.121.0+9e31e41a/MODULE_3688ee5eab5a3273c651+84f3e719/model.done ADDED
File without changes
neuronxcc-2.18.121.0+9e31e41a/MODULE_3688ee5eab5a3273c651+84f3e719/model.hlo_module.pb ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7a84e2c4df78dd443728551a6323c507c957175fc24244fad6ae2771ef852433
+ size 83856
neuronxcc-2.18.121.0+9e31e41a/MODULE_3688ee5eab5a3273c651+84f3e719/model.neff ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9ba992580776ec3af728322a3b16baf4a7fc3238ff50b75431e123b378e170a0
+ size 175104
neuronxcc-2.18.121.0+9e31e41a/MODULE_3a1bd8b5ecc619e49cdb+5be477de/model.neff CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:5f8e16a72ddcf52590a2d72852188ffca5ab3b2df96be716b98e5681743ede41
+ oid sha256:e9c035847659ef48195627fa9b0edd5de00a79c72a7f9d5f84fa4ed48dd96e8a
  size 216064