dacorvo (HF Staff) committed on
Commit f23cc9c · verified · 1 parent: 523d0cc

Synchronizing local compiler cache.
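For context, the registry entries and compiled NEFF artifacts in this commit are meant to be consumed by optimum-neuron on AWS Neuron instances rather than edited by hand. The following is a minimal, illustrative Python sketch (not part of this commit) of how such a cached configuration is typically reused; it assumes the standard optimum-neuron API and that this repository is configured as the compiler cache (for example through the CUSTOM_CACHE_REPO environment variable), so treat the placeholder repo id and the exact keyword arguments as assumptions to check against the optimum-neuron documentation.

import os

# Assumption: point optimum-neuron at the cache repository before exporting.
# The repo id below is a placeholder, not necessarily this repository's id.
os.environ.setdefault("CUSTOM_CACHE_REPO", "my-org/neuron-compiler-cache")

from optimum.neuron import NeuronModelForCausalLM

# Requesting an export whose parameters match a cached registry entry
# (batch_size, sequence_length, num_cores / tp_degree, auto_cast_type) lets the
# precompiled NEFF files be fetched from the cache instead of recompiled locally.
model = NeuronModelForCausalLM.from_pretrained(
    "unsloth/Llama-3.2-1B-Instruct",
    export=True,
    batch_size=4,
    sequence_length=4096,
    num_cores=2,
    auto_cast_type="bf16",
)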

Note: this view is limited to 50 files because the commit contains too many changes; see the raw diff for the full file list.
Files changed (50)
  1. .gitattributes +42 -0
  2. neuronxcc-2.17.194.0+d312836f/0_REGISTRY/0.2.0.dev5/llama/deepseek-ai/DeepSeek-R1-Distill-Llama-70B/2265c570ad91bd59bb02.json +77 -0
  3. neuronxcc-2.17.194.0+d312836f/0_REGISTRY/0.2.0.dev5/llama/deepseek-ai/DeepSeek-R1-Distill-Llama-70B/eed40aeb6cee419f447f.json +77 -0
  4. neuronxcc-2.17.194.0+d312836f/0_REGISTRY/0.2.0.dev5/llama/deepseek-ai/DeepSeek-R1-Distill-Llama-70B/fccf90563714271da2a8.json +77 -0
  5. neuronxcc-2.17.194.0+d312836f/0_REGISTRY/0.2.0.dev5/llama/meta-llama/Llama-3.1-8B-Instruct/0988b920dff9a2cb9736.json +55 -0
  6. neuronxcc-2.17.194.0+d312836f/0_REGISTRY/0.2.0.dev5/llama/meta-llama/Llama-3.1-8B-Instruct/61853a0b31b294a846cd.json +77 -0
  7. neuronxcc-2.17.194.0+d312836f/0_REGISTRY/0.2.0.dev5/llama/meta-llama/Llama-3.1-8B-Instruct/8261b9be41682c346506.json +77 -0
  8. neuronxcc-2.17.194.0+d312836f/0_REGISTRY/0.2.0.dev5/llama/unsloth/Llama-3.2-1B-Instruct/acb8373c9ac1d7f31f35.json +56 -0
  9. neuronxcc-2.17.194.0+d312836f/0_REGISTRY/0.2.0.dev5/llama/unsloth/Llama-3.2-1B-Instruct/e8f7b8c083bc968773ae.json +78 -0
  10. neuronxcc-2.17.194.0+d312836f/0_REGISTRY/0.2.0.dev5/llama/unsloth/Llama-3.2-1B-Instruct/fb7899d27ccbc59330a2.json +78 -0
  11. neuronxcc-2.17.194.0+d312836f/0_REGISTRY/0.2.0.dev5/mixtral/mistralai/Mixtral-8x7B-Instruct-v0.1/f96bf36952a158cc9e11.json +73 -0
  12. neuronxcc-2.17.194.0+d312836f/0_REGISTRY/0.2.0.dev5/qwen2/Qwen/Qwen2.5-0.5B/442d198b468f7347f4bf.json +49 -0
  13. neuronxcc-2.17.194.0+d312836f/0_REGISTRY/0.2.0.dev7/llama/unsloth/Llama-3.2-1B-Instruct/86f34a5b4c3c146ba263.json +56 -0
  14. neuronxcc-2.17.194.0+d312836f/MODULE_0672b5f56c1c30461234+431f5505/model.neff +0 -0
  15. neuronxcc-2.17.194.0+d312836f/MODULE_088cee8e523341202b27+431f5505/model.neff +0 -0
  16. neuronxcc-2.17.194.0+d312836f/MODULE_098093605321e39067b1+bfe5714b/compile_flags.json +1 -0
  17. neuronxcc-2.17.194.0+d312836f/MODULE_098093605321e39067b1+bfe5714b/model.done +0 -0
  18. neuronxcc-2.17.194.0+d312836f/MODULE_098093605321e39067b1+bfe5714b/model.hlo_module.pb +3 -0
  19. neuronxcc-2.17.194.0+d312836f/MODULE_098093605321e39067b1+bfe5714b/model.neff +3 -0
  20. neuronxcc-2.17.194.0+d312836f/MODULE_0b8b4fa5620a4855f332+165e9558/model.hlo_module.pb +1 -1
  21. neuronxcc-2.17.194.0+d312836f/MODULE_0b8b4fa5620a4855f332+165e9558/model.neff +1 -1
  22. neuronxcc-2.17.194.0+d312836f/MODULE_0b8b4fa5620a4855f332+165e9558/wrapped_neff.hlo +1 -1
  23. neuronxcc-2.17.194.0+d312836f/MODULE_22cf4b1c9280bdb27a84+bfe5714b/compile_flags.json +1 -0
  24. neuronxcc-2.17.194.0+d312836f/MODULE_22cf4b1c9280bdb27a84+bfe5714b/model.done +0 -0
  25. neuronxcc-2.17.194.0+d312836f/MODULE_22cf4b1c9280bdb27a84+bfe5714b/model.hlo_module.pb +3 -0
  26. neuronxcc-2.17.194.0+d312836f/MODULE_22cf4b1c9280bdb27a84+bfe5714b/model.neff +3 -0
  27. neuronxcc-2.17.194.0+d312836f/MODULE_2b346bc95a0a22d338b4+431f5505/compile_flags.json +1 -0
  28. neuronxcc-2.17.194.0+d312836f/MODULE_2b346bc95a0a22d338b4+431f5505/model.done +0 -0
  29. neuronxcc-2.17.194.0+d312836f/MODULE_2b346bc95a0a22d338b4+431f5505/model.hlo_module.pb +3 -0
  30. neuronxcc-2.17.194.0+d312836f/MODULE_2b346bc95a0a22d338b4+431f5505/model.neff +3 -0
  31. neuronxcc-2.17.194.0+d312836f/MODULE_34a6b42796c8b4e2f58b+431f5505/model.neff +1 -1
  32. neuronxcc-2.17.194.0+d312836f/MODULE_38e85f0b1222d1fa5d21+431f5505/compile_flags.json +1 -0
  33. neuronxcc-2.17.194.0+d312836f/MODULE_38e85f0b1222d1fa5d21+431f5505/model.done +0 -0
  34. neuronxcc-2.17.194.0+d312836f/MODULE_38e85f0b1222d1fa5d21+431f5505/model.hlo_module.pb +3 -0
  35. neuronxcc-2.17.194.0+d312836f/MODULE_38e85f0b1222d1fa5d21+431f5505/model.neff +3 -0
  36. neuronxcc-2.17.194.0+d312836f/MODULE_3aaf2f909aa93d035b6b+431f5505/compile_flags.json +1 -0
  37. neuronxcc-2.17.194.0+d312836f/MODULE_3aaf2f909aa93d035b6b+431f5505/model.done +0 -0
  38. neuronxcc-2.17.194.0+d312836f/MODULE_3aaf2f909aa93d035b6b+431f5505/model.hlo_module.pb +3 -0
  39. neuronxcc-2.17.194.0+d312836f/MODULE_3aaf2f909aa93d035b6b+431f5505/model.neff +3 -0
  40. neuronxcc-2.17.194.0+d312836f/MODULE_3fa377f9222d3e733b57+bfe5714b/compile_flags.json +1 -0
  41. neuronxcc-2.17.194.0+d312836f/MODULE_3fa377f9222d3e733b57+bfe5714b/model.done +0 -0
  42. neuronxcc-2.17.194.0+d312836f/MODULE_3fa377f9222d3e733b57+bfe5714b/model.hlo_module.pb +3 -0
  43. neuronxcc-2.17.194.0+d312836f/MODULE_3fa377f9222d3e733b57+bfe5714b/model.neff +3 -0
  44. neuronxcc-2.17.194.0+d312836f/MODULE_43df25d3bc7b10bffcdc+165e9558/compile_flags.json +1 -0
  45. neuronxcc-2.17.194.0+d312836f/MODULE_43df25d3bc7b10bffcdc+165e9558/model.done +0 -0
  46. neuronxcc-2.17.194.0+d312836f/MODULE_43df25d3bc7b10bffcdc+165e9558/model.hlo_module.pb +3 -0
  47. neuronxcc-2.17.194.0+d312836f/MODULE_43df25d3bc7b10bffcdc+165e9558/model.neff +3 -0
  48. neuronxcc-2.17.194.0+d312836f/MODULE_43df25d3bc7b10bffcdc+165e9558/wrapped_neff.hlo +3 -0
  49. neuronxcc-2.17.194.0+d312836f/MODULE_48a1dea69e2dc2d09e06+165e9558/compile_flags.json +1 -0
  50. neuronxcc-2.17.194.0+d312836f/MODULE_48a1dea69e2dc2d09e06+165e9558/model.done +0 -0
.gitattributes CHANGED
@@ -2662,3 +2662,45 @@ neuronxcc-2.17.194.0+d312836f/MODULE_bfedca8ab047e39c175b+431f5505/model.neff fi
  neuronxcc-2.17.194.0+d312836f/MODULE_a97f776a1a796fa145d6+bfe5714b/model.neff filter=lfs diff=lfs merge=lfs -text
  neuronxcc-2.17.194.0+d312836f/MODULE_f41e2b0db5189f1f38af+165e9558/model.neff filter=lfs diff=lfs merge=lfs -text
  neuronxcc-2.17.194.0+d312836f/MODULE_f41e2b0db5189f1f38af+165e9558/wrapped_neff.hlo filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.17.194.0+d312836f/MODULE_098093605321e39067b1+bfe5714b/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.17.194.0+d312836f/MODULE_22cf4b1c9280bdb27a84+bfe5714b/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.17.194.0+d312836f/MODULE_2b346bc95a0a22d338b4+431f5505/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.17.194.0+d312836f/MODULE_38e85f0b1222d1fa5d21+431f5505/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.17.194.0+d312836f/MODULE_3aaf2f909aa93d035b6b+431f5505/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.17.194.0+d312836f/MODULE_3fa377f9222d3e733b57+bfe5714b/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.17.194.0+d312836f/MODULE_43df25d3bc7b10bffcdc+165e9558/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.17.194.0+d312836f/MODULE_43df25d3bc7b10bffcdc+165e9558/wrapped_neff.hlo filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.17.194.0+d312836f/MODULE_48a1dea69e2dc2d09e06+165e9558/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.17.194.0+d312836f/MODULE_48a1dea69e2dc2d09e06+165e9558/wrapped_neff.hlo filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.17.194.0+d312836f/MODULE_4aa0e3bf1a6a1fb52787+431f5505/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.17.194.0+d312836f/MODULE_4b322d0da325d45466b4+613edded/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.17.194.0+d312836f/MODULE_4c28a8da143133817625+bfe5714b/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.17.194.0+d312836f/MODULE_638e58431e796ebec9da+431f5505/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.17.194.0+d312836f/MODULE_81cb65755241019475b4+613edded/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.17.194.0+d312836f/MODULE_83951e6d774a87a42cdb+165e9558/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.17.194.0+d312836f/MODULE_83951e6d774a87a42cdb+165e9558/wrapped_neff.hlo filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.17.194.0+d312836f/MODULE_8ee20cf7bba38ca778e1+bfe5714b/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.17.194.0+d312836f/MODULE_966cc4bb99f8d4385df3+bfe5714b/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.17.194.0+d312836f/MODULE_a176cb00890cc7100d7b+613edded/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.17.194.0+d312836f/MODULE_aa317f69a340c77d5b24+165e9558/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.17.194.0+d312836f/MODULE_aa317f69a340c77d5b24+165e9558/wrapped_neff.hlo filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.17.194.0+d312836f/MODULE_ada321cbb84058500c30+bfe5714b/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.17.194.0+d312836f/MODULE_b425d3d8062a3c8907bd+165e9558/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.17.194.0+d312836f/MODULE_b425d3d8062a3c8907bd+165e9558/wrapped_neff.hlo filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.17.194.0+d312836f/MODULE_ba7fb9c764ec187d8f06+26ac6be0/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.17.194.0+d312836f/MODULE_bcbb3942ae0e6e62f9ed+431f5505/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.17.194.0+d312836f/MODULE_c1ff08289224c1071721+613edded/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.17.194.0+d312836f/MODULE_caf5f11d6a1c2af1a2d1+bfc62e4c/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.17.194.0+d312836f/MODULE_caf5f11d6a1c2af1a2d1+bfc62e4c/wrapped_neff.hlo filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.17.194.0+d312836f/MODULE_ce3f6b0c3ecffef892cf+165e9558/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.17.194.0+d312836f/MODULE_ce3f6b0c3ecffef892cf+165e9558/wrapped_neff.hlo filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.17.194.0+d312836f/MODULE_d57d96a2409c0dcbcb1f+165e9558/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.17.194.0+d312836f/MODULE_d57d96a2409c0dcbcb1f+165e9558/wrapped_neff.hlo filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.17.194.0+d312836f/MODULE_e8078469b2219abbe7b5+bfe5714b/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.17.194.0+d312836f/MODULE_ed703e23be7449f8c891+165e9558/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.17.194.0+d312836f/MODULE_ed703e23be7449f8c891+165e9558/wrapped_neff.hlo filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.17.194.0+d312836f/MODULE_f2426656f5f2ceb6f048+165e9558/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.17.194.0+d312836f/MODULE_f2426656f5f2ceb6f048+165e9558/wrapped_neff.hlo filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.17.194.0+d312836f/MODULE_f2bc87b9df15b7784add+bfe5714b/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.17.194.0+d312836f/MODULE_f781291bf31bf79730b8+431f5505/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.17.194.0+d312836f/MODULE_fdcbd41f965fdff93616+431f5505/model.neff filter=lfs diff=lfs merge=lfs -text
neuronxcc-2.17.194.0+d312836f/0_REGISTRY/0.2.0.dev5/llama/deepseek-ai/DeepSeek-R1-Distill-Llama-70B/2265c570ad91bd59bb02.json ADDED
@@ -0,0 +1,77 @@
+ {
+   "_entry_class": "SingleModelCacheEntry",
+   "_model_id": "deepseek-ai/DeepSeek-R1-Distill-Llama-70B",
+   "_task": "text-generation",
+   "architectures": [
+     "LlamaForCausalLM"
+   ],
+   "attention_bias": false,
+   "attention_dropout": 0.0,
+   "head_dim": 128,
+   "hidden_act": "silu",
+   "hidden_size": 8192,
+   "initializer_range": 0.02,
+   "intermediate_size": 28672,
+   "max_position_embeddings": 131072,
+   "mlp_bias": false,
+   "model_type": "llama",
+   "neuron": {
+     "_serialized_key": "NxDNeuronConfig",
+     "async_mode": false,
+     "attn_kernel_enabled": false,
+     "batch_size": 1,
+     "capacity_factor": null,
+     "cc_pipeline_tiling_factor": 2,
+     "checkpoint_id": "deepseek-ai/DeepSeek-R1-Distill-Llama-70B",
+     "checkpoint_revision": "b1c0b44b4369b597ad119a196caf79a9c40e141e",
+     "continuous_batching": false,
+     "enable_bucketing": false,
+     "ep_degree": 1,
+     "flash_decoding_enabled": false,
+     "fused_qkv": true,
+     "glu_mlp": true,
+     "is_chunked_prefill": false,
+     "local_ranks_size": 24,
+     "logical_nc_config": 1,
+     "max_batch_size": 1,
+     "max_context_length": 4096,
+     "max_topk": 256,
+     "mlp_kernel_enabled": false,
+     "mlp_kernel_fuse_residual_add": false,
+     "n_active_tokens": 4096,
+     "neuronxcc_version": "2.17.194.0+d312836f",
+     "num_cores_per_group": 1,
+     "on_device_sampling": true,
+     "optimum_neuron_version": "0.2.0.dev5",
+     "output_logits": false,
+     "padding_side": "right",
+     "pp_degree": 1,
+     "qk_layernorm": false,
+     "qkv_kernel_enabled": false,
+     "rpl_reduce_dtype": "bfloat16",
+     "sequence_length": 4096,
+     "sequence_parallel_enabled": false,
+     "speculation_length": 0,
+     "start_rank_id": 0,
+     "target": null,
+     "torch_dtype": "bfloat16",
+     "tp_degree": 24,
+     "vocab_parallel": false
+   },
+   "num_attention_heads": 64,
+   "num_hidden_layers": 80,
+   "num_key_value_heads": 8,
+   "pretraining_tp": 1,
+   "rms_norm_eps": 1e-05,
+   "rope_scaling": {
+     "factor": 8.0,
+     "high_freq_factor": 4.0,
+     "low_freq_factor": 1.0,
+     "original_max_position_embeddings": 8192,
+     "rope_type": "llama3"
+   },
+   "rope_theta": 500000.0,
+   "tie_word_embeddings": false,
+   "use_cache": true,
+   "vocab_size": 128256
+ }
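
Each registry entry above is plain JSON that pairs the checkpoint's Hugging Face configuration with a neuron section describing the exact export it was compiled for. A small illustrative sketch of inspecting one entry locally (the path is simply the file added above, assuming the repository has been cloned):

import json

# Path of the registry entry shown above, relative to a local clone of this repo.
entry_path = (
    "neuronxcc-2.17.194.0+d312836f/0_REGISTRY/0.2.0.dev5/llama/"
    "deepseek-ai/DeepSeek-R1-Distill-Llama-70B/2265c570ad91bd59bb02.json"
)

with open(entry_path) as f:
    entry = json.load(f)

neuron = entry["neuron"]
# The neuron section records the parameters a deployment must match for the
# cached NEFF artifacts to be reusable (batch size, sequence length, tensor
# parallelism, dtype, compiler version).
print(entry["_model_id"], entry["_task"])
print(
    f"batch_size={neuron['batch_size']}, "
    f"sequence_length={neuron['sequence_length']}, "
    f"tp_degree={neuron['tp_degree']}, "
    f"dtype={neuron.get('torch_dtype', neuron.get('auto_cast_type'))}, "
    f"neuronxcc={neuron['neuronxcc_version']}"
)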
neuronxcc-2.17.194.0+d312836f/0_REGISTRY/0.2.0.dev5/llama/deepseek-ai/DeepSeek-R1-Distill-Llama-70B/eed40aeb6cee419f447f.json ADDED
@@ -0,0 +1,77 @@
+ {
+   "_entry_class": "SingleModelCacheEntry",
+   "_model_id": "deepseek-ai/DeepSeek-R1-Distill-Llama-70B",
+   "_task": "text-generation",
+   "architectures": [
+     "LlamaForCausalLM"
+   ],
+   "attention_bias": false,
+   "attention_dropout": 0.0,
+   "head_dim": 128,
+   "hidden_act": "silu",
+   "hidden_size": 8192,
+   "initializer_range": 0.02,
+   "intermediate_size": 28672,
+   "max_position_embeddings": 131072,
+   "mlp_bias": false,
+   "model_type": "llama",
+   "neuron": {
+     "_serialized_key": "NxDNeuronConfig",
+     "async_mode": false,
+     "attn_kernel_enabled": false,
+     "batch_size": 8,
+     "capacity_factor": null,
+     "cc_pipeline_tiling_factor": 2,
+     "checkpoint_id": "deepseek-ai/DeepSeek-R1-Distill-Llama-70B",
+     "checkpoint_revision": "b1c0b44b4369b597ad119a196caf79a9c40e141e",
+     "continuous_batching": true,
+     "enable_bucketing": false,
+     "ep_degree": 1,
+     "flash_decoding_enabled": false,
+     "fused_qkv": true,
+     "glu_mlp": true,
+     "is_chunked_prefill": false,
+     "local_ranks_size": 24,
+     "logical_nc_config": 1,
+     "max_batch_size": 8,
+     "max_context_length": 4096,
+     "max_topk": 256,
+     "mlp_kernel_enabled": false,
+     "mlp_kernel_fuse_residual_add": false,
+     "n_active_tokens": 4096,
+     "neuronxcc_version": "2.17.194.0+d312836f",
+     "num_cores_per_group": 1,
+     "on_device_sampling": true,
+     "optimum_neuron_version": "0.2.0.dev5",
+     "output_logits": false,
+     "padding_side": "right",
+     "pp_degree": 1,
+     "qk_layernorm": false,
+     "qkv_kernel_enabled": false,
+     "rpl_reduce_dtype": "bfloat16",
+     "sequence_length": 4096,
+     "sequence_parallel_enabled": false,
+     "speculation_length": 0,
+     "start_rank_id": 0,
+     "target": null,
+     "torch_dtype": "bfloat16",
+     "tp_degree": 24,
+     "vocab_parallel": false
+   },
+   "num_attention_heads": 64,
+   "num_hidden_layers": 80,
+   "num_key_value_heads": 8,
+   "pretraining_tp": 1,
+   "rms_norm_eps": 1e-05,
+   "rope_scaling": {
+     "factor": 8.0,
+     "high_freq_factor": 4.0,
+     "low_freq_factor": 1.0,
+     "original_max_position_embeddings": 8192,
+     "rope_type": "llama3"
+   },
+   "rope_theta": 500000.0,
+   "tie_word_embeddings": false,
+   "use_cache": true,
+   "vocab_size": 128256
+ }
neuronxcc-2.17.194.0+d312836f/0_REGISTRY/0.2.0.dev5/llama/deepseek-ai/DeepSeek-R1-Distill-Llama-70B/fccf90563714271da2a8.json ADDED
@@ -0,0 +1,77 @@
+ {
+   "_entry_class": "SingleModelCacheEntry",
+   "_model_id": "deepseek-ai/DeepSeek-R1-Distill-Llama-70B",
+   "_task": "text-generation",
+   "architectures": [
+     "LlamaForCausalLM"
+   ],
+   "attention_bias": false,
+   "attention_dropout": 0.0,
+   "head_dim": 128,
+   "hidden_act": "silu",
+   "hidden_size": 8192,
+   "initializer_range": 0.02,
+   "intermediate_size": 28672,
+   "max_position_embeddings": 131072,
+   "mlp_bias": false,
+   "model_type": "llama",
+   "neuron": {
+     "_serialized_key": "NxDNeuronConfig",
+     "async_mode": false,
+     "attn_kernel_enabled": false,
+     "batch_size": 4,
+     "capacity_factor": null,
+     "cc_pipeline_tiling_factor": 2,
+     "checkpoint_id": "deepseek-ai/DeepSeek-R1-Distill-Llama-70B",
+     "checkpoint_revision": "b1c0b44b4369b597ad119a196caf79a9c40e141e",
+     "continuous_batching": true,
+     "enable_bucketing": false,
+     "ep_degree": 1,
+     "flash_decoding_enabled": false,
+     "fused_qkv": true,
+     "glu_mlp": true,
+     "is_chunked_prefill": false,
+     "local_ranks_size": 24,
+     "logical_nc_config": 1,
+     "max_batch_size": 4,
+     "max_context_length": 4096,
+     "max_topk": 256,
+     "mlp_kernel_enabled": false,
+     "mlp_kernel_fuse_residual_add": false,
+     "n_active_tokens": 4096,
+     "neuronxcc_version": "2.17.194.0+d312836f",
+     "num_cores_per_group": 1,
+     "on_device_sampling": true,
+     "optimum_neuron_version": "0.2.0.dev5",
+     "output_logits": false,
+     "padding_side": "right",
+     "pp_degree": 1,
+     "qk_layernorm": false,
+     "qkv_kernel_enabled": false,
+     "rpl_reduce_dtype": "bfloat16",
+     "sequence_length": 4096,
+     "sequence_parallel_enabled": false,
+     "speculation_length": 0,
+     "start_rank_id": 0,
+     "target": null,
+     "torch_dtype": "bfloat16",
+     "tp_degree": 24,
+     "vocab_parallel": false
+   },
+   "num_attention_heads": 64,
+   "num_hidden_layers": 80,
+   "num_key_value_heads": 8,
+   "pretraining_tp": 1,
+   "rms_norm_eps": 1e-05,
+   "rope_scaling": {
+     "factor": 8.0,
+     "high_freq_factor": 4.0,
+     "low_freq_factor": 1.0,
+     "original_max_position_embeddings": 8192,
+     "rope_type": "llama3"
+   },
+   "rope_theta": 500000.0,
+   "tie_word_embeddings": false,
+   "use_cache": true,
+   "vocab_size": 128256
+ }
neuronxcc-2.17.194.0+d312836f/0_REGISTRY/0.2.0.dev5/llama/meta-llama/Llama-3.1-8B-Instruct/0988b920dff9a2cb9736.json ADDED
@@ -0,0 +1,55 @@
+ {
+   "_entry_class": "SingleModelCacheEntry",
+   "_model_id": "meta-llama/Llama-3.1-8B-Instruct",
+   "_task": "text-generation",
+   "architectures": [
+     "LlamaForCausalLM"
+   ],
+   "attention_bias": false,
+   "attention_dropout": 0.0,
+   "head_dim": 128,
+   "hidden_act": "silu",
+   "hidden_size": 4096,
+   "initializer_range": 0.02,
+   "intermediate_size": 14336,
+   "max_position_embeddings": 131072,
+   "mlp_bias": false,
+   "model_type": "llama",
+   "neuron": {
+     "_serialized_key": "HloNeuronConfig",
+     "all_reduce_dtype": null,
+     "allow_flash_attention": true,
+     "attention_layout": "BSH",
+     "attn_output_transposed": false,
+     "auto_cast_type": "bf16",
+     "batch_size": 64,
+     "checkpoint_id": "meta-llama/Llama-3.1-8B-Instruct",
+     "checkpoint_revision": "0e9e39f249a16976918f6564b8830bc894c89659",
+     "collectives_layout": "HSB",
+     "continuous_batching": true,
+     "fuse_qkv": true,
+     "group_query_attention": "shard-over-heads",
+     "log_softmax_scores": false,
+     "neuronxcc_version": "2.17.194.0+d312836f",
+     "optimum_neuron_version": "0.2.0.dev5",
+     "output_all_logits": false,
+     "sequence_length": 4096,
+     "tp_degree": 8
+   },
+   "num_attention_heads": 32,
+   "num_hidden_layers": 32,
+   "num_key_value_heads": 8,
+   "pretraining_tp": 1,
+   "rms_norm_eps": 1e-05,
+   "rope_scaling": {
+     "factor": 8.0,
+     "high_freq_factor": 4.0,
+     "low_freq_factor": 1.0,
+     "original_max_position_embeddings": 8192,
+     "rope_type": "llama3"
+   },
+   "rope_theta": 500000.0,
+   "tie_word_embeddings": false,
+   "use_cache": true,
+   "vocab_size": 128256
+ }
neuronxcc-2.17.194.0+d312836f/0_REGISTRY/0.2.0.dev5/llama/meta-llama/Llama-3.1-8B-Instruct/61853a0b31b294a846cd.json ADDED
@@ -0,0 +1,77 @@
+ {
+   "_entry_class": "SingleModelCacheEntry",
+   "_model_id": "meta-llama/Llama-3.1-8B-Instruct",
+   "_task": "text-generation",
+   "architectures": [
+     "LlamaForCausalLM"
+   ],
+   "attention_bias": false,
+   "attention_dropout": 0.0,
+   "head_dim": 128,
+   "hidden_act": "silu",
+   "hidden_size": 4096,
+   "initializer_range": 0.02,
+   "intermediate_size": 14336,
+   "max_position_embeddings": 131072,
+   "mlp_bias": false,
+   "model_type": "llama",
+   "neuron": {
+     "_serialized_key": "NxDNeuronConfig",
+     "async_mode": false,
+     "attn_kernel_enabled": false,
+     "batch_size": 64,
+     "capacity_factor": null,
+     "cc_pipeline_tiling_factor": 2,
+     "checkpoint_id": "meta-llama/Llama-3.1-8B-Instruct",
+     "checkpoint_revision": "0e9e39f249a16976918f6564b8830bc894c89659",
+     "continuous_batching": true,
+     "enable_bucketing": false,
+     "ep_degree": 1,
+     "flash_decoding_enabled": false,
+     "fused_qkv": true,
+     "glu_mlp": true,
+     "is_chunked_prefill": false,
+     "local_ranks_size": 8,
+     "logical_nc_config": 1,
+     "max_batch_size": 64,
+     "max_context_length": 4096,
+     "max_topk": 256,
+     "mlp_kernel_enabled": false,
+     "mlp_kernel_fuse_residual_add": false,
+     "n_active_tokens": 4096,
+     "neuronxcc_version": "2.17.194.0+d312836f",
+     "num_cores_per_group": 1,
+     "on_device_sampling": true,
+     "optimum_neuron_version": "0.2.0.dev5",
+     "output_logits": false,
+     "padding_side": "right",
+     "pp_degree": 1,
+     "qk_layernorm": false,
+     "qkv_kernel_enabled": false,
+     "rpl_reduce_dtype": "bfloat16",
+     "sequence_length": 4096,
+     "sequence_parallel_enabled": false,
+     "speculation_length": 0,
+     "start_rank_id": 0,
+     "target": null,
+     "torch_dtype": "bfloat16",
+     "tp_degree": 8,
+     "vocab_parallel": false
+   },
+   "num_attention_heads": 32,
+   "num_hidden_layers": 32,
+   "num_key_value_heads": 8,
+   "pretraining_tp": 1,
+   "rms_norm_eps": 1e-05,
+   "rope_scaling": {
+     "factor": 8.0,
+     "high_freq_factor": 4.0,
+     "low_freq_factor": 1.0,
+     "original_max_position_embeddings": 8192,
+     "rope_type": "llama3"
+   },
+   "rope_theta": 500000.0,
+   "tie_word_embeddings": false,
+   "use_cache": true,
+   "vocab_size": 128256
+ }
neuronxcc-2.17.194.0+d312836f/0_REGISTRY/0.2.0.dev5/llama/meta-llama/Llama-3.1-8B-Instruct/8261b9be41682c346506.json ADDED
@@ -0,0 +1,77 @@
+ {
+   "_entry_class": "SingleModelCacheEntry",
+   "_model_id": "meta-llama/Llama-3.1-8B-Instruct",
+   "_task": "text-generation",
+   "architectures": [
+     "LlamaForCausalLM"
+   ],
+   "attention_bias": false,
+   "attention_dropout": 0.0,
+   "head_dim": 128,
+   "hidden_act": "silu",
+   "hidden_size": 4096,
+   "initializer_range": 0.02,
+   "intermediate_size": 14336,
+   "max_position_embeddings": 131072,
+   "mlp_bias": false,
+   "model_type": "llama",
+   "neuron": {
+     "_serialized_key": "NxDNeuronConfig",
+     "async_mode": false,
+     "attn_kernel_enabled": false,
+     "batch_size": 32,
+     "capacity_factor": null,
+     "cc_pipeline_tiling_factor": 2,
+     "checkpoint_id": "meta-llama/Llama-3.1-8B-Instruct",
+     "checkpoint_revision": "0e9e39f249a16976918f6564b8830bc894c89659",
+     "continuous_batching": true,
+     "enable_bucketing": false,
+     "ep_degree": 1,
+     "flash_decoding_enabled": false,
+     "fused_qkv": true,
+     "glu_mlp": true,
+     "is_chunked_prefill": false,
+     "local_ranks_size": 8,
+     "logical_nc_config": 1,
+     "max_batch_size": 32,
+     "max_context_length": 4096,
+     "max_topk": 256,
+     "mlp_kernel_enabled": false,
+     "mlp_kernel_fuse_residual_add": false,
+     "n_active_tokens": 4096,
+     "neuronxcc_version": "2.17.194.0+d312836f",
+     "num_cores_per_group": 1,
+     "on_device_sampling": true,
+     "optimum_neuron_version": "0.2.0.dev5",
+     "output_logits": false,
+     "padding_side": "right",
+     "pp_degree": 1,
+     "qk_layernorm": false,
+     "qkv_kernel_enabled": false,
+     "rpl_reduce_dtype": "bfloat16",
+     "sequence_length": 4096,
+     "sequence_parallel_enabled": false,
+     "speculation_length": 0,
+     "start_rank_id": 0,
+     "target": null,
+     "torch_dtype": "bfloat16",
+     "tp_degree": 8,
+     "vocab_parallel": false
+   },
+   "num_attention_heads": 32,
+   "num_hidden_layers": 32,
+   "num_key_value_heads": 8,
+   "pretraining_tp": 1,
+   "rms_norm_eps": 1e-05,
+   "rope_scaling": {
+     "factor": 8.0,
+     "high_freq_factor": 4.0,
+     "low_freq_factor": 1.0,
+     "original_max_position_embeddings": 8192,
+     "rope_type": "llama3"
+   },
+   "rope_theta": 500000.0,
+   "tie_word_embeddings": false,
+   "use_cache": true,
+   "vocab_size": 128256
+ }
neuronxcc-2.17.194.0+d312836f/0_REGISTRY/0.2.0.dev5/llama/unsloth/Llama-3.2-1B-Instruct/acb8373c9ac1d7f31f35.json ADDED
@@ -0,0 +1,56 @@
+ {
+   "_entry_class": "SingleModelCacheEntry",
+   "_model_id": "unsloth/Llama-3.2-1B-Instruct",
+   "_task": "text-generation",
+   "architectures": [
+     "LlamaForCausalLM"
+   ],
+   "attention_bias": false,
+   "attention_dropout": 0.0,
+   "head_dim": 64,
+   "hidden_act": "silu",
+   "hidden_size": 2048,
+   "initializer_range": 0.02,
+   "intermediate_size": 8192,
+   "max_position_embeddings": 131072,
+   "mlp_bias": false,
+   "model_type": "llama",
+   "neuron": {
+     "_serialized_key": "HloNeuronConfig",
+     "all_reduce_dtype": null,
+     "allow_flash_attention": true,
+     "attention_layout": "BSH",
+     "attn_output_transposed": false,
+     "auto_cast_type": "bf16",
+     "batch_size": 4,
+     "checkpoint_id": "unsloth/Llama-3.2-1B-Instruct",
+     "checkpoint_revision": "5a8abab4a5d6f164389b1079fb721cfab8d7126c",
+     "collectives_layout": "HSB",
+     "continuous_batching": true,
+     "fuse_qkv": true,
+     "group_query_attention": "shard-over-heads",
+     "log_softmax_scores": false,
+     "neuronxcc_version": "2.17.194.0+d312836f",
+     "optimum_neuron_version": "0.2.0.dev5",
+     "output_all_logits": false,
+     "sequence_length": 4096,
+     "tp_degree": 2
+   },
+   "num_attention_heads": 32,
+   "num_hidden_layers": 16,
+   "num_key_value_heads": 8,
+   "pretraining_tp": 1,
+   "rms_norm_eps": 1e-05,
+   "rope_scaling": {
+     "factor": 32.0,
+     "high_freq_factor": 4.0,
+     "low_freq_factor": 1.0,
+     "original_max_position_embeddings": 8192,
+     "rope_type": "llama3"
+   },
+   "rope_theta": 500000.0,
+   "tie_word_embeddings": true,
+   "unsloth_fixed": true,
+   "use_cache": true,
+   "vocab_size": 128256
+ }
neuronxcc-2.17.194.0+d312836f/0_REGISTRY/0.2.0.dev5/llama/unsloth/Llama-3.2-1B-Instruct/e8f7b8c083bc968773ae.json ADDED
@@ -0,0 +1,78 @@
+ {
+   "_entry_class": "SingleModelCacheEntry",
+   "_model_id": "unsloth/Llama-3.2-1B-Instruct",
+   "_task": "text-generation",
+   "architectures": [
+     "LlamaForCausalLM"
+   ],
+   "attention_bias": false,
+   "attention_dropout": 0.0,
+   "head_dim": 64,
+   "hidden_act": "silu",
+   "hidden_size": 2048,
+   "initializer_range": 0.02,
+   "intermediate_size": 8192,
+   "max_position_embeddings": 131072,
+   "mlp_bias": false,
+   "model_type": "llama",
+   "neuron": {
+     "_serialized_key": "NxDNeuronConfig",
+     "async_mode": false,
+     "attn_kernel_enabled": false,
+     "batch_size": 4,
+     "capacity_factor": null,
+     "cc_pipeline_tiling_factor": 2,
+     "checkpoint_id": "unsloth/Llama-3.2-1B-Instruct",
+     "checkpoint_revision": "5a8abab4a5d6f164389b1079fb721cfab8d7126c",
+     "continuous_batching": true,
+     "enable_bucketing": false,
+     "ep_degree": 1,
+     "flash_decoding_enabled": false,
+     "fused_qkv": true,
+     "glu_mlp": true,
+     "is_chunked_prefill": false,
+     "local_ranks_size": 8,
+     "logical_nc_config": 1,
+     "max_batch_size": 4,
+     "max_context_length": 4096,
+     "max_topk": 256,
+     "mlp_kernel_enabled": false,
+     "mlp_kernel_fuse_residual_add": false,
+     "n_active_tokens": 4096,
+     "neuronxcc_version": "2.17.194.0+d312836f",
+     "num_cores_per_group": 1,
+     "on_device_sampling": true,
+     "optimum_neuron_version": "0.2.0.dev5",
+     "output_logits": false,
+     "padding_side": "right",
+     "pp_degree": 1,
+     "qk_layernorm": false,
+     "qkv_kernel_enabled": false,
+     "rpl_reduce_dtype": "bfloat16",
+     "sequence_length": 4096,
+     "sequence_parallel_enabled": false,
+     "speculation_length": 0,
+     "start_rank_id": 0,
+     "target": null,
+     "torch_dtype": "bfloat16",
+     "tp_degree": 8,
+     "vocab_parallel": false
+   },
+   "num_attention_heads": 32,
+   "num_hidden_layers": 16,
+   "num_key_value_heads": 8,
+   "pretraining_tp": 1,
+   "rms_norm_eps": 1e-05,
+   "rope_scaling": {
+     "factor": 32.0,
+     "high_freq_factor": 4.0,
+     "low_freq_factor": 1.0,
+     "original_max_position_embeddings": 8192,
+     "rope_type": "llama3"
+   },
+   "rope_theta": 500000.0,
+   "tie_word_embeddings": true,
+   "unsloth_fixed": true,
+   "use_cache": true,
+   "vocab_size": 128256
+ }
neuronxcc-2.17.194.0+d312836f/0_REGISTRY/0.2.0.dev5/llama/unsloth/Llama-3.2-1B-Instruct/fb7899d27ccbc59330a2.json ADDED
@@ -0,0 +1,78 @@
+ {
+   "_entry_class": "SingleModelCacheEntry",
+   "_model_id": "unsloth/Llama-3.2-1B-Instruct",
+   "_task": "text-generation",
+   "architectures": [
+     "LlamaForCausalLM"
+   ],
+   "attention_bias": false,
+   "attention_dropout": 0.0,
+   "head_dim": 64,
+   "hidden_act": "silu",
+   "hidden_size": 2048,
+   "initializer_range": 0.02,
+   "intermediate_size": 8192,
+   "max_position_embeddings": 131072,
+   "mlp_bias": false,
+   "model_type": "llama",
+   "neuron": {
+     "_serialized_key": "NxDNeuronConfig",
+     "async_mode": false,
+     "attn_kernel_enabled": false,
+     "batch_size": 4,
+     "capacity_factor": null,
+     "cc_pipeline_tiling_factor": 2,
+     "checkpoint_id": "unsloth/Llama-3.2-1B-Instruct",
+     "checkpoint_revision": "5a8abab4a5d6f164389b1079fb721cfab8d7126c",
+     "continuous_batching": true,
+     "enable_bucketing": false,
+     "ep_degree": 1,
+     "flash_decoding_enabled": false,
+     "fused_qkv": true,
+     "glu_mlp": true,
+     "is_chunked_prefill": false,
+     "local_ranks_size": 2,
+     "logical_nc_config": 1,
+     "max_batch_size": 4,
+     "max_context_length": 4096,
+     "max_topk": 256,
+     "mlp_kernel_enabled": false,
+     "mlp_kernel_fuse_residual_add": false,
+     "n_active_tokens": 4096,
+     "neuronxcc_version": "2.17.194.0+d312836f",
+     "num_cores_per_group": 1,
+     "on_device_sampling": false,
+     "optimum_neuron_version": "0.2.0.dev5",
+     "output_logits": false,
+     "padding_side": "right",
+     "pp_degree": 1,
+     "qk_layernorm": false,
+     "qkv_kernel_enabled": false,
+     "rpl_reduce_dtype": "bfloat16",
+     "sequence_length": 4096,
+     "sequence_parallel_enabled": false,
+     "speculation_length": 0,
+     "start_rank_id": 0,
+     "target": null,
+     "torch_dtype": "bfloat16",
+     "tp_degree": 2,
+     "vocab_parallel": false
+   },
+   "num_attention_heads": 32,
+   "num_hidden_layers": 16,
+   "num_key_value_heads": 8,
+   "pretraining_tp": 1,
+   "rms_norm_eps": 1e-05,
+   "rope_scaling": {
+     "factor": 32.0,
+     "high_freq_factor": 4.0,
+     "low_freq_factor": 1.0,
+     "original_max_position_embeddings": 8192,
+     "rope_type": "llama3"
+   },
+   "rope_theta": 500000.0,
+   "tie_word_embeddings": true,
+   "unsloth_fixed": true,
+   "use_cache": true,
+   "vocab_size": 128256
+ }
neuronxcc-2.17.194.0+d312836f/0_REGISTRY/0.2.0.dev5/mixtral/mistralai/Mixtral-8x7B-Instruct-v0.1/f96bf36952a158cc9e11.json ADDED
@@ -0,0 +1,73 @@
+ {
+   "_entry_class": "SingleModelCacheEntry",
+   "_model_id": "mistralai/Mixtral-8x7B-Instruct-v0.1",
+   "_task": "text-generation",
+   "architectures": [
+     "MixtralForCausalLM"
+   ],
+   "attention_dropout": 0.0,
+   "head_dim": 128,
+   "hidden_act": "silu",
+   "hidden_size": 4096,
+   "initializer_range": 0.02,
+   "intermediate_size": 14336,
+   "max_position_embeddings": 32768,
+   "model_type": "mixtral",
+   "neuron": {
+     "_serialized_key": "NxDNeuronConfig",
+     "async_mode": false,
+     "attn_kernel_enabled": false,
+     "batch_size": 4,
+     "capacity_factor": null,
+     "cc_pipeline_tiling_factor": 2,
+     "checkpoint_id": "mistralai/Mixtral-8x7B-Instruct-v0.1",
+     "checkpoint_revision": "41bd4c9e7e4fb318ca40e721131d4933966c2cc1",
+     "continuous_batching": false,
+     "enable_bucketing": false,
+     "ep_degree": 1,
+     "flash_decoding_enabled": false,
+     "fused_qkv": false,
+     "glu_mlp": true,
+     "is_chunked_prefill": false,
+     "local_ranks_size": 16,
+     "logical_nc_config": 1,
+     "max_batch_size": 4,
+     "max_context_length": 4096,
+     "max_topk": 256,
+     "mlp_kernel_enabled": false,
+     "mlp_kernel_fuse_residual_add": false,
+     "n_active_tokens": 4096,
+     "neuronxcc_version": "2.17.194.0+d312836f",
+     "num_cores_per_group": 1,
+     "on_device_sampling": false,
+     "optimum_neuron_version": "0.2.0.dev5",
+     "output_logits": false,
+     "padding_side": "right",
+     "pp_degree": 1,
+     "qk_layernorm": false,
+     "qkv_kernel_enabled": false,
+     "rpl_reduce_dtype": "bfloat16",
+     "sequence_length": 4096,
+     "sequence_parallel_enabled": false,
+     "speculation_length": 0,
+     "start_rank_id": 0,
+     "target": null,
+     "torch_dtype": "bfloat16",
+     "tp_degree": 16,
+     "vocab_parallel": false
+   },
+   "num_attention_heads": 32,
+   "num_experts_per_tok": 2,
+   "num_hidden_layers": 32,
+   "num_key_value_heads": 8,
+   "num_local_experts": 8,
+   "output_router_logits": false,
+   "rms_norm_eps": 1e-05,
+   "rope_theta": 1000000.0,
+   "router_aux_loss_coef": 0.02,
+   "router_jitter_noise": 0.0,
+   "sliding_window": null,
+   "tie_word_embeddings": false,
+   "use_cache": true,
+   "vocab_size": 32000
+ }
neuronxcc-2.17.194.0+d312836f/0_REGISTRY/0.2.0.dev5/qwen2/Qwen/Qwen2.5-0.5B/442d198b468f7347f4bf.json ADDED
@@ -0,0 +1,49 @@
+ {
+   "_entry_class": "SingleModelCacheEntry",
+   "_model_id": "Qwen/Qwen2.5-0.5B",
+   "_task": "text-generation",
+   "architectures": [
+     "Qwen2ForCausalLM"
+   ],
+   "attention_dropout": 0.0,
+   "hidden_act": "silu",
+   "hidden_size": 896,
+   "initializer_range": 0.02,
+   "intermediate_size": 4864,
+   "max_position_embeddings": 32768,
+   "max_window_layers": 24,
+   "model_type": "qwen2",
+   "neuron": {
+     "_serialized_key": "HloNeuronConfig",
+     "all_reduce_dtype": null,
+     "allow_flash_attention": true,
+     "attention_layout": "HSB",
+     "attn_output_transposed": false,
+     "auto_cast_type": "bf16",
+     "batch_size": 4,
+     "checkpoint_id": "Qwen/Qwen2.5-0.5B",
+     "checkpoint_revision": "060db6499f32faf8b98477b0a26969ef7d8b9987",
+     "collectives_layout": "HSB",
+     "continuous_batching": true,
+     "fuse_qkv": false,
+     "group_query_attention": "shard-over-heads",
+     "log_softmax_scores": false,
+     "neuronxcc_version": "2.17.194.0+d312836f",
+     "optimum_neuron_version": "0.2.0.dev5",
+     "output_all_logits": false,
+     "sequence_length": 4096,
+     "tp_degree": 2
+   },
+   "num_attention_heads": 14,
+   "num_hidden_layers": 24,
+   "num_key_value_heads": 2,
+   "rms_norm_eps": 1e-06,
+   "rope_scaling": null,
+   "rope_theta": 1000000.0,
+   "sliding_window": 32768,
+   "tie_word_embeddings": true,
+   "use_cache": true,
+   "use_mrope": false,
+   "use_sliding_window": false,
+   "vocab_size": 151936
+ }
neuronxcc-2.17.194.0+d312836f/0_REGISTRY/0.2.0.dev7/llama/unsloth/Llama-3.2-1B-Instruct/86f34a5b4c3c146ba263.json ADDED
@@ -0,0 +1,56 @@
+ {
+   "_entry_class": "SingleModelCacheEntry",
+   "_model_id": "unsloth/Llama-3.2-1B-Instruct",
+   "_task": "text-generation",
+   "architectures": [
+     "LlamaForCausalLM"
+   ],
+   "attention_bias": false,
+   "attention_dropout": 0.0,
+   "head_dim": 64,
+   "hidden_act": "silu",
+   "hidden_size": 2048,
+   "initializer_range": 0.02,
+   "intermediate_size": 8192,
+   "max_position_embeddings": 131072,
+   "mlp_bias": false,
+   "model_type": "llama",
+   "neuron": {
+     "_serialized_key": "HloNeuronConfig",
+     "all_reduce_dtype": null,
+     "allow_flash_attention": true,
+     "attention_layout": "BSH",
+     "attn_output_transposed": false,
+     "auto_cast_type": "fp16",
+     "batch_size": 4,
+     "checkpoint_id": "unsloth/Llama-3.2-1B-Instruct",
+     "checkpoint_revision": "5a8abab4a5d6f164389b1079fb721cfab8d7126c",
+     "collectives_layout": "HSB",
+     "continuous_batching": true,
+     "fuse_qkv": true,
+     "group_query_attention": "shard-over-heads",
+     "log_softmax_scores": false,
+     "neuronxcc_version": "2.17.194.0+d312836f",
+     "optimum_neuron_version": "0.2.0.dev7",
+     "output_all_logits": false,
+     "sequence_length": 4096,
+     "tp_degree": 2
+   },
+   "num_attention_heads": 32,
+   "num_hidden_layers": 16,
+   "num_key_value_heads": 8,
+   "pretraining_tp": 1,
+   "rms_norm_eps": 1e-05,
+   "rope_scaling": {
+     "factor": 32.0,
+     "high_freq_factor": 4.0,
+     "low_freq_factor": 1.0,
+     "original_max_position_embeddings": 8192,
+     "rope_type": "llama3"
+   },
+   "rope_theta": 500000.0,
+   "tie_word_embeddings": true,
+   "unsloth_fixed": true,
+   "use_cache": true,
+   "vocab_size": 128256
+ }
neuronxcc-2.17.194.0+d312836f/MODULE_0672b5f56c1c30461234+431f5505/model.neff CHANGED
Binary files a/neuronxcc-2.17.194.0+d312836f/MODULE_0672b5f56c1c30461234+431f5505/model.neff and b/neuronxcc-2.17.194.0+d312836f/MODULE_0672b5f56c1c30461234+431f5505/model.neff differ
 
neuronxcc-2.17.194.0+d312836f/MODULE_088cee8e523341202b27+431f5505/model.neff CHANGED
Binary files a/neuronxcc-2.17.194.0+d312836f/MODULE_088cee8e523341202b27+431f5505/model.neff and b/neuronxcc-2.17.194.0+d312836f/MODULE_088cee8e523341202b27+431f5505/model.neff differ
 
neuronxcc-2.17.194.0+d312836f/MODULE_098093605321e39067b1+bfe5714b/compile_flags.json ADDED
@@ -0,0 +1 @@
+ "--auto-cast=none --model-type=transformer --tensorizer-options='--enable-ccop-compute-overlap --cc-pipeline-tiling-factor=2 --vectorize-dge-dma --vectorize-strided-dma ' -O2 --internal-num-neuroncores-per-sengine=1 --logfile=/tmp/nxd_model/context_encoding_model/_tp0_bk0/log-neuron-cc.txt"
neuronxcc-2.17.194.0+d312836f/MODULE_098093605321e39067b1+bfe5714b/model.done ADDED
File without changes
neuronxcc-2.17.194.0+d312836f/MODULE_098093605321e39067b1+bfe5714b/model.hlo_module.pb ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:62d8c682f938ff3b1b9b3c57138e1a5076e49c6e55c85df692d8d69af43a97b7
+ size 445522
neuronxcc-2.17.194.0+d312836f/MODULE_098093605321e39067b1+bfe5714b/model.neff ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:79f6c5a5b43cab2c955b06302c0df17da7b0f5a6746cdb45196262f10a38c702
+ size 32072704
neuronxcc-2.17.194.0+d312836f/MODULE_0b8b4fa5620a4855f332+165e9558/model.hlo_module.pb CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:6acbec9d61392836a93b8d1db69240657c81968bad2d77ed4aa5ff54732cd263
+ oid sha256:f3f3dd80294c9051a9e8ff1f5e4eb5a646e3b8c6dbbd4c5a6aa8a6d237217298
  size 375531
neuronxcc-2.17.194.0+d312836f/MODULE_0b8b4fa5620a4855f332+165e9558/model.neff CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:8283daf2cb93d48587eaaa3ad9bb35da0231f51b4c0d2dd273d69f72196de3b8
+ oid sha256:39e9e6ca1ebbe07cae334441774e22672f454eb4834a34019ac399084d261cb2
  size 1516544
neuronxcc-2.17.194.0+d312836f/MODULE_0b8b4fa5620a4855f332+165e9558/wrapped_neff.hlo CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:026a47d3b0d244558d3d19c0dd3a12d944648dae0da53a0516c0a4a5d8672622
+ oid sha256:348b9649b6b8a7df2a1e571bab067492308d199a24c2ca9e75f24a483e150dba
  size 1603514
neuronxcc-2.17.194.0+d312836f/MODULE_22cf4b1c9280bdb27a84+bfe5714b/compile_flags.json ADDED
@@ -0,0 +1 @@
+ "--auto-cast=none --model-type=transformer --tensorizer-options='--enable-ccop-compute-overlap --cc-pipeline-tiling-factor=2 --vectorize-dge-dma --vectorize-strided-dma ' -O2 --internal-num-neuroncores-per-sengine=1 --logfile=/tmp/nxd_model/context_encoding_model/_tp0_bk0/log-neuron-cc.txt"
neuronxcc-2.17.194.0+d312836f/MODULE_22cf4b1c9280bdb27a84+bfe5714b/model.done ADDED
File without changes
neuronxcc-2.17.194.0+d312836f/MODULE_22cf4b1c9280bdb27a84+bfe5714b/model.hlo_module.pb ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:659bfaa3caeaf8610ec9442a59ee8e9fadb041133f90aaa310893d3076743607
+ size 919265
neuronxcc-2.17.194.0+d312836f/MODULE_22cf4b1c9280bdb27a84+bfe5714b/model.neff ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:936afc71e5d663be56247d59efd97685b7d97e15ea16c901367ff64b6e1a4de2
+ size 32646144
neuronxcc-2.17.194.0+d312836f/MODULE_2b346bc95a0a22d338b4+431f5505/compile_flags.json ADDED
@@ -0,0 +1 @@
+ "--model-type=transformer -O1 --lnc=1 --internal-hlo2tensorizer-options=--experimental-unsafe-fp8e4m3fn-as-fp8e4m3 --logfile=/tmp/nxd_model/layout_opt/log-neuron-cc.txt"
neuronxcc-2.17.194.0+d312836f/MODULE_2b346bc95a0a22d338b4+431f5505/model.done ADDED
File without changes
neuronxcc-2.17.194.0+d312836f/MODULE_2b346bc95a0a22d338b4+431f5505/model.hlo_module.pb ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:71396bec0288a8bc3a788b55f55dbc69045eabeb8dfc89f8feb28b91ecb0c55f
+ size 136016
neuronxcc-2.17.194.0+d312836f/MODULE_2b346bc95a0a22d338b4+431f5505/model.neff ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:03da83020d93c33e44f4c975b99655077aa1c338c9ddcfcb2c52effa03a8e83c
+ size 2202624
neuronxcc-2.17.194.0+d312836f/MODULE_34a6b42796c8b4e2f58b+431f5505/model.neff CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:277e40266261753d321701565433ece6105d089a926dc9b8d5205a103afa4c67
+ oid sha256:075eab47089f6b5408189c16efa63c02b2be7813b34cf32f7ac353459842c720
  size 1158144
neuronxcc-2.17.194.0+d312836f/MODULE_38e85f0b1222d1fa5d21+431f5505/compile_flags.json ADDED
@@ -0,0 +1 @@
+ "--model-type=transformer -O1 --lnc=1 --internal-hlo2tensorizer-options=--experimental-unsafe-fp8e4m3fn-as-fp8e4m3 --logfile=/tmp/nxd_model/layout_opt/log-neuron-cc.txt"
neuronxcc-2.17.194.0+d312836f/MODULE_38e85f0b1222d1fa5d21+431f5505/model.done ADDED
File without changes
neuronxcc-2.17.194.0+d312836f/MODULE_38e85f0b1222d1fa5d21+431f5505/model.hlo_module.pb ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ea52d380fe4f37859998791e1672cdc8e24a757aadf3c9954d545acd81742d5d
+ size 339944
neuronxcc-2.17.194.0+d312836f/MODULE_38e85f0b1222d1fa5d21+431f5505/model.neff ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:837753189e7d070d26672d57f1a682fed9737ab7d3094ec0fdb251419d09e97d
+ size 7742464
neuronxcc-2.17.194.0+d312836f/MODULE_3aaf2f909aa93d035b6b+431f5505/compile_flags.json ADDED
@@ -0,0 +1 @@
+ "--model-type=transformer -O1 --lnc=1 --internal-hlo2tensorizer-options=--experimental-unsafe-fp8e4m3fn-as-fp8e4m3 --logfile=/tmp/nxd_model/layout_opt/log-neuron-cc.txt"
neuronxcc-2.17.194.0+d312836f/MODULE_3aaf2f909aa93d035b6b+431f5505/model.done ADDED
File without changes
neuronxcc-2.17.194.0+d312836f/MODULE_3aaf2f909aa93d035b6b+431f5505/model.hlo_module.pb ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0732ce84be45cbbccd3553ef983356fc1a66fec1d645948ba5bd21ad3ed53959
+ size 67559
neuronxcc-2.17.194.0+d312836f/MODULE_3aaf2f909aa93d035b6b+431f5505/model.neff ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:08b6a54201f19df9a20d3431d8db484c2ea1680e561140e2a26725be7b7d2a5f
+ size 338944
neuronxcc-2.17.194.0+d312836f/MODULE_3fa377f9222d3e733b57+bfe5714b/compile_flags.json ADDED
@@ -0,0 +1 @@
+ "--auto-cast=none --model-type=transformer --tensorizer-options='--enable-ccop-compute-overlap --cc-pipeline-tiling-factor=2 --vectorize-dge-dma --vectorize-strided-dma ' -O2 --internal-num-neuroncores-per-sengine=1 --logfile=/tmp/nxd_model/context_encoding_model/_tp0_bk0/log-neuron-cc.txt"
neuronxcc-2.17.194.0+d312836f/MODULE_3fa377f9222d3e733b57+bfe5714b/model.done ADDED
File without changes
neuronxcc-2.17.194.0+d312836f/MODULE_3fa377f9222d3e733b57+bfe5714b/model.hlo_module.pb ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:911683839345c4e7ccc7066521965c7158fe86de4be24320b0625bac3031c68a
+ size 481775
neuronxcc-2.17.194.0+d312836f/MODULE_3fa377f9222d3e733b57+bfe5714b/model.neff ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:cc320ba77bbd50659655c35d1bdb8b4d67489095f7367ee52198afe0c4186990
+ size 9350144
neuronxcc-2.17.194.0+d312836f/MODULE_43df25d3bc7b10bffcdc+165e9558/compile_flags.json ADDED
@@ -0,0 +1 @@
+ "--auto-cast=none --model-type=transformer --tensorizer-options='--enable-ccop-compute-overlap --cc-pipeline-tiling-factor=2 --vectorize-dge-dma --vectorize-strided-dma ' -O2 --internal-num-neuroncores-per-sengine=1 --logfile=/tmp/nxd_model/token_generation_model/_tp0_bk0/log-neuron-cc.txt --enable-internal-neff-wrapper"
neuronxcc-2.17.194.0+d312836f/MODULE_43df25d3bc7b10bffcdc+165e9558/model.done ADDED
File without changes
neuronxcc-2.17.194.0+d312836f/MODULE_43df25d3bc7b10bffcdc+165e9558/model.hlo_module.pb ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5f30c313c013d0c1bd9bcf0e0a13feebc2af2803ad3f3337b94d7457d7ee4bb0
+ size 1914485
neuronxcc-2.17.194.0+d312836f/MODULE_43df25d3bc7b10bffcdc+165e9558/model.neff ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7aec6522cde94b94130728fd1f7121ae2c635703e4d9e57561d6ca0fd7496b4a
+ size 10333184
neuronxcc-2.17.194.0+d312836f/MODULE_43df25d3bc7b10bffcdc+165e9558/wrapped_neff.hlo ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ca560e921973586bddfbdbdbc5d097e1efd8bc81cc29bdda3650c64e7c35c9f6
+ size 10676882
neuronxcc-2.17.194.0+d312836f/MODULE_48a1dea69e2dc2d09e06+165e9558/compile_flags.json ADDED
@@ -0,0 +1 @@
+ "--auto-cast=none --model-type=transformer --tensorizer-options='--enable-ccop-compute-overlap --cc-pipeline-tiling-factor=2 --vectorize-dge-dma --vectorize-strided-dma ' -O2 --internal-num-neuroncores-per-sengine=1 --logfile=/tmp/nxd_model/token_generation_model/_tp0_bk0/log-neuron-cc.txt --enable-internal-neff-wrapper"
neuronxcc-2.17.194.0+d312836f/MODULE_48a1dea69e2dc2d09e06+165e9558/model.done ADDED
File without changes