Synchronizing local compiler cache.
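This commit syncs newly compiled artifacts from a local AWS Neuron compiler cache (neuronx-cc 2.16.372.0+4a9b2326) to the Hub cache repository: one registry entry describing the compiled configuration, plus the NEFF binaries it points to. As a minimal sketch, a sync like this can be triggered from optimum-neuron; the import path and signature of synchronize_hub_cache below are assumptions that have moved between releases, so verify against your installed version.

    # Minimal sketch of producing a cache-sync commit like this one.
    # ASSUMPTION: the import path and signature of synchronize_hub_cache
    # vary across optimum-neuron releases; check your installed version.
    from optimum.neuron.utils.hub_neuronx_cache import synchronize_hub_cache

    # Push NEFFs from the local compile cache (by default under
    # /var/tmp/neuron-compile-cache) to a Hub cache repository.
    # The repository id below is illustrative.
    synchronize_hub_cache(cache_repo_id="aws-neuron/optimum-neuron-cache")
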
neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.2.0.dev0/inference/phi3/microsoft/Phi-3-mini-4k-instruct/34e681f1c4c2cc6c581b.json
ADDED
@@ -0,0 +1 @@
+{"architectures": ["Phi3ForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "auto_map": {"AutoConfig": "microsoft/Phi-3-mini-4k-instruct--configuration_phi3.Phi3Config", "AutoModelForCausalLM": "microsoft/Phi-3-mini-4k-instruct--modeling_phi3.Phi3ForCausalLM"}, "bos_token_id": 1, "embd_pdrop": 0.0, "eos_token_id": 32000, "hidden_act": "silu", "hidden_size": 3072, "initializer_range": 0.02, "intermediate_size": 8192, "max_position_embeddings": 4096, "model_type": "phi3", "neuron": {"auto_cast_type": "bf16", "batch_size": 4, "checkpoint_id": "microsoft/Phi-3-mini-4k-instruct", "checkpoint_revision": "0a67737cc96d2554230f90338b163bc6380a2a85", "compiler_type": "neuronx-cc", "compiler_version": "2.16.372.0+4a9b2326", "num_cores": 2, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 32, "original_max_position_embeddings": 4096, "pad_token_id": 32000, "resid_pdrop": 0.0, "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 10000.0, "sliding_window": 2047, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 32064}
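The "neuron" section of this registry entry records the parameters the accompanying NEFFs were compiled for: bf16 auto-cast, batch size 4, sequence length 4096, on 2 Neuron cores. A sketch of a matching export with optimum-neuron follows; the export keywords are assumed to mirror recent optimum-neuron releases.

    # Sketch: loading Phi-3-mini with export parameters that match the
    # cached registry entry, so a compatible cached NEFF can be reused.
    # ASSUMPTION: export kwargs follow recent optimum-neuron releases.
    from optimum.neuron import NeuronModelForCausalLM

    model = NeuronModelForCausalLM.from_pretrained(
        "microsoft/Phi-3-mini-4k-instruct",
        export=True,            # compile (or hit the cache) on first load
        batch_size=4,           # "batch_size": 4
        sequence_length=4096,   # "sequence_length": 4096
        num_cores=2,            # "num_cores": 2
        auto_cast_type="bf16",  # "auto_cast_type": "bf16"
    )
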
neuronxcc-2.16.372.0+4a9b2326/MODULE_2db40b5b23a523e16b87+613edded/model.neff
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:02adb5db26a9c368e32072a97a76e5e38fa4ac33349035f317c4aeea641fc68a
 size 6493184
neuronxcc-2.16.372.0+4a9b2326/MODULE_967d83f73b8fe30e4d14+613edded/model.neff
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:30fbc0ff0ce1cddd47a69fbf4d7e1e459e3a71d5446956127470c6f83591b396
 size 14224384
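Each model.neff is tracked through Git LFS, so the diffs above touch only the three-line pointer file (spec version, sha256 oid, byte size), filling in the oid that was blank in the old pointer once the binary is uploaded. A self-contained sketch for checking a locally downloaded NEFF against its pointer fields (the file path is a placeholder):

    # Verify a downloaded artifact against its Git LFS pointer fields.
    import hashlib

    def lfs_oid_and_size(path, chunk=1 << 20):
        """Return (sha256 hex digest, byte size) as recorded in an LFS pointer."""
        h, size = hashlib.sha256(), 0
        with open(path, "rb") as f:
            while block := f.read(chunk):
                h.update(block)
                size += len(block)
        return h.hexdigest(), size

    # "model.neff" stands in for a locally fetched copy of the first module above.
    oid, size = lfs_oid_and_size("model.neff")
    assert oid == "02adb5db26a9c368e32072a97a76e5e38fa4ac33349035f317c4aeea641fc68a"
    assert size == 6493184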