dacorvo (HF Staff) committed
Commit 4a01d10 · verified · 1 parent: c1cafb3

Synchronizing local compiler cache.

This view is limited to 50 files because it contains too many changes. See the raw diff for the full change set.
Files changed (50)
  1. .gitattributes +52 -0
  2. neuronxcc-2.19.8089.0+8ab9f450/0_REGISTRY/0.3.0.dev2/granite/hf-internal-testing/tiny-random-GraniteForCausalLM/4bd0c6ed2a9764168cb6.json +73 -0
  3. neuronxcc-2.19.8089.0+8ab9f450/0_REGISTRY/0.3.0.dev2/granite/hf-internal-testing/tiny-random-GraniteForCausalLM/58bb3ae501d4d87ab565.json +73 -0
  4. neuronxcc-2.19.8089.0+8ab9f450/0_REGISTRY/0.3.0.dev2/granite/hf-internal-testing/tiny-random-GraniteForCausalLM/f5771202abb1a7ae2611.json +73 -0
  5. neuronxcc-2.19.8089.0+8ab9f450/0_REGISTRY/0.3.0.dev2/llama/llamafactory/tiny-random-Llama-3/3ed9abae6f7f67d07da0.json +77 -0
  6. neuronxcc-2.19.8089.0+8ab9f450/0_REGISTRY/0.3.0.dev2/llama/llamafactory/tiny-random-Llama-3/42b299016a0208fd74b0.json +77 -0
  7. neuronxcc-2.19.8089.0+8ab9f450/0_REGISTRY/0.3.0.dev2/llama/llamafactory/tiny-random-Llama-3/c36bfaee43f6f840c055.json +77 -0
  8. neuronxcc-2.19.8089.0+8ab9f450/0_REGISTRY/0.3.0.dev2/llama/unsloth/Llama-3.2-1B-Instruct/ee2a399906ca25519a40.json +78 -0
  9. neuronxcc-2.19.8089.0+8ab9f450/0_REGISTRY/0.3.0.dev2/mixtral/dacorvo/Mixtral-tiny/350aa0c9a5de191ee7c7.json +73 -0
  10. neuronxcc-2.19.8089.0+8ab9f450/0_REGISTRY/0.3.0.dev2/mixtral/dacorvo/Mixtral-tiny/85ab3636ce10c112729d.json +73 -0
  11. neuronxcc-2.19.8089.0+8ab9f450/0_REGISTRY/0.3.0.dev2/mixtral/dacorvo/Mixtral-tiny/e6b337232e4796edc487.json +73 -0
  12. neuronxcc-2.19.8089.0+8ab9f450/0_REGISTRY/0.3.0.dev2/phi3/yujiepan/phi-4-tiny-random/16874c09c9bed580256d.json +74 -0
  13. neuronxcc-2.19.8089.0+8ab9f450/0_REGISTRY/0.3.0.dev2/phi3/yujiepan/phi-4-tiny-random/e38863225742ea4adc6d.json +74 -0
  14. neuronxcc-2.19.8089.0+8ab9f450/0_REGISTRY/0.3.0.dev2/phi3/yujiepan/phi-4-tiny-random/fecca0469b6993e5d3c5.json +74 -0
  15. neuronxcc-2.19.8089.0+8ab9f450/0_REGISTRY/0.3.0.dev2/qwen2/yujiepan/qwen2.5-128k-tiny-random/34b1d02021287cfd46b2.json +75 -0
  16. neuronxcc-2.19.8089.0+8ab9f450/0_REGISTRY/0.3.0.dev2/qwen2/yujiepan/qwen2.5-128k-tiny-random/7ee9b03bb7a8b01f359e.json +75 -0
  17. neuronxcc-2.19.8089.0+8ab9f450/0_REGISTRY/0.3.0.dev2/qwen2/yujiepan/qwen2.5-128k-tiny-random/e709ce08492ca8a65007.json +75 -0
  18. neuronxcc-2.19.8089.0+8ab9f450/MODULE_00333196986acf2eb79a+431f5505/compile_flags.json +1 -0
  19. neuronxcc-2.19.8089.0+8ab9f450/MODULE_00333196986acf2eb79a+431f5505/model.done +0 -0
  20. neuronxcc-2.19.8089.0+8ab9f450/MODULE_00333196986acf2eb79a+431f5505/model.hlo_module.pb +3 -0
  21. neuronxcc-2.19.8089.0+8ab9f450/MODULE_00333196986acf2eb79a+431f5505/model.neff +0 -0
  22. neuronxcc-2.19.8089.0+8ab9f450/MODULE_0661e1d8741e9ead0d5c+431f5505/compile_flags.json +1 -0
  23. neuronxcc-2.19.8089.0+8ab9f450/MODULE_0661e1d8741e9ead0d5c+431f5505/model.done +0 -0
  24. neuronxcc-2.19.8089.0+8ab9f450/MODULE_0661e1d8741e9ead0d5c+431f5505/model.hlo_module.pb +3 -0
  25. neuronxcc-2.19.8089.0+8ab9f450/MODULE_0661e1d8741e9ead0d5c+431f5505/model.neff +0 -0
  26. neuronxcc-2.19.8089.0+8ab9f450/MODULE_0bbe60dde8eaacbc8218+5be477de/compile_flags.json +1 -0
  27. neuronxcc-2.19.8089.0+8ab9f450/MODULE_0bbe60dde8eaacbc8218+5be477de/model.done +0 -0
  28. neuronxcc-2.19.8089.0+8ab9f450/MODULE_0bbe60dde8eaacbc8218+5be477de/model.hlo_module.pb +3 -0
  29. neuronxcc-2.19.8089.0+8ab9f450/MODULE_0bbe60dde8eaacbc8218+5be477de/model.neff +3 -0
  30. neuronxcc-2.19.8089.0+8ab9f450/MODULE_0bbe60dde8eaacbc8218+5be477de/wrapped_neff.hlo +3 -0
  31. neuronxcc-2.19.8089.0+8ab9f450/MODULE_0f82fe03a3af29dff8f6+5be477de/compile_flags.json +1 -0
  32. neuronxcc-2.19.8089.0+8ab9f450/MODULE_0f82fe03a3af29dff8f6+5be477de/model.done +0 -0
  33. neuronxcc-2.19.8089.0+8ab9f450/MODULE_0f82fe03a3af29dff8f6+5be477de/model.hlo_module.pb +3 -0
  34. neuronxcc-2.19.8089.0+8ab9f450/MODULE_0f82fe03a3af29dff8f6+5be477de/model.neff +3 -0
  35. neuronxcc-2.19.8089.0+8ab9f450/MODULE_0f82fe03a3af29dff8f6+5be477de/wrapped_neff.hlo +3 -0
  36. neuronxcc-2.19.8089.0+8ab9f450/MODULE_1d687728a717f87883ca+431f5505/compile_flags.json +1 -0
  37. neuronxcc-2.19.8089.0+8ab9f450/MODULE_1d687728a717f87883ca+431f5505/model.done +0 -0
  38. neuronxcc-2.19.8089.0+8ab9f450/MODULE_1d687728a717f87883ca+431f5505/model.hlo_module.pb +3 -0
  39. neuronxcc-2.19.8089.0+8ab9f450/MODULE_1d687728a717f87883ca+431f5505/model.neff +3 -0
  40. neuronxcc-2.19.8089.0+8ab9f450/MODULE_1ea7c53c16e767166083+84f3e719/compile_flags.json +1 -0
  41. neuronxcc-2.19.8089.0+8ab9f450/MODULE_1ea7c53c16e767166083+84f3e719/model.done +0 -0
  42. neuronxcc-2.19.8089.0+8ab9f450/MODULE_1ea7c53c16e767166083+84f3e719/model.hlo_module.pb +3 -0
  43. neuronxcc-2.19.8089.0+8ab9f450/MODULE_1ea7c53c16e767166083+84f3e719/model.neff +3 -0
  44. neuronxcc-2.19.8089.0+8ab9f450/MODULE_2371c90829406a620831+5be477de/compile_flags.json +1 -0
  45. neuronxcc-2.19.8089.0+8ab9f450/MODULE_2371c90829406a620831+5be477de/model.done +0 -0
  46. neuronxcc-2.19.8089.0+8ab9f450/MODULE_2371c90829406a620831+5be477de/model.hlo_module.pb +3 -0
  47. neuronxcc-2.19.8089.0+8ab9f450/MODULE_2371c90829406a620831+5be477de/model.neff +3 -0
  48. neuronxcc-2.19.8089.0+8ab9f450/MODULE_2371c90829406a620831+5be477de/wrapped_neff.hlo +3 -0
  49. neuronxcc-2.19.8089.0+8ab9f450/MODULE_266c5c386fd48dd67015+431f5505/compile_flags.json +1 -0
  50. neuronxcc-2.19.8089.0+8ab9f450/MODULE_266c5c386fd48dd67015+431f5505/model.done +0 -0
.gitattributes CHANGED
@@ -3253,3 +3253,55 @@ neuronxcc-2.18.121.0+9e31e41a/MODULE_1bce819dd943e86ebacb+431f5505/model.neff fi
 neuronxcc-2.18.121.0+9e31e41a/MODULE_3596a0566dd0e4bfadf9+5be477de/model.neff filter=lfs diff=lfs merge=lfs -text
 neuronxcc-2.18.121.0+9e31e41a/MODULE_3596a0566dd0e4bfadf9+5be477de/wrapped_neff.hlo filter=lfs diff=lfs merge=lfs -text
 neuronxcc-2.18.121.0+9e31e41a/MODULE_17de2205f692a10a3037+84f3e719/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.19.8089.0+8ab9f450/MODULE_0bbe60dde8eaacbc8218+5be477de/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.19.8089.0+8ab9f450/MODULE_0bbe60dde8eaacbc8218+5be477de/wrapped_neff.hlo filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.19.8089.0+8ab9f450/MODULE_0f82fe03a3af29dff8f6+5be477de/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.19.8089.0+8ab9f450/MODULE_0f82fe03a3af29dff8f6+5be477de/wrapped_neff.hlo filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.19.8089.0+8ab9f450/MODULE_1d687728a717f87883ca+431f5505/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.19.8089.0+8ab9f450/MODULE_1ea7c53c16e767166083+84f3e719/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.19.8089.0+8ab9f450/MODULE_2371c90829406a620831+5be477de/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.19.8089.0+8ab9f450/MODULE_2371c90829406a620831+5be477de/wrapped_neff.hlo filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.19.8089.0+8ab9f450/MODULE_2af3b88e8bfa7573c510+84f3e719/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.19.8089.0+8ab9f450/MODULE_30c8e5dffb371f5a2fc0+5be477de/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.19.8089.0+8ab9f450/MODULE_30c8e5dffb371f5a2fc0+5be477de/wrapped_neff.hlo filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.19.8089.0+8ab9f450/MODULE_3829e8e6510db07ee7eb+c2248236/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.19.8089.0+8ab9f450/MODULE_46b9d2bfbdf1b2752484+431f5505/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.19.8089.0+8ab9f450/MODULE_51d7ba87477561ddf31e+84f3e719/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.19.8089.0+8ab9f450/MODULE_5a10198534c5f2725fd7+5be477de/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.19.8089.0+8ab9f450/MODULE_5a10198534c5f2725fd7+5be477de/wrapped_neff.hlo filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.19.8089.0+8ab9f450/MODULE_67377aec01dd303d63ef+5be477de/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.19.8089.0+8ab9f450/MODULE_67377aec01dd303d63ef+5be477de/wrapped_neff.hlo filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.19.8089.0+8ab9f450/MODULE_679852c8ccd2cff92dd1+84f3e719/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.19.8089.0+8ab9f450/MODULE_761b3c734083fe56af68+84f3e719/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.19.8089.0+8ab9f450/MODULE_7850f8c16172e0c763cf+84f3e719/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.19.8089.0+8ab9f450/MODULE_791f80da2052a6cfa05d+5be477de/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.19.8089.0+8ab9f450/MODULE_791f80da2052a6cfa05d+5be477de/wrapped_neff.hlo filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.19.8089.0+8ab9f450/MODULE_7d84a4cdb76e9616c2a7+c2248236/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.19.8089.0+8ab9f450/MODULE_81e0fb8dfd58e42d4ea8+84f3e719/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.19.8089.0+8ab9f450/MODULE_8bbedb082fb6a65e221e+5be477de/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.19.8089.0+8ab9f450/MODULE_8bbedb082fb6a65e221e+5be477de/wrapped_neff.hlo filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.19.8089.0+8ab9f450/MODULE_9bef1303fb9e21c6cb66+c2248236/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.19.8089.0+8ab9f450/MODULE_9d325789026e6ce273ae+84f3e719/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.19.8089.0+8ab9f450/MODULE_9e2025b598171192a3d4+ca355898/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.19.8089.0+8ab9f450/MODULE_9e2025b598171192a3d4+ca355898/wrapped_neff.hlo filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.19.8089.0+8ab9f450/MODULE_a1746e7481045e1ccc37+84f3e719/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.19.8089.0+8ab9f450/MODULE_a20343aa93b3e4cbf95f+5be477de/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.19.8089.0+8ab9f450/MODULE_a20343aa93b3e4cbf95f+5be477de/wrapped_neff.hlo filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.19.8089.0+8ab9f450/MODULE_a963e9aa5a33fdf44b01+431f5505/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.19.8089.0+8ab9f450/MODULE_b77e0e7f43a3603bfe9b+84f3e719/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.19.8089.0+8ab9f450/MODULE_b8997c221d74ec4a7842+84f3e719/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.19.8089.0+8ab9f450/MODULE_bc0dc6318052d18d4f59+5be477de/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.19.8089.0+8ab9f450/MODULE_bc0dc6318052d18d4f59+5be477de/wrapped_neff.hlo filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.19.8089.0+8ab9f450/MODULE_c2d3f01e87cac25e1312+5be477de/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.19.8089.0+8ab9f450/MODULE_c2d3f01e87cac25e1312+5be477de/wrapped_neff.hlo filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.19.8089.0+8ab9f450/MODULE_d43edcfa396bb5f7fa37+5be477de/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.19.8089.0+8ab9f450/MODULE_d43edcfa396bb5f7fa37+5be477de/wrapped_neff.hlo filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.19.8089.0+8ab9f450/MODULE_d6c19a447da67dfa686e+431f5505/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.19.8089.0+8ab9f450/MODULE_d7e2548756fae2419754+5be477de/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.19.8089.0+8ab9f450/MODULE_d7e2548756fae2419754+5be477de/wrapped_neff.hlo filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.19.8089.0+8ab9f450/MODULE_da644c4824015946da20+ca355898/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.19.8089.0+8ab9f450/MODULE_da644c4824015946da20+ca355898/wrapped_neff.hlo filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.19.8089.0+8ab9f450/MODULE_e4ae034a0e6bde8dd152+84f3e719/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.19.8089.0+8ab9f450/MODULE_e9c7a317b8ff33889716+84f3e719/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.19.8089.0+8ab9f450/MODULE_f95e3cf0271f5c661734+ca355898/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.19.8089.0+8ab9f450/MODULE_f95e3cf0271f5c661734+ca355898/wrapped_neff.hlo filter=lfs diff=lfs merge=lfs -text
neuronxcc-2.19.8089.0+8ab9f450/0_REGISTRY/0.3.0.dev2/granite/hf-internal-testing/tiny-random-GraniteForCausalLM/4bd0c6ed2a9764168cb6.json ADDED
@@ -0,0 +1,73 @@
+ {
+ "_entry_class": "SingleModelCacheEntry",
+ "_model_id": "hf-internal-testing/tiny-random-GraniteForCausalLM",
+ "_task": "text-generation",
+ "architectures": [
+ "GraniteForCausalLM"
+ ],
+ "attention_bias": false,
+ "attention_dropout": 0.0,
+ "attention_multiplier": 1.0,
+ "embedding_multiplier": 1.0,
+ "hidden_act": "silu",
+ "hidden_size": 32,
+ "initializer_range": 0.02,
+ "intermediate_size": 64,
+ "logits_scaling": 1.0,
+ "max_position_embeddings": 2048,
+ "mlp_bias": false,
+ "model_type": "granite",
+ "neuron": {
+ "_serialized_key": "NxDNeuronConfig",
+ "async_mode": false,
+ "attn_kernel_enabled": false,
+ "batch_size": 1,
+ "capacity_factor": null,
+ "cc_pipeline_tiling_factor": 2,
+ "checkpoint_id": "hf-internal-testing/tiny-random-GraniteForCausalLM",
+ "checkpoint_revision": "c3074ebc0ac2fe545305f5e5f6cce2cc9b2aa0c5",
+ "continuous_batching": false,
+ "enable_bucketing": false,
+ "ep_degree": 1,
+ "flash_decoding_enabled": false,
+ "fused_qkv": true,
+ "glu_mlp": true,
+ "is_chunked_prefill": false,
+ "local_ranks_size": 2,
+ "logical_nc_config": 1,
+ "max_batch_size": 1,
+ "max_context_length": 100,
+ "max_topk": 256,
+ "mlp_kernel_enabled": false,
+ "mlp_kernel_fuse_residual_add": false,
+ "n_active_tokens": 100,
+ "neuronxcc_version": "2.19.8089.0+8ab9f450",
+ "num_cores_per_group": 1,
+ "on_device_sampling": true,
+ "optimum_neuron_version": "0.3.0.dev2",
+ "output_logits": false,
+ "padding_side": "right",
+ "pp_degree": 1,
+ "qk_layernorm": false,
+ "qkv_kernel_enabled": false,
+ "rpl_reduce_dtype": "float16",
+ "sequence_length": 100,
+ "sequence_parallel_enabled": false,
+ "speculation_length": 0,
+ "start_rank_id": 0,
+ "target": null,
+ "torch_dtype": "float16",
+ "tp_degree": 2,
+ "vocab_parallel": false
+ },
+ "num_attention_heads": 4,
+ "num_hidden_layers": 2,
+ "num_key_value_heads": 4,
+ "residual_multiplier": 1.0,
+ "rms_norm_eps": 1e-06,
+ "rope_scaling": null,
+ "rope_theta": 10000.0,
+ "tie_word_embeddings": false,
+ "use_cache": true,
+ "vocab_size": 49152
+ }
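Each registry entry added above is a plain JSON document: the original Hugging Face model configuration plus a serialized NxDNeuronConfig (the "neuron" key) recording the export parameters such as batch size, sequence length, tensor-parallel degree, dtype and compiler version. A minimal sketch of how such an entry could be inspected from a local clone of this cache repository, using only the Python standard library (the path below is one of the files added in this commit):

import json
from pathlib import Path

# One of the registry entries added in this commit (assumes a local clone of the cache repo).
entry_path = Path(
    "neuronxcc-2.19.8089.0+8ab9f450/0_REGISTRY/0.3.0.dev2/granite/"
    "hf-internal-testing/tiny-random-GraniteForCausalLM/4bd0c6ed2a9764168cb6.json"
)

entry = json.loads(entry_path.read_text())
neuron = entry["neuron"]  # serialized NxDNeuronConfig

# The export parameters that distinguish one cached compilation from another.
print(entry["_model_id"], entry["_task"])
print(neuron["batch_size"], neuron["sequence_length"], neuron["tp_degree"], neuron["torch_dtype"])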
neuronxcc-2.19.8089.0+8ab9f450/0_REGISTRY/0.3.0.dev2/granite/hf-internal-testing/tiny-random-GraniteForCausalLM/58bb3ae501d4d87ab565.json ADDED
@@ -0,0 +1,73 @@
+ {
+ "_entry_class": "SingleModelCacheEntry",
+ "_model_id": "hf-internal-testing/tiny-random-GraniteForCausalLM",
+ "_task": "text-generation",
+ "architectures": [
+ "GraniteForCausalLM"
+ ],
+ "attention_bias": false,
+ "attention_dropout": 0.0,
+ "attention_multiplier": 1.0,
+ "embedding_multiplier": 1.0,
+ "hidden_act": "silu",
+ "hidden_size": 32,
+ "initializer_range": 0.02,
+ "intermediate_size": 64,
+ "logits_scaling": 1.0,
+ "max_position_embeddings": 2048,
+ "mlp_bias": false,
+ "model_type": "granite",
+ "neuron": {
+ "_serialized_key": "NxDNeuronConfig",
+ "async_mode": false,
+ "attn_kernel_enabled": false,
+ "batch_size": 2,
+ "capacity_factor": null,
+ "cc_pipeline_tiling_factor": 2,
+ "checkpoint_id": "hf-internal-testing/tiny-random-GraniteForCausalLM",
+ "checkpoint_revision": "c3074ebc0ac2fe545305f5e5f6cce2cc9b2aa0c5",
+ "continuous_batching": true,
+ "enable_bucketing": false,
+ "ep_degree": 1,
+ "flash_decoding_enabled": false,
+ "fused_qkv": true,
+ "glu_mlp": true,
+ "is_chunked_prefill": false,
+ "local_ranks_size": 2,
+ "logical_nc_config": 1,
+ "max_batch_size": 2,
+ "max_context_length": 100,
+ "max_topk": 256,
+ "mlp_kernel_enabled": false,
+ "mlp_kernel_fuse_residual_add": false,
+ "n_active_tokens": 100,
+ "neuronxcc_version": "2.19.8089.0+8ab9f450",
+ "num_cores_per_group": 1,
+ "on_device_sampling": false,
+ "optimum_neuron_version": "0.3.0.dev2",
+ "output_logits": false,
+ "padding_side": "right",
+ "pp_degree": 1,
+ "qk_layernorm": false,
+ "qkv_kernel_enabled": false,
+ "rpl_reduce_dtype": "float16",
+ "sequence_length": 100,
+ "sequence_parallel_enabled": false,
+ "speculation_length": 0,
+ "start_rank_id": 0,
+ "target": null,
+ "torch_dtype": "float16",
+ "tp_degree": 2,
+ "vocab_parallel": false
+ },
+ "num_attention_heads": 4,
+ "num_hidden_layers": 2,
+ "num_key_value_heads": 4,
+ "residual_multiplier": 1.0,
+ "rms_norm_eps": 1e-06,
+ "rope_scaling": null,
+ "rope_theta": 10000.0,
+ "tie_word_embeddings": false,
+ "use_cache": true,
+ "vocab_size": 49152
+ }
neuronxcc-2.19.8089.0+8ab9f450/0_REGISTRY/0.3.0.dev2/granite/hf-internal-testing/tiny-random-GraniteForCausalLM/f5771202abb1a7ae2611.json ADDED
@@ -0,0 +1,73 @@
+ {
+ "_entry_class": "SingleModelCacheEntry",
+ "_model_id": "hf-internal-testing/tiny-random-GraniteForCausalLM",
+ "_task": "text-generation",
+ "architectures": [
+ "GraniteForCausalLM"
+ ],
+ "attention_bias": false,
+ "attention_dropout": 0.0,
+ "attention_multiplier": 1.0,
+ "embedding_multiplier": 1.0,
+ "hidden_act": "silu",
+ "hidden_size": 32,
+ "initializer_range": 0.02,
+ "intermediate_size": 64,
+ "logits_scaling": 1.0,
+ "max_position_embeddings": 2048,
+ "mlp_bias": false,
+ "model_type": "granite",
+ "neuron": {
+ "_serialized_key": "NxDNeuronConfig",
+ "async_mode": false,
+ "attn_kernel_enabled": false,
+ "batch_size": 1,
+ "capacity_factor": null,
+ "cc_pipeline_tiling_factor": 2,
+ "checkpoint_id": "hf-internal-testing/tiny-random-GraniteForCausalLM",
+ "checkpoint_revision": "c3074ebc0ac2fe545305f5e5f6cce2cc9b2aa0c5",
+ "continuous_batching": false,
+ "enable_bucketing": false,
+ "ep_degree": 1,
+ "flash_decoding_enabled": false,
+ "fused_qkv": true,
+ "glu_mlp": true,
+ "is_chunked_prefill": false,
+ "local_ranks_size": 2,
+ "logical_nc_config": 1,
+ "max_batch_size": 1,
+ "max_context_length": 100,
+ "max_topk": 256,
+ "mlp_kernel_enabled": false,
+ "mlp_kernel_fuse_residual_add": false,
+ "n_active_tokens": 100,
+ "neuronxcc_version": "2.19.8089.0+8ab9f450",
+ "num_cores_per_group": 1,
+ "on_device_sampling": true,
+ "optimum_neuron_version": "0.3.0.dev2",
+ "output_logits": false,
+ "padding_side": "right",
+ "pp_degree": 1,
+ "qk_layernorm": false,
+ "qkv_kernel_enabled": false,
+ "rpl_reduce_dtype": "bfloat16",
+ "sequence_length": 100,
+ "sequence_parallel_enabled": false,
+ "speculation_length": 0,
+ "start_rank_id": 0,
+ "target": null,
+ "torch_dtype": "bfloat16",
+ "tp_degree": 2,
+ "vocab_parallel": false
+ },
+ "num_attention_heads": 4,
+ "num_hidden_layers": 2,
+ "num_key_value_heads": 4,
+ "residual_multiplier": 1.0,
+ "rms_norm_eps": 1e-06,
+ "rope_scaling": null,
+ "rope_theta": 10000.0,
+ "tie_word_embeddings": false,
+ "use_cache": true,
+ "vocab_size": 49152
+ }
neuronxcc-2.19.8089.0+8ab9f450/0_REGISTRY/0.3.0.dev2/llama/llamafactory/tiny-random-Llama-3/3ed9abae6f7f67d07da0.json ADDED
@@ -0,0 +1,77 @@
+ {
+ "_entry_class": "SingleModelCacheEntry",
+ "_model_id": "llamafactory/tiny-random-Llama-3",
+ "_task": "text-generation",
+ "architectures": [
+ "LlamaForCausalLM"
+ ],
+ "attention_bias": false,
+ "attention_dropout": 0.0,
+ "head_dim": 4,
+ "hidden_act": "silu",
+ "hidden_size": 16,
+ "initializer_range": 0.02,
+ "intermediate_size": 64,
+ "max_position_embeddings": 131072,
+ "mlp_bias": false,
+ "model_type": "llama",
+ "neuron": {
+ "_serialized_key": "NxDNeuronConfig",
+ "async_mode": false,
+ "attn_kernel_enabled": false,
+ "batch_size": 2,
+ "capacity_factor": null,
+ "cc_pipeline_tiling_factor": 2,
+ "checkpoint_id": "llamafactory/tiny-random-Llama-3",
+ "checkpoint_revision": "bf2a2e3bf199ad2ee96f02a3c00246c608db22a8",
+ "continuous_batching": true,
+ "enable_bucketing": false,
+ "ep_degree": 1,
+ "flash_decoding_enabled": false,
+ "fused_qkv": true,
+ "glu_mlp": true,
+ "is_chunked_prefill": false,
+ "local_ranks_size": 2,
+ "logical_nc_config": 1,
+ "max_batch_size": 2,
+ "max_context_length": 100,
+ "max_topk": 256,
+ "mlp_kernel_enabled": false,
+ "mlp_kernel_fuse_residual_add": false,
+ "n_active_tokens": 100,
+ "neuronxcc_version": "2.19.8089.0+8ab9f450",
+ "num_cores_per_group": 1,
+ "on_device_sampling": false,
+ "optimum_neuron_version": "0.3.0.dev2",
+ "output_logits": false,
+ "padding_side": "right",
+ "pp_degree": 1,
+ "qk_layernorm": false,
+ "qkv_kernel_enabled": false,
+ "rpl_reduce_dtype": "float16",
+ "sequence_length": 100,
+ "sequence_parallel_enabled": false,
+ "speculation_length": 0,
+ "start_rank_id": 0,
+ "target": null,
+ "torch_dtype": "float16",
+ "tp_degree": 2,
+ "vocab_parallel": false
+ },
+ "num_attention_heads": 4,
+ "num_hidden_layers": 2,
+ "num_key_value_heads": 4,
+ "pretraining_tp": 1,
+ "rms_norm_eps": 1e-05,
+ "rope_scaling": {
+ "factor": 8.0,
+ "high_freq_factor": 4.0,
+ "low_freq_factor": 1.0,
+ "original_max_position_embeddings": 8192,
+ "rope_type": "llama3"
+ },
+ "rope_theta": 500000.0,
+ "tie_word_embeddings": false,
+ "use_cache": true,
+ "vocab_size": 128256
+ }
neuronxcc-2.19.8089.0+8ab9f450/0_REGISTRY/0.3.0.dev2/llama/llamafactory/tiny-random-Llama-3/42b299016a0208fd74b0.json ADDED
@@ -0,0 +1,77 @@
+ {
+ "_entry_class": "SingleModelCacheEntry",
+ "_model_id": "llamafactory/tiny-random-Llama-3",
+ "_task": "text-generation",
+ "architectures": [
+ "LlamaForCausalLM"
+ ],
+ "attention_bias": false,
+ "attention_dropout": 0.0,
+ "head_dim": 4,
+ "hidden_act": "silu",
+ "hidden_size": 16,
+ "initializer_range": 0.02,
+ "intermediate_size": 64,
+ "max_position_embeddings": 131072,
+ "mlp_bias": false,
+ "model_type": "llama",
+ "neuron": {
+ "_serialized_key": "NxDNeuronConfig",
+ "async_mode": false,
+ "attn_kernel_enabled": false,
+ "batch_size": 1,
+ "capacity_factor": null,
+ "cc_pipeline_tiling_factor": 2,
+ "checkpoint_id": "llamafactory/tiny-random-Llama-3",
+ "checkpoint_revision": "bf2a2e3bf199ad2ee96f02a3c00246c608db22a8",
+ "continuous_batching": false,
+ "enable_bucketing": false,
+ "ep_degree": 1,
+ "flash_decoding_enabled": false,
+ "fused_qkv": true,
+ "glu_mlp": true,
+ "is_chunked_prefill": false,
+ "local_ranks_size": 2,
+ "logical_nc_config": 1,
+ "max_batch_size": 1,
+ "max_context_length": 100,
+ "max_topk": 256,
+ "mlp_kernel_enabled": false,
+ "mlp_kernel_fuse_residual_add": false,
+ "n_active_tokens": 100,
+ "neuronxcc_version": "2.19.8089.0+8ab9f450",
+ "num_cores_per_group": 1,
+ "on_device_sampling": true,
+ "optimum_neuron_version": "0.3.0.dev2",
+ "output_logits": false,
+ "padding_side": "right",
+ "pp_degree": 1,
+ "qk_layernorm": false,
+ "qkv_kernel_enabled": false,
+ "rpl_reduce_dtype": "float16",
+ "sequence_length": 100,
+ "sequence_parallel_enabled": false,
+ "speculation_length": 0,
+ "start_rank_id": 0,
+ "target": null,
+ "torch_dtype": "float16",
+ "tp_degree": 2,
+ "vocab_parallel": false
+ },
+ "num_attention_heads": 4,
+ "num_hidden_layers": 2,
+ "num_key_value_heads": 4,
+ "pretraining_tp": 1,
+ "rms_norm_eps": 1e-05,
+ "rope_scaling": {
+ "factor": 8.0,
+ "high_freq_factor": 4.0,
+ "low_freq_factor": 1.0,
+ "original_max_position_embeddings": 8192,
+ "rope_type": "llama3"
+ },
+ "rope_theta": 500000.0,
+ "tie_word_embeddings": false,
+ "use_cache": true,
+ "vocab_size": 128256
+ }
neuronxcc-2.19.8089.0+8ab9f450/0_REGISTRY/0.3.0.dev2/llama/llamafactory/tiny-random-Llama-3/c36bfaee43f6f840c055.json ADDED
@@ -0,0 +1,77 @@
+ {
+ "_entry_class": "SingleModelCacheEntry",
+ "_model_id": "llamafactory/tiny-random-Llama-3",
+ "_task": "text-generation",
+ "architectures": [
+ "LlamaForCausalLM"
+ ],
+ "attention_bias": false,
+ "attention_dropout": 0.0,
+ "head_dim": 4,
+ "hidden_act": "silu",
+ "hidden_size": 16,
+ "initializer_range": 0.02,
+ "intermediate_size": 64,
+ "max_position_embeddings": 131072,
+ "mlp_bias": false,
+ "model_type": "llama",
+ "neuron": {
+ "_serialized_key": "NxDNeuronConfig",
+ "async_mode": false,
+ "attn_kernel_enabled": false,
+ "batch_size": 1,
+ "capacity_factor": null,
+ "cc_pipeline_tiling_factor": 2,
+ "checkpoint_id": "llamafactory/tiny-random-Llama-3",
+ "checkpoint_revision": "bf2a2e3bf199ad2ee96f02a3c00246c608db22a8",
+ "continuous_batching": false,
+ "enable_bucketing": false,
+ "ep_degree": 1,
+ "flash_decoding_enabled": false,
+ "fused_qkv": true,
+ "glu_mlp": true,
+ "is_chunked_prefill": false,
+ "local_ranks_size": 2,
+ "logical_nc_config": 1,
+ "max_batch_size": 1,
+ "max_context_length": 100,
+ "max_topk": 256,
+ "mlp_kernel_enabled": false,
+ "mlp_kernel_fuse_residual_add": false,
+ "n_active_tokens": 100,
+ "neuronxcc_version": "2.19.8089.0+8ab9f450",
+ "num_cores_per_group": 1,
+ "on_device_sampling": true,
+ "optimum_neuron_version": "0.3.0.dev2",
+ "output_logits": false,
+ "padding_side": "right",
+ "pp_degree": 1,
+ "qk_layernorm": false,
+ "qkv_kernel_enabled": false,
+ "rpl_reduce_dtype": "bfloat16",
+ "sequence_length": 100,
+ "sequence_parallel_enabled": false,
+ "speculation_length": 0,
+ "start_rank_id": 0,
+ "target": null,
+ "torch_dtype": "bfloat16",
+ "tp_degree": 2,
+ "vocab_parallel": false
+ },
+ "num_attention_heads": 4,
+ "num_hidden_layers": 2,
+ "num_key_value_heads": 4,
+ "pretraining_tp": 1,
+ "rms_norm_eps": 1e-05,
+ "rope_scaling": {
+ "factor": 8.0,
+ "high_freq_factor": 4.0,
+ "low_freq_factor": 1.0,
+ "original_max_position_embeddings": 8192,
+ "rope_type": "llama3"
+ },
+ "rope_theta": 500000.0,
+ "tie_word_embeddings": false,
+ "use_cache": true,
+ "vocab_size": 128256
+ }
neuronxcc-2.19.8089.0+8ab9f450/0_REGISTRY/0.3.0.dev2/llama/unsloth/Llama-3.2-1B-Instruct/ee2a399906ca25519a40.json ADDED
@@ -0,0 +1,78 @@
+ {
+ "_entry_class": "SingleModelCacheEntry",
+ "_model_id": "unsloth/Llama-3.2-1B-Instruct",
+ "_task": "text-generation",
+ "architectures": [
+ "LlamaForCausalLM"
+ ],
+ "attention_bias": false,
+ "attention_dropout": 0.0,
+ "head_dim": 64,
+ "hidden_act": "silu",
+ "hidden_size": 2048,
+ "initializer_range": 0.02,
+ "intermediate_size": 8192,
+ "max_position_embeddings": 131072,
+ "mlp_bias": false,
+ "model_type": "llama",
+ "neuron": {
+ "_serialized_key": "NxDNeuronConfig",
+ "async_mode": false,
+ "attn_kernel_enabled": false,
+ "batch_size": 4,
+ "capacity_factor": null,
+ "cc_pipeline_tiling_factor": 2,
+ "checkpoint_id": "unsloth/Llama-3.2-1B-Instruct",
+ "checkpoint_revision": "5a8abab4a5d6f164389b1079fb721cfab8d7126c",
+ "continuous_batching": true,
+ "enable_bucketing": false,
+ "ep_degree": 1,
+ "flash_decoding_enabled": false,
+ "fused_qkv": true,
+ "glu_mlp": true,
+ "is_chunked_prefill": false,
+ "local_ranks_size": 2,
+ "logical_nc_config": 1,
+ "max_batch_size": 4,
+ "max_context_length": 4096,
+ "max_topk": 256,
+ "mlp_kernel_enabled": false,
+ "mlp_kernel_fuse_residual_add": false,
+ "n_active_tokens": 4096,
+ "neuronxcc_version": "2.19.8089.0+8ab9f450",
+ "num_cores_per_group": 1,
+ "on_device_sampling": false,
+ "optimum_neuron_version": "0.3.0.dev2",
+ "output_logits": false,
+ "padding_side": "right",
+ "pp_degree": 1,
+ "qk_layernorm": false,
+ "qkv_kernel_enabled": false,
+ "rpl_reduce_dtype": "float16",
+ "sequence_length": 4096,
+ "sequence_parallel_enabled": false,
+ "speculation_length": 0,
+ "start_rank_id": 0,
+ "target": null,
+ "torch_dtype": "float16",
+ "tp_degree": 2,
+ "vocab_parallel": false
+ },
+ "num_attention_heads": 32,
+ "num_hidden_layers": 16,
+ "num_key_value_heads": 8,
+ "pretraining_tp": 1,
+ "rms_norm_eps": 1e-05,
+ "rope_scaling": {
+ "factor": 32.0,
+ "high_freq_factor": 4.0,
+ "low_freq_factor": 1.0,
+ "original_max_position_embeddings": 8192,
+ "rope_type": "llama3"
+ },
+ "rope_theta": 500000.0,
+ "tie_word_embeddings": true,
+ "unsloth_fixed": true,
+ "use_cache": true,
+ "vocab_size": 128256
+ }
neuronxcc-2.19.8089.0+8ab9f450/0_REGISTRY/0.3.0.dev2/mixtral/dacorvo/Mixtral-tiny/350aa0c9a5de191ee7c7.json ADDED
@@ -0,0 +1,73 @@
+ {
+ "_entry_class": "SingleModelCacheEntry",
+ "_model_id": "dacorvo/Mixtral-tiny",
+ "_task": "text-generation",
+ "architectures": [
+ "MixtralForCausalLM"
+ ],
+ "attention_dropout": 0.0,
+ "head_dim": 32,
+ "hidden_act": "silu",
+ "hidden_size": 1024,
+ "initializer_range": 0.02,
+ "intermediate_size": 3584,
+ "max_position_embeddings": 1024,
+ "model_type": "mixtral",
+ "neuron": {
+ "_serialized_key": "NxDNeuronConfig",
+ "async_mode": false,
+ "attn_kernel_enabled": false,
+ "batch_size": 2,
+ "capacity_factor": null,
+ "cc_pipeline_tiling_factor": 2,
+ "checkpoint_id": "dacorvo/Mixtral-tiny",
+ "checkpoint_revision": "c557ba205ddff6ea911f4719e0d543d6c08356b6",
+ "continuous_batching": false,
+ "enable_bucketing": false,
+ "ep_degree": 1,
+ "flash_decoding_enabled": false,
+ "fused_qkv": false,
+ "glu_mlp": true,
+ "is_chunked_prefill": false,
+ "local_ranks_size": 2,
+ "logical_nc_config": 1,
+ "max_batch_size": 2,
+ "max_context_length": 100,
+ "max_topk": 256,
+ "mlp_kernel_enabled": false,
+ "mlp_kernel_fuse_residual_add": false,
+ "n_active_tokens": 100,
+ "neuronxcc_version": "2.19.8089.0+8ab9f450",
+ "num_cores_per_group": 1,
+ "on_device_sampling": false,
+ "optimum_neuron_version": "0.3.0.dev2",
+ "output_logits": false,
+ "padding_side": "right",
+ "pp_degree": 1,
+ "qk_layernorm": false,
+ "qkv_kernel_enabled": false,
+ "rpl_reduce_dtype": "float16",
+ "sequence_length": 100,
+ "sequence_parallel_enabled": false,
+ "speculation_length": 0,
+ "start_rank_id": 0,
+ "target": null,
+ "torch_dtype": "float16",
+ "tp_degree": 2,
+ "vocab_parallel": false
+ },
+ "num_attention_heads": 32,
+ "num_experts_per_tok": 2,
+ "num_hidden_layers": 2,
+ "num_key_value_heads": 8,
+ "num_local_experts": 8,
+ "output_router_logits": false,
+ "rms_norm_eps": 1e-05,
+ "rope_theta": 10000.0,
+ "router_aux_loss_coef": 0.001,
+ "router_jitter_noise": 0.0,
+ "sliding_window": 4096,
+ "tie_word_embeddings": false,
+ "use_cache": true,
+ "vocab_size": 32000
+ }
neuronxcc-2.19.8089.0+8ab9f450/0_REGISTRY/0.3.0.dev2/mixtral/dacorvo/Mixtral-tiny/85ab3636ce10c112729d.json ADDED
@@ -0,0 +1,73 @@
+ {
+ "_entry_class": "SingleModelCacheEntry",
+ "_model_id": "dacorvo/Mixtral-tiny",
+ "_task": "text-generation",
+ "architectures": [
+ "MixtralForCausalLM"
+ ],
+ "attention_dropout": 0.0,
+ "head_dim": 32,
+ "hidden_act": "silu",
+ "hidden_size": 1024,
+ "initializer_range": 0.02,
+ "intermediate_size": 3584,
+ "max_position_embeddings": 1024,
+ "model_type": "mixtral",
+ "neuron": {
+ "_serialized_key": "NxDNeuronConfig",
+ "async_mode": false,
+ "attn_kernel_enabled": false,
+ "batch_size": 1,
+ "capacity_factor": null,
+ "cc_pipeline_tiling_factor": 2,
+ "checkpoint_id": "dacorvo/Mixtral-tiny",
+ "checkpoint_revision": "c557ba205ddff6ea911f4719e0d543d6c08356b6",
+ "continuous_batching": false,
+ "enable_bucketing": false,
+ "ep_degree": 1,
+ "flash_decoding_enabled": false,
+ "fused_qkv": false,
+ "glu_mlp": true,
+ "is_chunked_prefill": false,
+ "local_ranks_size": 2,
+ "logical_nc_config": 1,
+ "max_batch_size": 1,
+ "max_context_length": 100,
+ "max_topk": 256,
+ "mlp_kernel_enabled": false,
+ "mlp_kernel_fuse_residual_add": false,
+ "n_active_tokens": 100,
+ "neuronxcc_version": "2.19.8089.0+8ab9f450",
+ "num_cores_per_group": 1,
+ "on_device_sampling": false,
+ "optimum_neuron_version": "0.3.0.dev2",
+ "output_logits": false,
+ "padding_side": "right",
+ "pp_degree": 1,
+ "qk_layernorm": false,
+ "qkv_kernel_enabled": false,
+ "rpl_reduce_dtype": "float16",
+ "sequence_length": 100,
+ "sequence_parallel_enabled": false,
+ "speculation_length": 0,
+ "start_rank_id": 0,
+ "target": null,
+ "torch_dtype": "float16",
+ "tp_degree": 2,
+ "vocab_parallel": false
+ },
+ "num_attention_heads": 32,
+ "num_experts_per_tok": 2,
+ "num_hidden_layers": 2,
+ "num_key_value_heads": 8,
+ "num_local_experts": 8,
+ "output_router_logits": false,
+ "rms_norm_eps": 1e-05,
+ "rope_theta": 10000.0,
+ "router_aux_loss_coef": 0.001,
+ "router_jitter_noise": 0.0,
+ "sliding_window": 4096,
+ "tie_word_embeddings": false,
+ "use_cache": true,
+ "vocab_size": 32000
+ }
neuronxcc-2.19.8089.0+8ab9f450/0_REGISTRY/0.3.0.dev2/mixtral/dacorvo/Mixtral-tiny/e6b337232e4796edc487.json ADDED
@@ -0,0 +1,73 @@
+ {
+ "_entry_class": "SingleModelCacheEntry",
+ "_model_id": "dacorvo/Mixtral-tiny",
+ "_task": "text-generation",
+ "architectures": [
+ "MixtralForCausalLM"
+ ],
+ "attention_dropout": 0.0,
+ "head_dim": 32,
+ "hidden_act": "silu",
+ "hidden_size": 1024,
+ "initializer_range": 0.02,
+ "intermediate_size": 3584,
+ "max_position_embeddings": 1024,
+ "model_type": "mixtral",
+ "neuron": {
+ "_serialized_key": "NxDNeuronConfig",
+ "async_mode": false,
+ "attn_kernel_enabled": false,
+ "batch_size": 1,
+ "capacity_factor": null,
+ "cc_pipeline_tiling_factor": 2,
+ "checkpoint_id": "dacorvo/Mixtral-tiny",
+ "checkpoint_revision": "c557ba205ddff6ea911f4719e0d543d6c08356b6",
+ "continuous_batching": false,
+ "enable_bucketing": false,
+ "ep_degree": 1,
+ "flash_decoding_enabled": false,
+ "fused_qkv": false,
+ "glu_mlp": true,
+ "is_chunked_prefill": false,
+ "local_ranks_size": 2,
+ "logical_nc_config": 1,
+ "max_batch_size": 1,
+ "max_context_length": 100,
+ "max_topk": 256,
+ "mlp_kernel_enabled": false,
+ "mlp_kernel_fuse_residual_add": false,
+ "n_active_tokens": 100,
+ "neuronxcc_version": "2.19.8089.0+8ab9f450",
+ "num_cores_per_group": 1,
+ "on_device_sampling": false,
+ "optimum_neuron_version": "0.3.0.dev2",
+ "output_logits": false,
+ "padding_side": "right",
+ "pp_degree": 1,
+ "qk_layernorm": false,
+ "qkv_kernel_enabled": false,
+ "rpl_reduce_dtype": "bfloat16",
+ "sequence_length": 100,
+ "sequence_parallel_enabled": false,
+ "speculation_length": 0,
+ "start_rank_id": 0,
+ "target": null,
+ "torch_dtype": "bfloat16",
+ "tp_degree": 2,
+ "vocab_parallel": false
+ },
+ "num_attention_heads": 32,
+ "num_experts_per_tok": 2,
+ "num_hidden_layers": 2,
+ "num_key_value_heads": 8,
+ "num_local_experts": 8,
+ "output_router_logits": false,
+ "rms_norm_eps": 1e-05,
+ "rope_theta": 10000.0,
+ "router_aux_loss_coef": 0.001,
+ "router_jitter_noise": 0.0,
+ "sliding_window": 4096,
+ "tie_word_embeddings": false,
+ "use_cache": true,
+ "vocab_size": 32000
+ }
neuronxcc-2.19.8089.0+8ab9f450/0_REGISTRY/0.3.0.dev2/phi3/yujiepan/phi-4-tiny-random/16874c09c9bed580256d.json ADDED
@@ -0,0 +1,74 @@
+ {
+ "_entry_class": "SingleModelCacheEntry",
+ "_model_id": "yujiepan/phi-4-tiny-random",
+ "_task": "text-generation",
+ "architectures": [
+ "Phi3ForCausalLM"
+ ],
+ "attention_bias": false,
+ "attention_dropout": 0.0,
+ "auto_map": {},
+ "embd_pdrop": 0.0,
+ "hidden_act": "silu",
+ "hidden_size": 16,
+ "initializer_range": 0.02,
+ "intermediate_size": 32,
+ "max_position_embeddings": 16384,
+ "model_type": "phi3",
+ "neuron": {
+ "_serialized_key": "NxDNeuronConfig",
+ "async_mode": false,
+ "attn_kernel_enabled": false,
+ "batch_size": 2,
+ "capacity_factor": null,
+ "cc_pipeline_tiling_factor": 2,
+ "checkpoint_id": "yujiepan/phi-4-tiny-random",
+ "checkpoint_revision": "18a9a1168dc97ac6d128f811925670c275610f5a",
+ "continuous_batching": true,
+ "enable_bucketing": false,
+ "ep_degree": 1,
+ "flash_decoding_enabled": false,
+ "fused_qkv": true,
+ "glu_mlp": true,
+ "is_chunked_prefill": false,
+ "local_ranks_size": 2,
+ "logical_nc_config": 1,
+ "max_batch_size": 2,
+ "max_context_length": 100,
+ "max_topk": 256,
+ "mlp_kernel_enabled": false,
+ "mlp_kernel_fuse_residual_add": false,
+ "n_active_tokens": 100,
+ "neuronxcc_version": "2.19.8089.0+8ab9f450",
+ "num_cores_per_group": 1,
+ "on_device_sampling": false,
+ "optimum_neuron_version": "0.3.0.dev2",
+ "output_logits": false,
+ "padding_side": "right",
+ "pp_degree": 1,
+ "qk_layernorm": false,
+ "qkv_kernel_enabled": false,
+ "rpl_reduce_dtype": "float16",
+ "sequence_length": 100,
+ "sequence_parallel_enabled": false,
+ "speculation_length": 0,
+ "start_rank_id": 0,
+ "target": null,
+ "torch_dtype": "float16",
+ "tp_degree": 2,
+ "vocab_parallel": false
+ },
+ "num_attention_heads": 2,
+ "num_hidden_layers": 2,
+ "num_key_value_heads": 1,
+ "original_max_position_embeddings": 16384,
+ "partial_rotary_factor": 1.0,
+ "resid_pdrop": 0.0,
+ "rms_norm_eps": 1e-05,
+ "rope_scaling": null,
+ "rope_theta": 250000,
+ "sliding_window": null,
+ "tie_word_embeddings": false,
+ "use_cache": true,
+ "vocab_size": 100352
+ }
neuronxcc-2.19.8089.0+8ab9f450/0_REGISTRY/0.3.0.dev2/phi3/yujiepan/phi-4-tiny-random/e38863225742ea4adc6d.json ADDED
@@ -0,0 +1,74 @@
+ {
+ "_entry_class": "SingleModelCacheEntry",
+ "_model_id": "yujiepan/phi-4-tiny-random",
+ "_task": "text-generation",
+ "architectures": [
+ "Phi3ForCausalLM"
+ ],
+ "attention_bias": false,
+ "attention_dropout": 0.0,
+ "auto_map": {},
+ "embd_pdrop": 0.0,
+ "hidden_act": "silu",
+ "hidden_size": 16,
+ "initializer_range": 0.02,
+ "intermediate_size": 32,
+ "max_position_embeddings": 16384,
+ "model_type": "phi3",
+ "neuron": {
+ "_serialized_key": "NxDNeuronConfig",
+ "async_mode": false,
+ "attn_kernel_enabled": false,
+ "batch_size": 1,
+ "capacity_factor": null,
+ "cc_pipeline_tiling_factor": 2,
+ "checkpoint_id": "yujiepan/phi-4-tiny-random",
+ "checkpoint_revision": "18a9a1168dc97ac6d128f811925670c275610f5a",
+ "continuous_batching": false,
+ "enable_bucketing": false,
+ "ep_degree": 1,
+ "flash_decoding_enabled": false,
+ "fused_qkv": true,
+ "glu_mlp": true,
+ "is_chunked_prefill": false,
+ "local_ranks_size": 2,
+ "logical_nc_config": 1,
+ "max_batch_size": 1,
+ "max_context_length": 100,
+ "max_topk": 256,
+ "mlp_kernel_enabled": false,
+ "mlp_kernel_fuse_residual_add": false,
+ "n_active_tokens": 100,
+ "neuronxcc_version": "2.19.8089.0+8ab9f450",
+ "num_cores_per_group": 1,
+ "on_device_sampling": true,
+ "optimum_neuron_version": "0.3.0.dev2",
+ "output_logits": false,
+ "padding_side": "right",
+ "pp_degree": 1,
+ "qk_layernorm": false,
+ "qkv_kernel_enabled": false,
+ "rpl_reduce_dtype": "float16",
+ "sequence_length": 100,
+ "sequence_parallel_enabled": false,
+ "speculation_length": 0,
+ "start_rank_id": 0,
+ "target": null,
+ "torch_dtype": "float16",
+ "tp_degree": 2,
+ "vocab_parallel": false
+ },
+ "num_attention_heads": 2,
+ "num_hidden_layers": 2,
+ "num_key_value_heads": 1,
+ "original_max_position_embeddings": 16384,
+ "partial_rotary_factor": 1.0,
+ "resid_pdrop": 0.0,
+ "rms_norm_eps": 1e-05,
+ "rope_scaling": null,
+ "rope_theta": 250000,
+ "sliding_window": null,
+ "tie_word_embeddings": false,
+ "use_cache": true,
+ "vocab_size": 100352
+ }
neuronxcc-2.19.8089.0+8ab9f450/0_REGISTRY/0.3.0.dev2/phi3/yujiepan/phi-4-tiny-random/fecca0469b6993e5d3c5.json ADDED
@@ -0,0 +1,74 @@
+ {
+ "_entry_class": "SingleModelCacheEntry",
+ "_model_id": "yujiepan/phi-4-tiny-random",
+ "_task": "text-generation",
+ "architectures": [
+ "Phi3ForCausalLM"
+ ],
+ "attention_bias": false,
+ "attention_dropout": 0.0,
+ "auto_map": {},
+ "embd_pdrop": 0.0,
+ "hidden_act": "silu",
+ "hidden_size": 16,
+ "initializer_range": 0.02,
+ "intermediate_size": 32,
+ "max_position_embeddings": 16384,
+ "model_type": "phi3",
+ "neuron": {
+ "_serialized_key": "NxDNeuronConfig",
+ "async_mode": false,
+ "attn_kernel_enabled": false,
+ "batch_size": 1,
+ "capacity_factor": null,
+ "cc_pipeline_tiling_factor": 2,
+ "checkpoint_id": "yujiepan/phi-4-tiny-random",
+ "checkpoint_revision": "18a9a1168dc97ac6d128f811925670c275610f5a",
+ "continuous_batching": false,
+ "enable_bucketing": false,
+ "ep_degree": 1,
+ "flash_decoding_enabled": false,
+ "fused_qkv": true,
+ "glu_mlp": true,
+ "is_chunked_prefill": false,
+ "local_ranks_size": 2,
+ "logical_nc_config": 1,
+ "max_batch_size": 1,
+ "max_context_length": 100,
+ "max_topk": 256,
+ "mlp_kernel_enabled": false,
+ "mlp_kernel_fuse_residual_add": false,
+ "n_active_tokens": 100,
+ "neuronxcc_version": "2.19.8089.0+8ab9f450",
+ "num_cores_per_group": 1,
+ "on_device_sampling": true,
+ "optimum_neuron_version": "0.3.0.dev2",
+ "output_logits": false,
+ "padding_side": "right",
+ "pp_degree": 1,
+ "qk_layernorm": false,
+ "qkv_kernel_enabled": false,
+ "rpl_reduce_dtype": "bfloat16",
+ "sequence_length": 100,
+ "sequence_parallel_enabled": false,
+ "speculation_length": 0,
+ "start_rank_id": 0,
+ "target": null,
+ "torch_dtype": "bfloat16",
+ "tp_degree": 2,
+ "vocab_parallel": false
+ },
+ "num_attention_heads": 2,
+ "num_hidden_layers": 2,
+ "num_key_value_heads": 1,
+ "original_max_position_embeddings": 16384,
+ "partial_rotary_factor": 1.0,
+ "resid_pdrop": 0.0,
+ "rms_norm_eps": 1e-05,
+ "rope_scaling": null,
+ "rope_theta": 250000,
+ "sliding_window": null,
+ "tie_word_embeddings": false,
+ "use_cache": true,
+ "vocab_size": 100352
+ }
neuronxcc-2.19.8089.0+8ab9f450/0_REGISTRY/0.3.0.dev2/qwen2/yujiepan/qwen2.5-128k-tiny-random/34b1d02021287cfd46b2.json ADDED
@@ -0,0 +1,75 @@
+ {
+ "_entry_class": "SingleModelCacheEntry",
+ "_model_id": "yujiepan/qwen2.5-128k-tiny-random",
+ "_task": "text-generation",
+ "architectures": [
+ "Qwen2ForCausalLM"
+ ],
+ "attention_dropout": 0.0,
+ "hidden_act": "silu",
+ "hidden_size": 8,
+ "initializer_range": 0.02,
+ "intermediate_size": 16,
+ "max_position_embeddings": 32768,
+ "max_window_layers": 1,
+ "model_type": "qwen2",
+ "neuron": {
+ "_serialized_key": "NxDNeuronConfig",
+ "async_mode": false,
+ "attn_kernel_enabled": false,
+ "batch_size": 2,
+ "capacity_factor": null,
+ "cc_pipeline_tiling_factor": 2,
+ "checkpoint_id": "yujiepan/qwen2.5-128k-tiny-random",
+ "checkpoint_revision": "c8296d4ca3f87782876d2382fbb6481d1beb8ef0",
+ "continuous_batching": true,
+ "enable_bucketing": false,
+ "ep_degree": 1,
+ "flash_decoding_enabled": false,
+ "fused_qkv": false,
+ "glu_mlp": true,
+ "is_chunked_prefill": false,
+ "local_ranks_size": 2,
+ "logical_nc_config": 1,
+ "max_batch_size": 2,
+ "max_context_length": 100,
+ "max_topk": 256,
+ "mlp_kernel_enabled": false,
+ "mlp_kernel_fuse_residual_add": false,
+ "n_active_tokens": 100,
+ "neuronxcc_version": "2.19.8089.0+8ab9f450",
+ "num_cores_per_group": 1,
+ "on_device_sampling": false,
+ "optimum_neuron_version": "0.3.0.dev2",
+ "output_logits": false,
+ "padding_side": "right",
+ "pp_degree": 1,
+ "qk_layernorm": false,
+ "qkv_kernel_enabled": false,
+ "rpl_reduce_dtype": "float16",
+ "sequence_length": 100,
+ "sequence_parallel_enabled": false,
+ "speculation_length": 0,
+ "start_rank_id": 0,
+ "target": null,
+ "torch_dtype": "float16",
+ "tp_degree": 2,
+ "vocab_parallel": false
+ },
+ "num_attention_heads": 4,
+ "num_hidden_layers": 2,
+ "num_key_value_heads": 2,
+ "rms_norm_eps": 1e-06,
+ "rope_scaling": {
+ "factor": 4.0,
+ "original_max_position_embeddings": 32768,
+ "rope_type": "yarn",
+ "type": "yarn"
+ },
+ "rope_theta": 1000000.0,
+ "sliding_window": 131072,
+ "tie_word_embeddings": false,
+ "use_cache": true,
+ "use_sliding_window": false,
+ "vocab_size": 152064
+ }
neuronxcc-2.19.8089.0+8ab9f450/0_REGISTRY/0.3.0.dev2/qwen2/yujiepan/qwen2.5-128k-tiny-random/7ee9b03bb7a8b01f359e.json ADDED
@@ -0,0 +1,75 @@
+ {
+ "_entry_class": "SingleModelCacheEntry",
+ "_model_id": "yujiepan/qwen2.5-128k-tiny-random",
+ "_task": "text-generation",
+ "architectures": [
+ "Qwen2ForCausalLM"
+ ],
+ "attention_dropout": 0.0,
+ "hidden_act": "silu",
+ "hidden_size": 8,
+ "initializer_range": 0.02,
+ "intermediate_size": 16,
+ "max_position_embeddings": 32768,
+ "max_window_layers": 1,
+ "model_type": "qwen2",
+ "neuron": {
+ "_serialized_key": "NxDNeuronConfig",
+ "async_mode": false,
+ "attn_kernel_enabled": false,
+ "batch_size": 1,
+ "capacity_factor": null,
+ "cc_pipeline_tiling_factor": 2,
+ "checkpoint_id": "yujiepan/qwen2.5-128k-tiny-random",
+ "checkpoint_revision": "c8296d4ca3f87782876d2382fbb6481d1beb8ef0",
+ "continuous_batching": false,
+ "enable_bucketing": false,
+ "ep_degree": 1,
+ "flash_decoding_enabled": false,
+ "fused_qkv": false,
+ "glu_mlp": true,
+ "is_chunked_prefill": false,
+ "local_ranks_size": 2,
+ "logical_nc_config": 1,
+ "max_batch_size": 1,
+ "max_context_length": 100,
+ "max_topk": 256,
+ "mlp_kernel_enabled": false,
+ "mlp_kernel_fuse_residual_add": false,
+ "n_active_tokens": 100,
+ "neuronxcc_version": "2.19.8089.0+8ab9f450",
+ "num_cores_per_group": 1,
+ "on_device_sampling": true,
+ "optimum_neuron_version": "0.3.0.dev2",
+ "output_logits": false,
+ "padding_side": "right",
+ "pp_degree": 1,
+ "qk_layernorm": false,
+ "qkv_kernel_enabled": false,
+ "rpl_reduce_dtype": "float16",
+ "sequence_length": 100,
+ "sequence_parallel_enabled": false,
+ "speculation_length": 0,
+ "start_rank_id": 0,
+ "target": null,
+ "torch_dtype": "float16",
+ "tp_degree": 2,
+ "vocab_parallel": false
+ },
+ "num_attention_heads": 4,
+ "num_hidden_layers": 2,
+ "num_key_value_heads": 2,
+ "rms_norm_eps": 1e-06,
+ "rope_scaling": {
+ "factor": 4.0,
+ "original_max_position_embeddings": 32768,
+ "rope_type": "yarn",
+ "type": "yarn"
+ },
+ "rope_theta": 1000000.0,
+ "sliding_window": 131072,
+ "tie_word_embeddings": false,
+ "use_cache": true,
+ "use_sliding_window": false,
+ "vocab_size": 152064
+ }
neuronxcc-2.19.8089.0+8ab9f450/0_REGISTRY/0.3.0.dev2/qwen2/yujiepan/qwen2.5-128k-tiny-random/e709ce08492ca8a65007.json ADDED
@@ -0,0 +1,75 @@
+ {
+ "_entry_class": "SingleModelCacheEntry",
+ "_model_id": "yujiepan/qwen2.5-128k-tiny-random",
+ "_task": "text-generation",
+ "architectures": [
+ "Qwen2ForCausalLM"
+ ],
+ "attention_dropout": 0.0,
+ "hidden_act": "silu",
+ "hidden_size": 8,
+ "initializer_range": 0.02,
+ "intermediate_size": 16,
+ "max_position_embeddings": 32768,
+ "max_window_layers": 1,
+ "model_type": "qwen2",
+ "neuron": {
+ "_serialized_key": "NxDNeuronConfig",
+ "async_mode": false,
+ "attn_kernel_enabled": false,
+ "batch_size": 1,
+ "capacity_factor": null,
+ "cc_pipeline_tiling_factor": 2,
+ "checkpoint_id": "yujiepan/qwen2.5-128k-tiny-random",
+ "checkpoint_revision": "c8296d4ca3f87782876d2382fbb6481d1beb8ef0",
+ "continuous_batching": false,
+ "enable_bucketing": false,
+ "ep_degree": 1,
+ "flash_decoding_enabled": false,
+ "fused_qkv": false,
+ "glu_mlp": true,
+ "is_chunked_prefill": false,
+ "local_ranks_size": 2,
+ "logical_nc_config": 1,
+ "max_batch_size": 1,
+ "max_context_length": 100,
+ "max_topk": 256,
+ "mlp_kernel_enabled": false,
+ "mlp_kernel_fuse_residual_add": false,
+ "n_active_tokens": 100,
+ "neuronxcc_version": "2.19.8089.0+8ab9f450",
+ "num_cores_per_group": 1,
+ "on_device_sampling": true,
+ "optimum_neuron_version": "0.3.0.dev2",
+ "output_logits": false,
+ "padding_side": "right",
+ "pp_degree": 1,
+ "qk_layernorm": false,
+ "qkv_kernel_enabled": false,
+ "rpl_reduce_dtype": "bfloat16",
+ "sequence_length": 100,
+ "sequence_parallel_enabled": false,
+ "speculation_length": 0,
+ "start_rank_id": 0,
+ "target": null,
+ "torch_dtype": "bfloat16",
+ "tp_degree": 2,
+ "vocab_parallel": false
+ },
+ "num_attention_heads": 4,
+ "num_hidden_layers": 2,
+ "num_key_value_heads": 2,
+ "rms_norm_eps": 1e-06,
+ "rope_scaling": {
+ "factor": 4.0,
+ "original_max_position_embeddings": 32768,
+ "rope_type": "yarn",
+ "type": "yarn"
+ },
+ "rope_theta": 1000000.0,
+ "sliding_window": 131072,
+ "tie_word_embeddings": false,
+ "use_cache": true,
+ "use_sliding_window": false,
+ "vocab_size": 152064
+ }
neuronxcc-2.19.8089.0+8ab9f450/MODULE_00333196986acf2eb79a+431f5505/compile_flags.json ADDED
@@ -0,0 +1 @@
+ "--model-type=transformer -O1 --lnc=1 --internal-hlo2tensorizer-options=--experimental-unsafe-fp8e4m3fn-as-fp8e4m3 --logfile=/tmp/nxd_model/layout_opt/log-neuron-cc.txt"
neuronxcc-2.19.8089.0+8ab9f450/MODULE_00333196986acf2eb79a+431f5505/model.done ADDED
File without changes
neuronxcc-2.19.8089.0+8ab9f450/MODULE_00333196986acf2eb79a+431f5505/model.hlo_module.pb ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0586c9953cf96a4bf2dcd9f92fbeb91b5ee1d0c4a4fa4b6552ca650e05704245
+ size 7099
neuronxcc-2.19.8089.0+8ab9f450/MODULE_00333196986acf2eb79a+431f5505/model.neff ADDED
Binary file (42 kB).
 
neuronxcc-2.19.8089.0+8ab9f450/MODULE_0661e1d8741e9ead0d5c+431f5505/compile_flags.json ADDED
@@ -0,0 +1 @@
+ "--model-type=transformer -O1 --lnc=1 --internal-hlo2tensorizer-options=--experimental-unsafe-fp8e4m3fn-as-fp8e4m3 --logfile=/tmp/nxd_model/layout_opt/log-neuron-cc.txt"
neuronxcc-2.19.8089.0+8ab9f450/MODULE_0661e1d8741e9ead0d5c+431f5505/model.done ADDED
File without changes
neuronxcc-2.19.8089.0+8ab9f450/MODULE_0661e1d8741e9ead0d5c+431f5505/model.hlo_module.pb ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:adacc7e6f071cf30f81197d157059b22c0963bf27c33f23e9d9610917f32f632
+ size 7106
neuronxcc-2.19.8089.0+8ab9f450/MODULE_0661e1d8741e9ead0d5c+431f5505/model.neff ADDED
Binary file (42 kB).
 
neuronxcc-2.19.8089.0+8ab9f450/MODULE_0bbe60dde8eaacbc8218+5be477de/compile_flags.json ADDED
@@ -0,0 +1 @@
+ ["--target=trn1", "--auto-cast=none", "--model-type=transformer", "--tensorizer-options=--enable-ccop-compute-overlap --cc-pipeline-tiling-factor=2 --vectorize-dge-dma --vectorize-strided-dma ", "-O2", "--internal-num-neuroncores-per-sengine=1", "--logfile=/tmp/nxd_model/token_generation_model/_tp0_bk0/log-neuron-cc.txt", "--enable-internal-neff-wrapper"]
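Note that the compile_flags.json entries in this commit use two serializations: the MODULE_*+431f5505 entries (layout_opt log path) store the neuronx-cc flags as a single space-separated string, while the MODULE_*+5be477de (token_generation_model) and MODULE_*+84f3e719 (context_encoding_model) entries store them as a JSON list. A small helper sketch, standard library only and with a hypothetical local path, that normalizes both forms into one flag list:

import json
import shlex

def load_compile_flags(path: str) -> list[str]:
    # Return the neuronx-cc flags from a compile_flags.json file as a list.
    # Handles both serializations present in this cache: a single string
    # (MODULE_*+431f5505) or a JSON list of strings (MODULE_*+5be477de, MODULE_*+84f3e719).
    with open(path) as f:
        flags = json.load(f)
    if isinstance(flags, str):  # string form, e.g. "--model-type=transformer -O1 ..."
        return shlex.split(flags)
    return list(flags)          # list form, e.g. ["--target=trn1", "--auto-cast=none", ...]

# Example (local clone assumed):
# load_compile_flags("neuronxcc-2.19.8089.0+8ab9f450/MODULE_0bbe60dde8eaacbc8218+5be477de/compile_flags.json")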
neuronxcc-2.19.8089.0+8ab9f450/MODULE_0bbe60dde8eaacbc8218+5be477de/model.done ADDED
File without changes
neuronxcc-2.19.8089.0+8ab9f450/MODULE_0bbe60dde8eaacbc8218+5be477de/model.hlo_module.pb ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:cec162b6f82a677bf5bad8a8984a403624ed7e7f8405ed1fd2b89ffb70ba3af5
+ size 46541
neuronxcc-2.19.8089.0+8ab9f450/MODULE_0bbe60dde8eaacbc8218+5be477de/model.neff ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1ab9e2f02fa29415c2a584da7d3a0dd489fa46185414b6a6ceda1ff02a0b7e42
+ size 154624
neuronxcc-2.19.8089.0+8ab9f450/MODULE_0bbe60dde8eaacbc8218+5be477de/wrapped_neff.hlo ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:12294311aab65c8b1e1b69aefdf4cc623e27bd53029218381ec6a4cf66f7584c
+ size 162285
neuronxcc-2.19.8089.0+8ab9f450/MODULE_0f82fe03a3af29dff8f6+5be477de/compile_flags.json ADDED
@@ -0,0 +1 @@
+ ["--target=trn1", "--auto-cast=none", "--model-type=transformer", "--tensorizer-options=--enable-ccop-compute-overlap --cc-pipeline-tiling-factor=2 --vectorize-dge-dma --vectorize-strided-dma ", "-O2", "--internal-num-neuroncores-per-sengine=1", "--logfile=/tmp/nxd_model/token_generation_model/_tp0_bk0/log-neuron-cc.txt", "--enable-internal-neff-wrapper"]
neuronxcc-2.19.8089.0+8ab9f450/MODULE_0f82fe03a3af29dff8f6+5be477de/model.done ADDED
File without changes
neuronxcc-2.19.8089.0+8ab9f450/MODULE_0f82fe03a3af29dff8f6+5be477de/model.hlo_module.pb ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:dadfb1031564220fbc15ea84f84f3e9f18fdf944af7f7d4ec99c1a7a5dc08489
+ size 81006
neuronxcc-2.19.8089.0+8ab9f450/MODULE_0f82fe03a3af29dff8f6+5be477de/model.neff ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a89157420d03816ac2f9222a89bce907177f1a5ca48f1137c20a533f8fdfb10a
+ size 236544
neuronxcc-2.19.8089.0+8ab9f450/MODULE_0f82fe03a3af29dff8f6+5be477de/wrapped_neff.hlo ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f02bf62809adc55ce64637129b796e84a3032d0036f542bf94e0f21ec5512b0e
+ size 244319
neuronxcc-2.19.8089.0+8ab9f450/MODULE_1d687728a717f87883ca+431f5505/compile_flags.json ADDED
@@ -0,0 +1 @@
+ "--model-type=transformer -O1 --lnc=1 --internal-hlo2tensorizer-options=--experimental-unsafe-fp8e4m3fn-as-fp8e4m3 --logfile=/tmp/nxd_model/layout_opt/log-neuron-cc.txt"
neuronxcc-2.19.8089.0+8ab9f450/MODULE_1d687728a717f87883ca+431f5505/model.done ADDED
File without changes
neuronxcc-2.19.8089.0+8ab9f450/MODULE_1d687728a717f87883ca+431f5505/model.hlo_module.pb ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7ed7e1763d2fde0d0bfdf8790eecf071cb45a47c0486921354c5beb46403b026
+ size 11227
neuronxcc-2.19.8089.0+8ab9f450/MODULE_1d687728a717f87883ca+431f5505/model.neff ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7620edb3b2c01abe336f904cd409b85732204a2bfa44a10ccba787a4a0b474e7
+ size 103424
neuronxcc-2.19.8089.0+8ab9f450/MODULE_1ea7c53c16e767166083+84f3e719/compile_flags.json ADDED
@@ -0,0 +1 @@
+ ["--target=trn1", "--auto-cast=none", "--model-type=transformer", "--tensorizer-options=--enable-ccop-compute-overlap --cc-pipeline-tiling-factor=2 --vectorize-dge-dma --vectorize-strided-dma ", "-O2", "--internal-num-neuroncores-per-sengine=1", "--logfile=/tmp/nxd_model/context_encoding_model/_tp0_bk0/log-neuron-cc.txt"]
neuronxcc-2.19.8089.0+8ab9f450/MODULE_1ea7c53c16e767166083+84f3e719/model.done ADDED
File without changes
neuronxcc-2.19.8089.0+8ab9f450/MODULE_1ea7c53c16e767166083+84f3e719/model.hlo_module.pb ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:bb3f3c80a1f760b0f801aa56dd94adcd18f8703faf4fbe1cf4629086b0f3ced2
+ size 87785
neuronxcc-2.19.8089.0+8ab9f450/MODULE_1ea7c53c16e767166083+84f3e719/model.neff ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7bcc37564d8330894550bcd1c560c819f27452fc9a2dccfc5012d25347f52f7d
+ size 246784
neuronxcc-2.19.8089.0+8ab9f450/MODULE_2371c90829406a620831+5be477de/compile_flags.json ADDED
@@ -0,0 +1 @@
+ ["--target=trn1", "--auto-cast=none", "--model-type=transformer", "--tensorizer-options=--enable-ccop-compute-overlap --cc-pipeline-tiling-factor=2 --vectorize-dge-dma --vectorize-strided-dma ", "-O2", "--internal-num-neuroncores-per-sengine=1", "--logfile=/tmp/nxd_model/token_generation_model/_tp0_bk0/log-neuron-cc.txt", "--enable-internal-neff-wrapper"]
neuronxcc-2.19.8089.0+8ab9f450/MODULE_2371c90829406a620831+5be477de/model.done ADDED
File without changes
neuronxcc-2.19.8089.0+8ab9f450/MODULE_2371c90829406a620831+5be477de/model.hlo_module.pb ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b04b389baff4d431c49333fe1097c9bf015a8d8f0e9ab7b2dac8b6c470c50275
+ size 79198
neuronxcc-2.19.8089.0+8ab9f450/MODULE_2371c90829406a620831+5be477de/model.neff ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8c09b60673a5f6626e5e43ee91b19cdf385f2dc3113114ba3212f31b086f1fc7
+ size 216064
neuronxcc-2.19.8089.0+8ab9f450/MODULE_2371c90829406a620831+5be477de/wrapped_neff.hlo ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6979a724da90b05c60b1f6049e232b513dd03d8d1a531e9c0704643c8ca737ab
+ size 223770
neuronxcc-2.19.8089.0+8ab9f450/MODULE_266c5c386fd48dd67015+431f5505/compile_flags.json ADDED
@@ -0,0 +1 @@
+ "--model-type=transformer -O1 --lnc=1 --internal-hlo2tensorizer-options=--experimental-unsafe-fp8e4m3fn-as-fp8e4m3 --logfile=/tmp/nxd_model/layout_opt/log-neuron-cc.txt"
neuronxcc-2.19.8089.0+8ab9f450/MODULE_266c5c386fd48dd67015+431f5505/model.done ADDED
File without changes