dacorvo (HF Staff) committed
Commit 86a35b3 · verified · parent: f05c471

Synchronizing local compiler cache.
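(For context: this is the commit message optimum-neuron emits when pushing a local Neuron compilation cache to the hub; invoking it via optimum-cli neuron cache synchronize is an assumption based on the message, not something recorded in the commit itself.)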

neuronxcc-2.19.8089.0+8ab9f450/0_REGISTRY/0.3.1.dev5/granite/ibm-granite/granite-3.1-2b-instruct/132e478ab06dfd6f996e.json ADDED
@@ -0,0 +1,60 @@
+ {
+   "_entry_class": "SingleModelCacheEntry",
+   "_model_id": "ibm-granite/granite-3.1-2b-instruct",
+   "_task": "text-generation",
+   "architectures": [
+     "GraniteForCausalLM"
+   ],
+   "attention_bias": false,
+   "attention_dropout": 0.1,
+   "attention_multiplier": 0.015625,
+   "embedding_multiplier": 12.0,
+   "hidden_act": "silu",
+   "hidden_size": 2048,
+   "initializer_range": 0.02,
+   "intermediate_size": 8192,
+   "logits_scaling": 8.0,
+   "max_position_embeddings": 131072,
+   "mlp_bias": false,
+   "model_type": "granite",
+   "neuron": {
+     "_serialized_key": "NxDNeuronConfig",
+     "batch_size": 4,
+     "capacity_factor": null,
+     "cc_pipeline_tiling_factor": 2,
+     "checkpoint_id": "ibm-granite/granite-3.1-2b-instruct",
+     "checkpoint_revision": "bbc2aed595bd38bd770263dc3ab831db9794441d",
+     "continuous_batching": true,
+     "enable_bucketing": false,
+     "ep_degree": 1,
+     "fused_qkv": true,
+     "glu_mlp": true,
+     "local_ranks_size": 2,
+     "logical_nc_config": 1,
+     "max_batch_size": 4,
+     "max_context_length": 4096,
+     "max_topk": 256,
+     "n_active_tokens": 4096,
+     "neuronxcc_version": "2.19.8089.0+8ab9f450",
+     "on_device_sampling": true,
+     "optimum_neuron_version": "0.3.1.dev5",
+     "output_logits": false,
+     "pp_degree": 1,
+     "sequence_length": 4096,
+     "speculation_length": 0,
+     "start_rank_id": 0,
+     "target": null,
+     "torch_dtype": "bfloat16",
+     "tp_degree": 2
+   },
+   "num_attention_heads": 32,
+   "num_hidden_layers": 40,
+   "num_key_value_heads": 8,
+   "residual_multiplier": 0.22,
+   "rms_norm_eps": 1e-05,
+   "rope_scaling": null,
+   "rope_theta": 5000000.0,
+   "tie_word_embeddings": true,
+   "use_cache": true,
+   "vocab_size": 49155
+ }
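
Below is a minimal sketch (not part of this commit) of how an entry like this is typically reused: optimum-neuron only fetches a cached compilation when the export parameters match the "neuron" section above. The from_pretrained keyword names (tensor_parallel_size, auto_cast_type) are assumptions about the optimum-neuron API and are not recorded in the cache entry.

# Minimal sketch, assuming optimum-neuron's NeuronModelForCausalLM API;
# kwarg names are assumptions, not taken from this cache entry.
from optimum.neuron import NeuronModelForCausalLM

# Export parameters must match the cached "neuron" config above
# (batch_size=4, sequence_length=4096, tp_degree=2, bfloat16) for the
# hub cache to be reused instead of recompiling locally.
model = NeuronModelForCausalLM.from_pretrained(
    "ibm-granite/granite-3.1-2b-instruct",
    export=True,
    batch_size=4,
    sequence_length=4096,
    tensor_parallel_size=2,  # assumed kwarg name, mirrors "tp_degree": 2
    auto_cast_type="bf16",   # mirrors "torch_dtype": "bfloat16"
)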