michaelbenayoun (HF Staff) committed
Commit 75e95ca · verified · 1 Parent(s): 157d051

Upload DeepseekV3ForCausalLM

Files changed (2):
  1. config.json +11 -11
  2. model.safetensors +2 -2
config.json CHANGED
@@ -18,24 +18,24 @@
   "hidden_size": 128,
   "initializer_range": 0.02,
   "intermediate_size": 256,
-  "kv_lora_rank": 16,
+  "kv_lora_rank": 32,
   "max_position_embeddings": 2048,
   "model_type": "deepseek_v3",
   "moe_intermediate_size": 64,
   "moe_layer_freq": 1,
   "n_group": 2,
-  "n_routed_experts": 4,
+  "n_routed_experts": 8,
   "n_shared_experts": 1,
   "norm_topk_prob": true,
-  "num_attention_heads": 4,
+  "num_attention_heads": 8,
   "num_experts_per_tok": 2,
   "num_hidden_layers": 6,
-  "num_key_value_heads": 2,
+  "num_key_value_heads": 8,
   "num_nextn_predict_layers": 1,
   "pretraining_tp": 1,
   "q_lora_rank": 32,
-  "qk_head_dim": 32,
-  "qk_nope_head_dim": 24,
+  "qk_head_dim": 24,
+  "qk_nope_head_dim": 16,
   "qk_rope_head_dim": 8,
   "quantization_config": {
     "activation_scheme": "dynamic",
@@ -49,9 +49,9 @@
   "rms_norm_eps": 1e-06,
   "rope_interleave": true,
   "rope_scaling": {
-    "beta_fast": 32,
-    "beta_slow": 1,
-    "factor": 40,
+    "beta_fast": 32.0,
+    "beta_slow": 1.0,
+    "factor": 40.0,
     "mscale": 1.0,
     "mscale_all_dim": 1.0,
     "original_max_position_embeddings": 2048,
@@ -67,6 +67,6 @@
   "torch_dtype": "float32",
   "transformers_version": "4.51.3",
   "use_cache": true,
-  "v_head_dim": 32,
-  "vocab_size": 163840
+  "v_head_dim": 16,
+  "vocab_size": 129280
 }
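The config edits reshape the tiny test model: attention grows to 8 heads with narrower per-head dimensions, the MoE layer doubles to 8 routed experts, and the vocabulary shrinks to 129280, the size used by the released DeepSeek-V3 checkpoints. A minimal sketch, not part of the commit, that rebuilds the new config in code and checks the MLA head-dim arithmetic (assuming a transformers build >= 4.51 that exposes DeepseekV3Config):

```python
from transformers import DeepseekV3Config

# Only the fields touched or relevant here; everything else keeps its default.
config = DeepseekV3Config(
    hidden_size=128,
    intermediate_size=256,
    moe_intermediate_size=64,
    num_hidden_layers=6,
    num_attention_heads=8,   # was 4
    num_key_value_heads=8,   # was 2; now matches num_attention_heads
    q_lora_rank=32,
    kv_lora_rank=32,         # was 16
    qk_nope_head_dim=16,     # was 24
    qk_rope_head_dim=8,
    v_head_dim=16,           # was 32
    n_routed_experts=8,      # was 4
    n_shared_experts=1,
    num_experts_per_tok=2,
    n_group=2,
    vocab_size=129280,       # was 163840
)

# The per-head query/key width is the no-RoPE part plus the RoPE part:
# 16 + 8 = 24, matching the new "qk_head_dim": 24 in the diff above.
assert config.qk_nope_head_dim + config.qk_rope_head_dim == 24
```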
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:e068d3a17bef93953848bfdbf2a11b644d1f85316c1c35a07470d690b86ddcd4
-size 171101328
+oid sha256:5070c89c812ace8f741f9ffb2cb89dec28766ea792a77634f15988afe306fa2d
+size 136714544
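The safetensors change is just an LFS pointer swap: the binary payload was re-uploaded to match the new config, dropping from about 171 MB to about 137 MB. A minimal sketch, not part of the commit, for inspecting what the new shard actually contains after `git lfs pull` (assumes the safetensors and torch packages are installed):

```python
from safetensors import safe_open

total = 0
with safe_open("model.safetensors", framework="pt") as f:
    for name in f.keys():
        t = f.get_tensor(name)
        total += t.numel() * t.element_size()
        print(f"{name}: {tuple(t.shape)} {t.dtype}")

# The summed tensor payload should land close to the 136714544 bytes in the
# LFS pointer; the file also carries a small JSON header on top of that.
print(f"~{total} bytes of tensor data")
```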