qgallouedec (HF Staff) committed
Commit 54f7fa5 · verified · 1 Parent(s): abfa104

Upload Qwen2ForCausalLM

Files changed (3)
  1. config.json +5 -1
  2. generation_config.json +1 -1
  3. model.safetensors +1 -1
config.json CHANGED
@@ -7,6 +7,10 @@
   "hidden_size": 8,
   "initializer_range": 0.02,
   "intermediate_size": 32,
+  "layer_types": [
+    "full_attention",
+    "full_attention"
+  ],
   "max_position_embeddings": 32768,
   "max_window_layers": 28,
   "model_type": "qwen2",
@@ -19,7 +23,7 @@
   "sliding_window": null,
   "tie_word_embeddings": false,
   "torch_dtype": "float32",
-  "transformers_version": "4.47.0.dev0",
+  "transformers_version": "4.53.0.dev0",
   "use_cache": true,
   "use_sliding_window": false,
   "vocab_size": 151665
generation_config.json CHANGED
@@ -1,4 +1,4 @@
 {
   "_from_model_config": true,
-  "transformers_version": "4.47.0.dev0"
+  "transformers_version": "4.53.0.dev0"
 }
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:6a9a7efee47c981e76e47aa2b4a7679ff19dda9423908bb997d0bfbf7bfe6789
+oid sha256:cae5948041182dfc658395a0aeebe58ae3ef98413520cd26cb8f4a88d88943c8
 size 9717288
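The safetensors change only swaps the Git LFS pointer's oid; the payload size is unchanged. Under Git LFS, the oid is the SHA-256 digest of the actual file, so a downloaded weight file can be checked against this commit's pointer. A minimal sketch, assuming `model.safetensors` has been pulled into the current directory (the local path is an assumption):

```python
# Minimal sketch: verify that a downloaded model.safetensors matches the
# new LFS pointer from this commit (the oid is the file's SHA-256 digest).
import hashlib

EXPECTED_OID = "cae5948041182dfc658395a0aeebe58ae3ef98413520cd26cb8f4a88d88943c8"

with open("model.safetensors", "rb") as f:
    digest = hashlib.sha256(f.read()).hexdigest()

print(digest == EXPECTED_OID)  # True if the local file matches this commit
```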