qgallouedec (HF Staff) committed
Commit adaa2de · verified · Parent: 3a80552

Upload GPTNeoXForCausalLM

Files changed (3):
  1. config.json (+4 -3)
  2. generation_config.json (+1 -1)
  3. model.safetensors (+2 -2)
config.json CHANGED
@@ -6,8 +6,8 @@
   "attention_dropout": 0.0,
   "bos_token_id": 0,
   "classifier_dropout": 0.1,
-  "dtype": "bfloat16",
-  "eos_token_id": 0,
+  "dtype": "float32",
+  "eos_token_id": 2,
   "hidden_act": "gelu",
   "hidden_dropout": 0.0,
   "hidden_size": 8,
@@ -18,6 +18,7 @@
   "model_type": "gpt_neox",
   "num_attention_heads": 4,
   "num_hidden_layers": 2,
+  "num_key_value_heads": 2,
   "partial_rotary_factor": 0.25,
   "rope_scaling": null,
   "rope_theta": 10000,
@@ -27,5 +28,5 @@
   "transformers_version": "4.57.0.dev0",
   "use_cache": true,
   "use_parallel_residual": true,
-  "vocab_size": 50304
+  "vocab_size": 50277
 }
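
The config diff switches the checkpoint dtype from bfloat16 to float32, moves the EOS token from id 0 to id 2, shrinks the vocabulary from 50304 to 50277, and adds `num_key_value_heads: 2`, i.e. the 4 query heads now share 2 key/value heads (grouped-query attention). A minimal sketch of reading these fields back, assuming a local checkout of this repo at `./` (the path is a stand-in, not the actual repo id):

```python
from transformers import AutoConfig

# "./" is a hypothetical stand-in for a local checkout of this repo.
config = AutoConfig.from_pretrained("./")  # parses config.json

assert config.eos_token_id == 2               # was 0 before this commit
assert config.vocab_size == 50277             # was 50304
assert config.num_key_value_heads == 2        # new: 4 query heads share 2 KV heads
assert str(config.dtype).endswith("float32")  # was "bfloat16"
```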
generation_config.json CHANGED
@@ -1,6 +1,6 @@
 {
   "_from_model_config": true,
   "bos_token_id": 0,
-  "eos_token_id": 0,
+  "eos_token_id": 2,
   "transformers_version": "4.57.0.dev0"
 }
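
This keeps `generation_config.json` in sync with the `eos_token_id` change in `config.json`, so `generate()` stops on the intended token. A quick hedged check, again assuming a hypothetical local checkout at `./`:

```python
from transformers import GenerationConfig

# "./" again stands in for a local checkout of this repo (hypothetical path).
gen_config = GenerationConfig.from_pretrained("./")  # parses generation_config.json

assert gen_config.bos_token_id == 0
assert gen_config.eos_token_id == 2  # generation now stops on token id 2, not 0
```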
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:b44442826f19b5f5637fb789b0bc27f92dbaf5cba987b3e9fc72662a2f2bc068
-size 1616272
+oid sha256:9a6bdc7b4adcc5b815314292e533d8b7df057530b0ab2d185925f6a2a92dc5de
+size 3227768
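
The weight file roughly doubles, from 1,616,272 to 3,227,768 bytes, which is what the dtype change predicts: bfloat16 stores 2 bytes per parameter, float32 stores 4. A back-of-the-envelope sketch, under the assumption that file size ≈ parameter count × bytes per parameter (ignoring the small safetensors header):

```python
# Rough size check for the bfloat16 -> float32 switch.
old_bytes, new_bytes = 1_616_272, 3_227_768

params_bf16 = old_bytes / 2  # bfloat16: 2 bytes per parameter
params_fp32 = new_bytes / 4  # float32:  4 bytes per parameter

print(f"before: ~{params_bf16:,.0f} params, after: ~{params_fp32:,.0f} params")
# ~808,136 vs ~806,942: nearly the same parameter count, so the ~2x growth
# is the dtype change; the small gap comes from the vocab_size drop
# (50304 -> 50277) and the regrouped key/value heads.
```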