not-lain committed on
Commit
340dfab
·
verified ·
1 Parent(s): cbf0064

Upload LlamaForCausalLM

Browse files
Files changed (2) hide show
  1. config.json +2 -1
  2. generation_config.json +1 -0
config.json CHANGED
@@ -1,11 +1,12 @@
1
  {
2
- "_name_or_path": "HuggingFaceTB/SmolLM-135M",
3
  "architectures": [
4
  "LlamaForCausalLM"
5
  ],
6
  "attention_bias": false,
7
  "attention_dropout": 0.0,
8
  "bos_token_id": 0,
 
9
  "eos_token_id": 0,
10
  "hidden_act": "silu",
11
  "hidden_size": 576,
 
1
  {
2
+ "_name_or_path": "not-lain/smol-gec-sft",
3
  "architectures": [
4
  "LlamaForCausalLM"
5
  ],
6
  "attention_bias": false,
7
  "attention_dropout": 0.0,
8
  "bos_token_id": 0,
9
+ "end_token_id": 0,
10
  "eos_token_id": 0,
11
  "hidden_act": "silu",
12
  "hidden_size": 576,
generation_config.json CHANGED
@@ -2,5 +2,6 @@
2
  "_from_model_config": true,
3
  "bos_token_id": 0,
4
  "eos_token_id": 0,
 
5
  "transformers_version": "4.44.2"
6
  }
 
2
  "_from_model_config": true,
3
  "bos_token_id": 0,
4
  "eos_token_id": 0,
5
+ "pad_token_id": 0,
6
  "transformers_version": "4.44.2"
7
  }