Crystalcareai committed on
Commit 267cc31 · 1 Parent(s): 28edcd1

Upload tokenizer
special_tokens_map.json CHANGED
@@ -1,9 +1,4 @@
 {
-  "additional_special_tokens": [
-    "<unk>",
-    "<s>",
-    "</s>"
-  ],
   "bos_token": {
     "content": "<s>",
     "lstrip": false,
@@ -18,13 +13,7 @@
     "rstrip": false,
     "single_word": false
   },
-  "pad_token": {
-    "content": "</s>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  },
+  "pad_token": "</s>",
   "unk_token": {
     "content": "<unk>",
     "lstrip": false,
tokenizer.json CHANGED
@@ -1,8 +1,8 @@
 {
   "version": "1.0",
   "truncation": {
-    "direction": "Left",
-    "max_length": 512,
+    "direction": "Right",
+    "max_length": 4096,
     "strategy": "LongestFirst",
     "stride": 0
   },
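
The truncation change (direction "Left" → "Right", max_length 512 → 4096) lives inside the serialized fast tokenizer. A sketch for inspecting it directly with the `tokenizers` package; the exact shape of the returned dict may vary by version:

```python
from tokenizers import Tokenizer

# Load the serialized fast tokenizer shipped in this commit.
tk = Tokenizer.from_file("tokenizer.json")

# Inputs longer than 4096 tokens are now cut from the right (the end)
# instead of the left (the start).
print(tk.truncation)
# e.g. {'direction': 'right', 'max_length': 4096,
#       'strategy': 'longest_first', 'stride': 0}
```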
tokenizer_config.json CHANGED
@@ -25,13 +25,8 @@
       "special": true
     }
   },
-  "additional_special_tokens": [
-    "<unk>",
-    "<s>",
-    "</s>"
-  ],
+  "additional_special_tokens": [],
   "bos_token": "<s>",
-  "chat_template": "{% for message in messages %}\n{% if message['role'] == 'user' %}\n{{ '<|user|>\n' + message['content'] + eos_token }}\n{% elif message['role'] == 'system' %}\n{{ '<|system|>\n' + message['content'] + eos_token }}\n{% elif message['role'] == 'assistant' %}\n{{ '<|assistant|>\n' + message['content'] + eos_token }}\n{% endif %}\n{% if loop.last and add_generation_prompt %}\n{{ '<|assistant|>' }}\n{% endif %}\n{% endfor %}",
   "clean_up_tokenization_spaces": false,
   "eos_token": "</s>",
   "legacy": true,
@@ -40,7 +35,6 @@
   "sp_model_kwargs": {},
   "spaces_between_special_tokens": false,
   "tokenizer_class": "LlamaTokenizer",
-  "truncation_side": "left",
   "unk_token": "<unk>",
-  "use_default_system_prompt": true
+  "use_default_system_prompt": false
 }
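
The deleted `chat_template` is the Zephyr-style `<|system|>` / `<|user|>` / `<|assistant|>` format, and `use_default_system_prompt` is now false, so the tokenizer no longer injects LlamaTokenizer's built-in system prompt. If you still need the old format, here is a sketch of reapplying the removed template by hand (placeholder repo id again):

```python
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("your-org/your-model")  # placeholder id

# The Jinja template this commit removes from tokenizer_config.json:
zephyr_template = (
    "{% for message in messages %}\n"
    "{% if message['role'] == 'user' %}\n"
    "{{ '<|user|>\n' + message['content'] + eos_token }}\n"
    "{% elif message['role'] == 'system' %}\n"
    "{{ '<|system|>\n' + message['content'] + eos_token }}\n"
    "{% elif message['role'] == 'assistant' %}\n"
    "{{ '<|assistant|>\n' + message['content'] + eos_token }}\n"
    "{% endif %}\n"
    "{% if loop.last and add_generation_prompt %}\n"
    "{{ '<|assistant|>' }}\n"
    "{% endif %}\n"
    "{% endfor %}"
)

messages = [{"role": "user", "content": "Hello"}]
# Passing the template explicitly still works even though the config no
# longer ships one; without it, apply_chat_template has nothing to render.
print(tok.apply_chat_template(
    messages, chat_template=zephyr_template,
    add_generation_prompt=True, tokenize=False,
))
```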