Upload tokenizer
Files changed:
- special_tokens_map.json +0 -7
- tokenizer_config.json +0 -1
special_tokens_map.json CHANGED

@@ -12,12 +12,5 @@
     "normalized": false,
     "rstrip": false,
     "single_word": false
-  },
-  "pad_token": {
-    "content": "<|end_of_text|>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
   }
 }
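The deleted block had aliased "<|end_of_text|>" as the pad token. A minimal sketch of how downstream code can restore an equivalent pad token after loading this tokenizer; the repo id below is a placeholder, since the commit does not name the repository:

    from transformers import AutoTokenizer

    # Placeholder repo id -- this commit page does not show the repository name.
    tokenizer = AutoTokenizer.from_pretrained("org/model")

    # After this commit the checkpoint ships without a pad token, so
    # tokenizer.pad_token comes back as None.
    if tokenizer.pad_token is None:
        # Reinstate the removed mapping: pad with "<|end_of_text|>",
        # the token the deleted "pad_token" entry pointed at.
        tokenizer.pad_token = "<|end_of_text|>"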
    	
tokenizer_config.json CHANGED

@@ -2058,6 +2058,5 @@
     "attention_mask"
   ],
   "model_max_length": 1000000000000000019884624838656,
-  "pad_token": "<|end_of_text|>",
   "tokenizer_class": "PreTrainedTokenizerFast"
 }
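With the "pad_token" line gone from tokenizer_config.json as well, neither file defines padding any more, so padded batch encoding fails until a pad token is assigned. A short illustration of the resulting behavior, again with a placeholder repo id:

    from transformers import AutoTokenizer

    # Placeholder repo id, as above.
    tokenizer = AutoTokenizer.from_pretrained("org/model")

    # Padding a batch now raises a ValueError along the lines of
    # "Asking to pad but the tokenizer does not have a padding token".
    try:
        tokenizer(["one", "a longer example"], padding=True)
    except ValueError as err:
        print(err)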