zR
committed on
for transformers 4.47
- tokenization_chatglm.py +1 -0
tokenization_chatglm.py
CHANGED
@@ -197,6 +197,7 @@ class ChatGLMTokenizer(PreTrainedTokenizer):
         self,
         encoded_inputs: Union[Dict[str, EncodedInput], BatchEncoding],
         max_length: Optional[int] = None,
+        padding_side: str = "left",
         padding_strategy: PaddingStrategy = PaddingStrategy.DO_NOT_PAD,
         pad_to_multiple_of: Optional[int] = None,
         return_attention_mask: Optional[bool] = None,
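
Context for the change (a sketch, not part of the commit): as of transformers 4.47, PreTrainedTokenizerBase.pad() forwards a padding_side keyword to _pad(), so custom tokenizers that override _pad() without that parameter fail with an unexpected-keyword-argument TypeError. The sketch below shows a _pad() override with the 4.47-compatible signature; MyTokenizer and the super()._pad() delegation are illustrative assumptions, not the ChatGLMTokenizer body from this repository.

# A minimal sketch of a _pad() override matching the signature added in this
# commit. "MyTokenizer" is a hypothetical subclass used for illustration only.
from typing import Dict, Optional, Union

from transformers import PreTrainedTokenizer
from transformers.tokenization_utils_base import BatchEncoding, EncodedInput
from transformers.utils import PaddingStrategy


class MyTokenizer(PreTrainedTokenizer):
    def _pad(
        self,
        encoded_inputs: Union[Dict[str, EncodedInput], BatchEncoding],
        max_length: Optional[int] = None,
        padding_side: str = "left",  # the keyword added in this commit
        padding_strategy: PaddingStrategy = PaddingStrategy.DO_NOT_PAD,
        pad_to_multiple_of: Optional[int] = None,
        return_attention_mask: Optional[bool] = None,
    ) -> dict:
        # Delegate to the base implementation (calling it with padding_side
        # requires transformers >= 4.47); a real custom tokenizer would pad
        # its own attention_mask / position_ids here instead.
        return super()._pad(
            encoded_inputs,
            max_length=max_length,
            padding_side=padding_side,
            padding_strategy=padding_strategy,
            pad_to_multiple_of=pad_to_multiple_of,
            return_attention_mask=return_attention_mask,
        )

Under this assumption, tokenizer.pad(...) on transformers >= 4.47 reaches _pad() without a TypeError, and on older versions the extra keyword simply keeps its "left" default, so the override stays backward compatible.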