Training done
Changed files:
- preprocessor_config.json +4 -4
- tokenizer.json +0 -0
- tokenizer_config.json +7 -0
preprocessor_config.json
CHANGED

@@ -19,8 +19,8 @@
   "processor_class": "DonutProcessor",
   "resample": 2,
   "rescale_factor": 0.00392156862745098,
-  "size": [
-    960,
-    1280
-  ]
+  "size": {
+    "height": 1280,
+    "width": 960
+  }
 }
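The old list form encoded the same dimensions as [width, height]; the new dict makes the ordering explicit. A minimal sketch of how the updated config is consumed, using a hypothetical repo id in place of this model's actual Hub path:

```python
from transformers import DonutProcessor

# Hypothetical repo id; substitute this model's actual Hub path.
processor = DonutProcessor.from_pretrained("user/donut-finetuned")

# The image processor now reads `size` as an explicit dict
# instead of a [width, height] list.
print(processor.image_processor.size)
# expected: {'height': 1280, 'width': 960}
```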
tokenizer.json
CHANGED

The diff for this file is too large to render; see the raw diff.
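When the rendered diff is too large, the raw file can be fetched directly with huggingface_hub and diffed locally; the repo id and revision below are placeholders:

```python
from huggingface_hub import hf_hub_download

# Hypothetical repo id and revision; substitute the actual values
# for this model and commit.
path = hf_hub_download(
    repo_id="user/donut-finetuned",
    filename="tokenizer.json",
    revision="main",
)
print(path)  # local cache path to the raw tokenizer.json
```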
tokenizer_config.json
CHANGED

@@ -314,11 +314,18 @@
   "cls_token": "<s>",
   "eos_token": "</s>",
   "mask_token": "<mask>",
+  "max_length": 768,
   "model_max_length": 1000000000000000019884624838656,
+  "pad_to_multiple_of": null,
   "pad_token": "<pad>",
+  "pad_token_type_id": 0,
+  "padding_side": "right",
   "processor_class": "DonutProcessor",
   "sep_token": "</s>",
   "sp_model_kwargs": {},
+  "stride": 0,
   "tokenizer_class": "XLMRobertaTokenizer",
+  "truncation_side": "right",
+  "truncation_strategy": "longest_first",
   "unk_token": "<unk>"
 }
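The seven added keys persist the padding and truncation settings used at training time. A minimal sketch of the equivalent call-time behavior, again with a hypothetical repo id:

```python
from transformers import XLMRobertaTokenizer

# Hypothetical repo id; substitute this model's actual Hub path.
tokenizer = XLMRobertaTokenizer.from_pretrained("user/donut-finetuned")

# Mirrors the stored settings: pad on the right up to max_length=768,
# truncating with the "longest_first" strategy.
enc = tokenizer(
    "example target sequence",
    max_length=768,
    padding="max_length",
    truncation="longest_first",
)
print(len(enc["input_ids"]))  # 768
```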