Update for Transformers PR (#2)
update (655480ca450dedaa6f93ae9688b7c2decb66efd5)
Co-authored-by: Raushan Turganbay <[email protected]>
- config.json +1 -1
- preprocessor_config.json +1 -2
config.json CHANGED

```diff
@@ -89,7 +89,7 @@
   "model_type": "siglip2_vision_model",
   "num_attention_heads": 16,
   "num_channels": 3,
-  "num_hidden_layers":
+  "num_hidden_layers": 26,
   "num_patches": 256,
   "patch_size": 16,
   "torch_dtype": "bfloat16",
```
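A minimal sketch of checking the updated vision-tower settings, assuming the Lfm2Vl classes from the Transformers PR are available in your installed `transformers` and that the repo id below (a placeholder, not taken from this commit) points at this repository:

```python
# Sketch: verify the vision config picked up by transformers after this change.
from transformers import AutoConfig

config = AutoConfig.from_pretrained("LiquidAI/LFM2-VL-1.6B")  # placeholder repo id

# The SigLIP2 vision tower is assumed to live under `vision_config`,
# as is usual for transformers vision-language configs.
vision = config.vision_config
print(vision.model_type)         # siglip2_vision_model
print(vision.num_hidden_layers)  # 26 after this change
print(vision.patch_size)         # 16
```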
preprocessor_config.json CHANGED

```diff
@@ -11,13 +11,12 @@
     0.5,
     0.5
   ],
-  "image_processor_type": "
+  "image_processor_type": "Lfm2VlImageProcessorFast",
   "image_std": [
     0.5,
     0.5,
     0.5
   ],
-  "input_data_format": "channels_last",
   "max_num_patches": 1024,
   "patch_size": 16,
   "processor_class": "Lfm2VlProcessor",
```
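A minimal sketch of loading the updated preprocessor config, assuming the standard `transformers` processor API; the repo id and image URL are placeholders, not taken from this commit:

```python
# Sketch: confirm the processor now resolves the fast image processor.
import requests
from PIL import Image
from transformers import AutoProcessor

processor = AutoProcessor.from_pretrained("LiquidAI/LFM2-VL-1.6B")  # placeholder repo id

# After this change the nested image processor should resolve to
# Lfm2VlImageProcessorFast, matching "image_processor_type" above.
print(type(processor.image_processor).__name__)

image = Image.open(
    requests.get("http://images.cocodataset.org/val2017/000000039769.jpg", stream=True).raw
)
inputs = processor(images=image, text="Describe this image.", return_tensors="pt")
print({k: tuple(v.shape) for k, v in inputs.items() if hasattr(v, "shape")})
```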