qgallouedec (HF Staff) committed
Commit 8a1afeb · verified · 1 Parent(s): 2cb3712

Upload PaliGemmaForConditionalGeneration

Files changed (3):
  1. config.json (+5 -3)
  2. generation_config.json (+1 -1)
  3. model.safetensors (+1 -1)
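A commit like this one is typically produced by transformers' push_to_hub helper, whose default commit message is "Upload <ClassName>" — matching the title above. A minimal sketch under that assumption (the local checkpoint path and Hub repo id are not shown on this page, so they stay elided):

from transformers import PaliGemmaForConditionalGeneration

# Load the tiny test model from a local checkpoint (path elided) and push it;
# by default push_to_hub titles the commit "Upload PaliGemmaForConditionalGeneration".
model = PaliGemmaForConditionalGeneration.from_pretrained("...")  # path not shown on this page
model.push_to_hub("...")  # repo id not shown on this page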
config.json CHANGED
@@ -3,6 +3,7 @@
     "PaliGemmaForConditionalGeneration"
   ],
   "bos_token_id": 2,
+  "dtype": "bfloat16",
   "eos_token_id": 1,
   "hidden_size": 2048,
   "ignore_index": -100,
@@ -13,12 +14,14 @@
   "text_config": {
     "attention_bias": false,
     "attention_dropout": 0.0,
+    "dtype": "float32",
     "head_dim": 256,
     "hidden_act": "gelu_pytorch_tanh",
     "hidden_activation": null,
     "hidden_size": 16,
     "initializer_range": 0.02,
     "intermediate_size": 16384,
+    "layer_types": null,
     "max_position_embeddings": 8192,
     "model_type": "gemma",
     "num_attention_heads": 4,
@@ -27,14 +30,13 @@
     "num_key_value_heads": 2,
     "rms_norm_eps": 1e-06,
     "rope_theta": 10000.0,
-    "torch_dtype": "float32",
     "use_cache": true,
     "vocab_size": 257216
   },
-  "torch_dtype": "bfloat16",
-  "transformers_version": "4.56.0.dev0",
+  "transformers_version": "4.57.0.dev0",
   "vision_config": {
     "attention_dropout": 0.0,
+    "embed_dim": 32,
     "hidden_act": "gelu_pytorch_tanh",
     "hidden_size": 16,
     "image_size": 224,
generation_config.json CHANGED
@@ -3,5 +3,5 @@
   "bos_token_id": 2,
   "eos_token_id": 1,
   "pad_token_id": 0,
-  "transformers_version": "4.56.0.dev0"
+  "transformers_version": "4.57.0.dev0"
 }
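generation_config.json carries only the special-token ids plus the transformers version that wrote it, so this diff is a pure version bump. A minimal sketch reconstructing the file locally (token ids taken from the diff above; transformers fills in its own version on save):

from transformers import GenerationConfig

gen_cfg = GenerationConfig(bos_token_id=2, eos_token_id=1, pad_token_id=0)
gen_cfg.save_pretrained(".")  # writes ./generation_config.json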
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:62bd764abe98bd832ce30f426108b70e2790384596a9ea8257a192d16b219966
+oid sha256:21b3ebba8f0106c9bc0ac0a550193f6b9c5780acaf87d72bbc341daef6d25110
 size 12250448
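model.safetensors is stored through Git LFS, so the diff touches only the pointer file: the oid line is the SHA-256 of the actual weights, and the size (12250448 bytes) is unchanged. A minimal sketch for verifying a downloaded copy against the new pointer:

import hashlib

expected = "21b3ebba8f0106c9bc0ac0a550193f6b9c5780acaf87d72bbc341daef6d25110"
h = hashlib.sha256()
with open("model.safetensors", "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
        h.update(chunk)
assert h.hexdigest() == expected, "checksum mismatch"
print("OK:", h.hexdigest())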