jnjj committed
Commit 776021a · verified · 1 Parent(s): 55d3635

Update config.json

Files changed (1)
  1. config.json +7 -7
config.json CHANGED
@@ -23,11 +23,11 @@
   "hidden_size": 2560,
   "initializer_range": 0.02,
   "intermediate_size": 10240,
- "max_position_embeddings": 13107200,
+ "max_position_embeddings": 131072,
   "model_type": "gemma3_text",
- "num_attention_heads": 2,
- "num_hidden_layers": 1,
- "num_key_value_heads": 2,
+ "num_attention_heads": 8,
+ "num_hidden_layers": 34,
+ "num_key_value_heads": 4,
   "query_pre_attn_scalar": 256,
   "rms_norm_eps": 1e-06,
   "rope_local_base_freq": 10000.0,
@@ -52,11 +52,11 @@
   "intermediate_size": 4304,
   "layer_norm_eps": 1e-06,
   "model_type": "siglip_vision_model",
- "num_attention_heads": 2,
+ "num_attention_heads": 16,
   "num_channels": 3,
- "num_hidden_layers": 1,
+ "num_hidden_layers": 27,
   "patch_size": 14,
   "torch_dtype": "float32",
   "vision_use_head": false
   }
- }
+ }
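
For reference, a minimal sketch of how the fields touched by this commit could be checked after pulling it, assuming the config.json uses the nested Gemma 3 layout with "text_config" (gemma3_text) and "vision_config" (siglip_vision_model) sub-objects; the repository id is not shown in this commit view, so the file is read locally.

```python
import json

# Read the edited config.json from the working copy and verify the values
# introduced by this commit. Assumes the nested layout with "text_config"
# and "vision_config" sub-objects; adjust the keys if the file is flat.
with open("config.json") as f:
    cfg = json.load(f)

text_cfg = cfg["text_config"]
vision_cfg = cfg["vision_config"]

# Text (gemma3_text) values set in this commit.
assert text_cfg["max_position_embeddings"] == 131072
assert text_cfg["num_attention_heads"] == 8
assert text_cfg["num_hidden_layers"] == 34
assert text_cfg["num_key_value_heads"] == 4

# Vision (siglip_vision_model) values set in this commit.
assert vision_cfg["num_attention_heads"] == 16
assert vision_cfg["num_hidden_layers"] == 27
```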