lmmy committed
Commit 3bb2632 · verified · Parent: a4033dd

Add files using upload-large-folder tool

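The commit message refers to the upload-large-folder utility from huggingface_hub. A minimal sketch of how such an upload is typically driven, assuming the HfApi.upload_large_folder API; the repo id and local path below are placeholders, not the actual values used here:

from huggingface_hub import HfApi

api = HfApi()
api.upload_large_folder(
    repo_id="<user>/<repo>",        # placeholder, not the actual repository id
    folder_path="./local-export",   # placeholder local folder holding the files below
    repo_type="model",
)

The tool is designed to split large uploads across several commits and to resume after interruptions, which is why multi-gigabyte safetensors shards like the ones below land as LFS pointer updates.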
config.json CHANGED
The diff for this file is too large to render. See raw diff
 
generation_config.json CHANGED
@@ -9,5 +9,5 @@
   "pad_token_id": 0,
   "top_k": 64,
   "top_p": 0.95,
-  "transformers_version": "4.53.0.dev0"
+  "transformers_version": "4.54.0.dev0"
 }
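The only change here is the transformers version stamp; the sampling defaults (top_k=64, top_p=0.95, pad_token_id=0) are unchanged. A quick way to confirm what a downloaded checkpoint actually ships, sketched with the standard transformers GenerationConfig API and a placeholder repo id:

from transformers import GenerationConfig

gen_cfg = GenerationConfig.from_pretrained("<user>/<repo>")  # placeholder repo id
print(gen_cfg.top_k, gen_cfg.top_p, gen_cfg.pad_token_id)    # 64 0.95 0 per this file
print(gen_cfg.transformers_version)                          # "4.54.0.dev0" after this commit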
model-00001-of-00002.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:f1376e43bb0b05f36692b0696370f97541810cb3d59afe04c9cde2ee07146a1a
-size 5367794792
+oid sha256:b533499e8b4328bec343240e1d1701f730aae0967ec6d9f0acbeb73a5edff0dc
+size 5350508248
model-00002-of-00002.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:312e771912ca9de1f7e5fe6c58e0da757adc3367630fe1b7191e52ebe20c1caf
-size 688452198
+oid sha256:737c6bb43f74f54526678bbaafb820d48f12be5f8510c7ca39f84b5aa2c4d2bb
+size 1344190004
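Both shard entries above are Git LFS pointers: only the sha256 oid and byte size change in the commit, while the tensors themselves live in LFS storage. A small sketch for checking that a downloaded shard matches its pointer, using only the standard library (the file name is the one from this repo):

import hashlib
import os

path = "model-00001-of-00002.safetensors"  # shard downloaded from this repo
digest = hashlib.sha256()
with open(path, "rb") as f:
    # Hash in 1 MiB chunks to avoid loading the multi-GB file into memory.
    for chunk in iter(lambda: f.read(1 << 20), b""):
        digest.update(chunk)

print(os.path.getsize(path))   # should equal the pointer's size (5350508248 after this commit)
print(digest.hexdigest())      # should equal the pointer's oid (b533499e... after this commit)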
model.safetensors.index.json CHANGED
@@ -1,7 +1,6 @@
 {
   "metadata": {
-    "total_parameters": 5976833408,
-    "total_size": 10878876416
+    "total_size": 10878876544
   },
   "weight_map": {
     "model.audio_tower.conformer.0.attention.attn.k_proj.weight": "model-00001-of-00003.safetensors",
@@ -1553,6 +1552,7 @@
     "model.vision_tower.timm_model.blocks.3.9.layer_scale.gamma": "model-00001-of-00003.safetensors",
     "model.vision_tower.timm_model.blocks.3.9.norm.weight": "model-00001-of-00003.safetensors",
     "model.vision_tower.timm_model.conv_stem.bn.weight": "model-00001-of-00003.safetensors",
+    "model.vision_tower.timm_model.conv_stem.conv.bias": "model-00001-of-00003.safetensors",
     "model.vision_tower.timm_model.conv_stem.conv.weight": "model-00001-of-00003.safetensors",
     "model.vision_tower.timm_model.msfa.ffn.pw_exp.bn.weight": "model-00001-of-00003.safetensors",
     "model.vision_tower.timm_model.msfa.ffn.pw_exp.conv.weight": "model-00001-of-00003.safetensors",
preprocessor_config.json CHANGED
@@ -3,6 +3,7 @@
   "data_format": "channels_first",
   "default_to_square": false,
   "device": null,
+  "disable_grouping": null,
   "dither": 0.0,
   "do_center_crop": null,
   "do_convert_rgb": null,
@@ -40,7 +41,7 @@
   "processor_class": "Gemma3nProcessor",
   "resample": 2,
   "rescale_factor": 0.00392156862745098,
-  "return_attention_mask": false,
+  "return_attention_mask": true,
   "return_tensors": null,
   "sampling_rate": 16000,
   "size": {