Upload Qwen2_5_VLForConditionalGeneration

#2
by albertvillanova (HF Staff) · opened
Files changed (3)
  1. config.json +3 -37
  2. generation_config.json +1 -1
  3. model.safetensors +1 -1
config.json CHANGED
@@ -4,6 +4,7 @@
   ],
   "attention_dropout": 0.0,
   "bos_token_id": 151643,
+  "dtype": "bfloat16",
   "eos_token_id": 151645,
   "hidden_act": "silu",
   "hidden_size": 2048,
@@ -32,6 +33,7 @@
   ],
   "attention_dropout": 0.0,
   "bos_token_id": 151643,
+  "dtype": "bfloat16",
   "eos_token_id": 151645,
   "hidden_act": "silu",
   "hidden_size": 16,
@@ -39,40 +41,6 @@
   "initializer_range": 0.02,
   "intermediate_size": 11008,
   "layer_types": [
-    "full_attention",
-    "full_attention",
-    "full_attention",
-    "full_attention",
-    "full_attention",
-    "full_attention",
-    "full_attention",
-    "full_attention",
-    "full_attention",
-    "full_attention",
-    "full_attention",
-    "full_attention",
-    "full_attention",
-    "full_attention",
-    "full_attention",
-    "full_attention",
-    "full_attention",
-    "full_attention",
-    "full_attention",
-    "full_attention",
-    "full_attention",
-    "full_attention",
-    "full_attention",
-    "full_attention",
-    "full_attention",
-    "full_attention",
-    "full_attention",
-    "full_attention",
-    "full_attention",
-    "full_attention",
-    "full_attention",
-    "full_attention",
-    "full_attention",
-    "full_attention",
     "full_attention",
     "full_attention"
   ],
@@ -93,7 +61,6 @@
   "rope_theta": 1000000.0,
   "sliding_window": null,
   "tie_word_embeddings": true,
-  "torch_dtype": "bfloat16",
   "use_cache": true,
   "use_sliding_window": false,
   "video_token_id": null,
@@ -102,8 +69,7 @@
   "vision_token_id": 151654,
   "vocab_size": 151936
   },
-  "torch_dtype": "bfloat16",
-  "transformers_version": "4.56.0.dev0",
+  "transformers_version": "4.56.1",
   "use_cache": true,
   "use_sliding_window": false,
   "video_token_id": 151656,
generation_config.json CHANGED
@@ -2,5 +2,5 @@
   "_from_model_config": true,
   "bos_token_id": 151643,
   "eos_token_id": 151645,
-  "transformers_version": "4.56.0.dev0"
+  "transformers_version": "4.56.1"
 }
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:64f6d603334a11233717f2c62832c863394fc51cdbdabbd6a25dcdc4a6f26c8d
+oid sha256:4f2af27576d9c46218d01c601ba63e99c39fa2ce0e7c817013c1b144864989e4
 size 18086192
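model.safetensors is tracked through Git LFS, so the diff only swaps the pointer's sha256 oid; the payload stays 18086192 bytes. A minimal sketch for verifying a locally downloaded copy of the weights against the new pointer (assuming the real file, not the pointer stub, is present in the working directory):

import hashlib
from pathlib import Path

data = Path("model.safetensors").read_bytes()

# Size and digest taken from the updated LFS pointer above.
assert len(data) == 18086192
expected = "4f2af27576d9c46218d01c601ba63e99c39fa2ce0e7c817013c1b144864989e4"
assert hashlib.sha256(data).hexdigest() == expected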