danielhanchen committed
Commit 8e673d0 · verified · 1 Parent(s): 4930133

Upload folder using huggingface_hub

.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zst filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
+ tokenizer.json filter=lfs diff=lfs merge=lfs -text
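
The added rule routes tokenizer.json through Git LFS alongside the existing archive and event-log patterns. As a rough illustration (not part of the commit), the flat patterns in this hunk can be approximated with fnmatch to see which files the attribute would catch:

    # Illustrative only: fnmatch approximates gitattributes globbing
    # closely enough for the flat patterns shown in this hunk.
    from fnmatch import fnmatch

    lfs_patterns = ["*.zip", "*.zst", "*tfevents*", "tokenizer.json"]

    def routed_to_lfs(filename: str) -> bool:
        return any(fnmatch(filename, pattern) for pattern in lfs_patterns)

    print(routed_to_lfs("tokenizer.json"))  # True after this commit
    print(routed_to_lfs("config.json"))     # False; stored as a regular file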
added_tokens.json ADDED
@@ -0,0 +1,3 @@
+ {
+ "<image_soft_token>": 262144
+ }
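
The new file registers a single extra token, <image_soft_token>, at id 262144. A minimal sketch for reading it back from a local checkout (only the three lines above are assumed):

    import json

    # Read the mapping added by this commit from a local checkout.
    with open("added_tokens.json") as f:
        added_tokens = json.load(f)

    print(added_tokens["<image_soft_token>"])  # 262144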
config.json CHANGED
@@ -7,7 +7,7 @@
  "attn_logit_softcapping": null,
  "bos_token_id": 2,
  "cache_implementation": "hybrid",
- "eos_token_id": 1,
+ "eos_token_id": 106,
  "final_logit_softcapping": null,
  "head_dim": 256,
  "hidden_activation": "gelu_pytorch_tanh",
@@ -28,7 +28,8 @@
  "sliding_window": 512,
  "sliding_window_pattern": 6,
  "torch_dtype": "bfloat16",
- "transformers_version": "4.52.0.dev0",
+ "transformers_version": "4.51.3",
+ "unsloth_fixed": true,
  "use_cache": true,
  "vocab_size": 262144
  }
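
This change moves eos_token_id from 1 to 106 (the same id generation_config.json lists as an end-of-sequence token), pins transformers_version to the stable 4.51.3 release instead of a dev build, and adds an unsloth_fixed marker. A hedged sketch for checking the result; "path/to/this/repo" is a placeholder for the local checkout or Hub repo id, which the diff does not name:

    from transformers import AutoConfig

    # Load the updated config and inspect the fields touched by this commit.
    config = AutoConfig.from_pretrained("path/to/this/repo")
    print(config.eos_token_id)                     # expected: 106
    print(config.transformers_version)             # expected: "4.51.3"
    print(getattr(config, "unsloth_fixed", None))  # non-standard key, kept as an extra attribute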
generation_config.json CHANGED
@@ -1,11 +1,14 @@
  {
+ "bos_token_id": 2,
  "cache_implementation": "hybrid",
  "do_sample": true,
  "eos_token_id": [
  1,
  106
  ],
+ "max_length": 32768,
+ "pad_token_id": 0,
  "top_k": 64,
  "top_p": 0.95,
- "transformers_version": "4.52.0.dev0"
+ "transformers_version": "4.51.3"
  }
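
The generation defaults gain explicit bos_token_id, max_length, and pad_token_id values and drop the dev transformers version. A hedged sketch for reading them back; the repo path is again a placeholder:

    from transformers import GenerationConfig

    # Load the updated generation defaults written by this commit.
    gen_config = GenerationConfig.from_pretrained("path/to/this/repo")
    print(gen_config.eos_token_id)  # expected: [1, 106]
    print(gen_config.max_length)    # expected: 32768
    print(gen_config.pad_token_id)  # expected: 0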
model.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:aba68b2ae82099abbd81a745d4c4afbfcf207f4e34eddc902a1a63f332aca998
- size 3999582960
+ oid sha256:6d571889049c5550a2cdcc3e1846646e595b683ce0d3c7bea904ed8874cf8ef2
+ size 1999811208
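
Only the LFS pointer changes here: the new payload is 1,999,811,208 bytes, roughly half the previous 3,999,582,960. The diff records only the hash and byte count, so any conclusion about how the weights changed requires downloading the file; a small inspection sketch, assuming the safetensors and torch packages are available:

    from safetensors import safe_open

    # Peek at a few tensors in the downloaded weight file.
    with safe_open("model.safetensors", framework="pt") as f:
        for name in list(f.keys())[:3]:
            tensor = f.get_tensor(name)
            print(name, tuple(tensor.shape), tensor.dtype)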
special_tokens_map.json ADDED
@@ -0,0 +1,33 @@
+ {
+ "boi_token": "<start_of_image>",
+ "bos_token": {
+ "content": "<bos>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "eoi_token": "<end_of_image>",
+ "eos_token": {
+ "content": "<end_of_turn>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "image_token": "<image_soft_token>",
+ "pad_token": {
+ "content": "<pad>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "unk_token": {
+ "content": "<unk>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ }
+ }
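
The map names <end_of_turn> as the end-of-sequence token and <pad> as the padding token, which is consistent with the eos_token_id of 106 and pad_token_id of 0 set above (the numeric ids themselves live in the tokenizer files, not in this map). A hedged check against a tokenizer loaded from this repo; the path is a placeholder:

    from transformers import AutoTokenizer

    # The special-token map added here should surface on the tokenizer object.
    tokenizer = AutoTokenizer.from_pretrained("path/to/this/repo")
    print(tokenizer.bos_token)  # expected: <bos>
    print(tokenizer.eos_token)  # expected: <end_of_turn>
    print(tokenizer.pad_token)  # expected: <pad>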
tokenizer.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4667f2089529e8e7657cfb6d1c19910ae71ff5f28aa7ab2ff2763330affad795
+ size 33384568
tokenizer.model ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1299c11d7cf632ef3b4e11937501358ada021bbdf7c47638d13c0ee982f2e79c
+ size 4689074
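
Both tokenizer artifacts are committed as Git LFS pointers, so the repository stores only their sha256 digests and byte counts. A downloaded copy can be verified directly against the values recorded above:

    import hashlib
    import os

    # Verify tokenizer.model against the oid/size in its LFS pointer;
    # the same check applies to tokenizer.json with its own digest and size.
    path = "tokenizer.model"
    with open(path, "rb") as f:
        digest = hashlib.sha256(f.read()).hexdigest()

    print(digest == "1299c11d7cf632ef3b4e11937501358ada021bbdf7c47638d13c0ee982f2e79c")
    print(os.path.getsize(path) == 4689074)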
tokenizer_config.json ADDED
The diff for this file is too large to render. See raw diff