Commit 8991a0b (verified), committed by lmassaron · Parent(s): 237fbc8

Training in progress, epoch 1

adapter_config.json CHANGED
@@ -25,15 +25,15 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
+    "embed_tokens",
+    "lm_head",
     "q_proj",
-    "up_proj",
-    "o_proj",
     "v_proj",
+    "k_proj",
     "gate_proj",
     "down_proj",
-    "lm_head",
-    "embed_tokens",
-    "k_proj"
+    "o_proj",
+    "up_proj"
   ],
   "target_parameters": null,
   "task_type": "CAUSAL_LM",
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:ed8b924052fe2267113a50edee018c577ac7885245869292906f954a4a8c00e3
+oid sha256:7f807803a2d50c80fe64843445713b36cd483e702b365da30fe9e112d1a5cc98
 size 1293936232
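adapter_model.safetensors is tracked with Git LFS, so the commit only rewrites the pointer file: the sha256 oid changes while the size stays at 1293936232 bytes. A minimal sketch for checking a locally downloaded copy against the new pointer, assuming the weights have already been fetched into the working directory:

```python
import hashlib
from pathlib import Path

# Hypothetical local path: wherever the LFS object was materialized.
path = Path("adapter_model.safetensors")

expected_oid = "7f807803a2d50c80fe64843445713b36cd483e702b365da30fe9e112d1a5cc98"
expected_size = 1293936232

data = path.read_bytes()
print(hashlib.sha256(data).hexdigest() == expected_oid)  # oid from the new pointer
print(len(data) == expected_size)                        # size from the pointer
```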
special_tokens_map.json CHANGED
@@ -20,7 +20,13 @@
     "single_word": false
   },
   "eoi_token": "<end_of_image>",
-  "eos_token": "<eos>",
+  "eos_token": {
+    "content": "<eos>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
   "image_token": "<image_soft_token>",
   "pad_token": {
     "content": "<pad>",
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:fc68671f86c1acc9e3dd33dc16b7ed34f69682b526c2d89a0c0b92ec788e05f2
+oid sha256:2d92dc56ef884e150175f056e4f940c468c840b76c5ef1b826eb8610b9220304
 size 6161