Pbeau committed (verified)
Commit 57df38c · 1 parent: bc0b954

Training in progress, epoch 1

adapter_config.json CHANGED
@@ -28,10 +28,10 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "v_proj",
-    "k_proj",
     "o_proj",
-    "q_proj"
+    "q_proj",
+    "k_proj",
+    "v_proj"
   ],
   "target_parameters": [
     "7.mlp.experts.gate_up_proj",
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:ffa63ab603fd1d42694df8e435a5c5bfcc268848a269a417ca5f7358e86b693e
+oid sha256:95d309474db6d0a514ecec7a5f46aea3be3369fc3a097afb711215bae7c4ddb8
 size 60189176
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:d650ac9ef973dfd4fde70cb004960e266b104d02f8bc81ead1bae8ab39206c1f
+oid sha256:69fa983ce73898850fd66da627898598ab3c42bad3cf9f1b0a5ddd792090a880
 size 6161
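
Both binary files are tracked with Git LFS, so only the pointer files (spec version, sha256 oid, byte size) appear in the diff; the sizes are unchanged and only the oids differ. A sketch for checking a downloaded blob against the oid in its pointer, assuming the huggingface_hub client; the repo id below is a placeholder since the repository name is not shown on this page.

# Sketch: verify a downloaded LFS blob against the sha256 oid from its pointer file.
# "Pbeau/<adapter-repo>" is a placeholder; substitute the actual repository id.
import hashlib
from huggingface_hub import hf_hub_download

path = hf_hub_download(repo_id="Pbeau/<adapter-repo>", filename="adapter_model.safetensors")

digest = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        digest.update(chunk)

# For this commit's version, the digest should equal
# 95d309474db6d0a514ecec7a5f46aea3be3369fc3a097afb711215bae7c4ddb8
print(digest.hexdigest())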