Training in progress, epoch 1
adapter_config.json
CHANGED
@@ -28,15 +28,15 @@
   "target_modules": [
     "down_proj",
     "out_proj",
-    "k_proj",
-    "up_proj",
-    "fc2",
-    "fc1",
     "o_proj",
-    "v_proj",
-    "lm_head",
     "gate_proj",
-    "q_proj"
+    "fc1",
+    "v_proj",
+    "up_proj",
+    "k_proj",
+    "q_proj",
+    "fc2",
+    "lm_head"
   ],
   "task_type": "CAUSAL_LM",
   "use_dora": false,
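The target_modules list above changes only in order, which is consistent with PEFT storing the module set internally as a Python set before serializing it; the adapted layers themselves stay the same. For reference, a config of this shape would typically come from a peft LoraConfig like the minimal sketch below, where r, lora_alpha, and lora_dropout are placeholder values rather than settings recorded in this commit:

from peft import LoraConfig

# Sketch of a LoRA config matching the target_modules and task_type above.
# r, lora_alpha, and lora_dropout are placeholders, not values from this repo.
lora_config = LoraConfig(
    r=16,
    lora_alpha=32,
    lora_dropout=0.05,
    target_modules=[
        "down_proj", "out_proj", "o_proj", "gate_proj",
        "fc1", "v_proj", "up_proj", "k_proj",
        "q_proj", "fc2", "lm_head",
    ],
    task_type="CAUSAL_LM",
    use_dora=False,
)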
adapter_model.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:73a0cd2c7ea68bf3faf05f0cbdc3b8c48faa0afa058f4828618368e7e07c5745
 size 2839124552
runs/Jul21_11-21-07_gpu-dom-cmlre/events.out.tfevents.1753077070.gpu-dom-cmlre
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:00aeddf933a85d0a2158d5527780f1c764568d7f4490f492addee22284d6545e
+size 423208
runs/Jul22_18-44-59_gpu-dom-cmlre/events.out.tfevents.1753190101.gpu-dom-cmlre
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:26ba381324fa1f8cf682760326f9ea30a6ff3947f5f3cda30b08baf647da29d7
+size 57333
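The runs/ entries are TensorBoard event logs written during training. One way to inspect them after pulling the LFS objects is TensorBoard's EventAccumulator, sketched below; the "train/loss" tag is an assumption about what was logged, so check the listed tags first:

from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

# Point the accumulator at the run directory that holds the event file.
acc = EventAccumulator("runs/Jul22_18-44-59_gpu-dom-cmlre")
acc.Reload()

# List the scalar tags that were actually logged, then read one of them.
print(acc.Tags()["scalars"])
for event in acc.Scalars("train/loss"):  # assumed tag name; adjust to the list above
    print(event.step, event.value)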
training_args.bin
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:c3f21fcd695d1af2f3abaa74b859d72062324a61db29d36060a886342ba157e1
 size 5624
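training_args.bin is the pickled TrainingArguments object that the transformers Trainer saves next to its checkpoints. A minimal sketch for inspecting it locally, assuming the LFS blob has been fetched and transformers is installed so the object can unpickle against its classes:

import torch

# Load the serialized TrainingArguments; weights_only=False is required on
# recent torch versions because this is a pickled object, not a tensor file.
# Only do this for files from a source you trust.
args = torch.load("training_args.bin", weights_only=False)
print(args.num_train_epochs, args.learning_rate, args.per_device_train_batch_size)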