Commit c5011fd (verified) by kacperbb
Parent(s): ed567d6

Upload folder using huggingface_hub

0000100_adapters.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:99275945e4d5d45f85d0cbc5225e746e1b0f84fe1e0abc174b203129cdbe526a
+ size 6294991
0000200_adapters.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c35dbe489253207f956d51d85deca0abebf31edab52fbe293cf72df8e98034b8
+ size 6294991
0000300_adapters.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:431f08f65c7318e756c333e14ac0dd5b8b6481e6c9065e8dd692b03f75fd4fe5
+ size 6294991
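
The three numbered files above are Git LFS pointer files, not the weights themselves: each records only the LFS spec version, the sha256 of the real payload, and its size (6,294,991 bytes per checkpoint). Cloning without LFS yields just these three-line stubs. A minimal sketch of fetching a resolved checkpoint with huggingface_hub follows; the repo id is a placeholder, since the commit page does not show it:

from huggingface_hub import hf_hub_download

# Hypothetical repo id: replace with the actual repository name.
REPO_ID = "kacperbb/<repo-name>"

# hf_hub_download resolves the LFS pointer and returns a local path
# to the actual 6,294,991-byte safetensors checkpoint.
local_path = hf_hub_download(repo_id=REPO_ID, filename="0000100_adapters.safetensors")
print(local_path)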
adapter_config.json ADDED
@@ -0,0 +1,35 @@
+ {
+     "adapter_path": "./phi35_mlx_lora",
+     "batch_size": 4,
+     "config": null,
+     "data": "./data",
+     "fine_tune_type": "lora",
+     "grad_checkpoint": true,
+     "iters": 300,
+     "learning_rate": 1e-05,
+     "lora_parameters": {
+         "rank": 8,
+         "dropout": 0.0,
+         "scale": 20.0
+     },
+     "lr_schedule": null,
+     "mask_prompt": false,
+     "max_seq_length": 512,
+     "model": "./models/phi-3.5-mini",
+     "num_layers": 16,
+     "optimizer": "adam",
+     "optimizer_config": {
+         "adam": {},
+         "adamw": {}
+     },
+     "resume_adapter_file": null,
+     "save_every": 100,
+     "seed": 0,
+     "steps_per_eval": 200,
+     "steps_per_report": 10,
+     "test": false,
+     "test_batches": 500,
+     "train": true,
+     "val_batches": 25,
+     "wandb": null
+ }
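
adapter_config.json records the mlx_lm LoRA run that produced these files: rank-8 adapters (scale 20.0, no dropout) on the last 16 layers of a local Phi-3.5-mini, trained for 300 iterations at batch size 4, with a checkpoint saved every 100 iterations, which matches the three numbered checkpoints above. A minimal sketch of running the adapters for inference, assuming a recent mlx_lm and the local paths from the config:

from mlx_lm import load, generate

# load() applies the LoRA adapters found in adapter_path (a directory
# containing adapters.safetensors + adapter_config.json, i.e. this
# repo's layout) on top of the base model weights.
model, tokenizer = load(
    "./models/phi-3.5-mini",          # "model" in adapter_config.json
    adapter_path="./phi35_mlx_lora",  # "adapter_path" in adapter_config.json
)

print(generate(model, tokenizer, prompt="Hello", max_tokens=50))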
adapters.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:431f08f65c7318e756c333e14ac0dd5b8b6481e6c9065e8dd692b03f75fd4fe5
+ size 6294991
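
Note that adapters.safetensors has the same sha256 (431f08f6...) and size as 0000300_adapters.safetensors, so the unnumbered file is the final checkpoint from iteration 300, not a separate artifact. A quick way to confirm this after downloading both files (local paths assumed):

import hashlib

def sha256_of(path: str) -> str:
    # Stream in 1 MiB chunks so the checkpoint never has to fit in memory.
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)
    return h.hexdigest()

assert sha256_of("adapters.safetensors") == sha256_of("0000300_adapters.safetensors")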