Training in progress, epoch 1
adapter_config.json CHANGED
@@ -27,16 +27,16 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "q_proj",
-    "down_proj",
-    "fc1",
-    "out_proj",
     "up_proj",
     "k_proj",
+    "q_proj",
+    "gate_proj",
+    "out_proj",
     "o_proj",
+    "fc2",
     "v_proj",
-    "
-    "
+    "fc1",
+    "down_proj"
   ],
   "task_type": "CAUSAL_LM",
   "trainable_token_indices": null,
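
For reference, a minimal sketch of a peft `LoraConfig` that would emit the updated `target_modules` list above; the rank and alpha values are assumptions, since this diff only shows the module names and the task type.

```python
# Hedged sketch: reproduces the new "target_modules" and "task_type" from the
# diff above. r and lora_alpha are assumed values, not recorded in this commit.
from peft import LoraConfig

lora_config = LoraConfig(
    r=16,           # assumption: the rank is not shown in this diff
    lora_alpha=32,  # assumption: the scaling is not shown in this diff
    target_modules=[
        "up_proj", "k_proj", "q_proj", "gate_proj", "out_proj",
        "o_proj", "fc2", "v_proj", "fc1", "down_proj",
    ],
    task_type="CAUSAL_LM",
)
```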
    	
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:4f38b9e85c15e00477eba0358d541f53bc2c5cebd0f23ccc74d9290e87c65ecf
 size 4324331232
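
The safetensors file above is a git-lfs pointer; once the actual weights are pulled, a sketch like the following could load the adapter, assuming peft and a placeholder base model id (the base checkpoint is not recorded in this diff).

```python
# Hedged sketch: load the adapter in adapter_model.safetensors on top of its
# base model. "base-model-id" is a placeholder; the diff does not name the base.
from transformers import AutoModelForCausalLM
from peft import PeftModel

base = AutoModelForCausalLM.from_pretrained("base-model-id")
model = PeftModel.from_pretrained(base, ".")  # directory holding the adapter files
```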
    	
runs/Jul21_17-22-42_safe-gpu02/events.out.tfevents.1753111368.safe-gpu02.1091162.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3386965d3a63372f564ffc92ef7a6d5fcc8e38e4501ebff99ca476063d37bca9
+size 6797
    	
runs/Jul21_17-30-47_safe-gpu02/events.out.tfevents.1753111851.safe-gpu02.1093920.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:baf6c62562c33604773434d4767454db67bc01c99f55f05555000603beb26568
+size 7337
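
Both added files are TensorBoard event logs. A minimal sketch for inspecting them locally with TensorBoard's event reader; the scalar tag name is an assumption, since the diff shows only the LFS pointers, not the logged tags.

```python
# Hedged sketch: read the downloaded tfevents files with TensorBoard's
# EventAccumulator. The "train/loss" tag is an assumption about what was logged.
from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

acc = EventAccumulator("runs/Jul21_17-30-47_safe-gpu02")
acc.Reload()                    # parse the event file(s) in the run directory
print(acc.Tags()["scalars"])    # list the scalar tags actually present
for event in acc.Scalars("train/loss"):
    print(event.step, event.value)
```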
    	
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:7c0775daa28661f2e9501faeda430b93b2276849d28e055179715e2026d7ab51
 size 6033
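
training_args.bin is conventionally a pickled transformers.TrainingArguments object; a sketch for inspecting it, noting that unpickling requires trusting the file.

```python
# Hedged sketch: training_args.bin is assumed to be a pickle of
# transformers.TrainingArguments, so weights_only=False is required and the
# file must come from a trusted source before loading.
import torch

args = torch.load("training_args.bin", weights_only=False)
print(args.learning_rate, args.num_train_epochs)
```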